diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 189de0cb494d27bdbe0aba1d59b25a6c75bb78ca..21a29aee9711c54f3acdc7a4354100ae2245f8f8 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,12 +22,11 @@ prepare_ci_lta_docker_image:
     - docker build -t ci_base -f Docker/lofar-ci/Dockerfile_ci_base .
     - docker build -t ci_lta -f Docker/lofar-ci/Dockerfile_ci_lta .
 
-#TODO: make proper MAC docker image with WinCC (rpm packages from mcu001)
-#prepare_ci_mac_docker_image:
-#  stage: prepare
-#  script:
-#    - docker build -t ci_base -f Docker/lofar-ci/Dockerfile_ci_base .
-#    - docker build -t ci_mac -f Docker/lofar-ci/Dockerfile_ci_mac .
+prepare_ci_mac_docker_image:
+  stage: prepare
+  script:
+    - docker build -t ci_base -f Docker/lofar-ci/Dockerfile_ci_base .
+    - docker build -t ci_mac -f Docker/lofar-ci/Dockerfile_ci_mac .
 
 #
 # BUILD STAGE
@@ -42,7 +41,7 @@ build_TMSS:
     - mkdir -p build/gnucxx11_opt
     - cd build/gnucxx11_opt
    - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DUSE_LOG4CPLUS=false ../..
-    - make -j 8
+    - make -j 12
     - make install
   dependencies:
     - prepare_ci_sas_docker_image
@@ -60,7 +59,7 @@ build_RAServices:
     - mkdir -p build/gnucxx11_opt
     - cd build/gnucxx11_opt
     - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DUSE_LOG4CPLUS=false ../..
-    - make -j 8
+    - make -j 12
     - make install
   dependencies:
     - prepare_ci_sas_docker_image
@@ -78,7 +77,7 @@ build_LTAIngest:
     - mkdir -p build/gnucxx11_opt
     - cd build/gnucxx11_opt
     - cmake -DBUILD_PACKAGES=$PACKAGE -DUSE_LOG4CPLUS=false ../..
-    - make -j 8
+    - make -j 12
     - make install
   dependencies:
     - prepare_ci_lta_docker_image
@@ -87,24 +86,23 @@ build_LTAIngest:
     paths:
       - build/gnucxx11_opt
 
-# TODO: enable when prepare_ci_mac_docker_image is fixed
-#build_MCU_MAC:
-#  stage: build
-#  image: ci_mac:latest
-#  script:
-#    - PACKAGE=MCU_MAC
-#    - echo "Building $PACKAGE..."
-#    - mkdir -p build/gnucxx11_opt
-#    - cd build/gnucxx11_opt
-#    - cmake -DBUILD_PACKAGES=$PACKAGE -DUSE_LOG4CPLUS=false ../..
-#    - make -j 8
-#    - make install
-#  dependencies:
-#    - prepare_ci_mac_docker_image
-#  artifacts:
-#    expire_in: 6 hours
-#    paths:
-#      - build/gnucxx11_opt
+build_MCU_MAC:
+  stage: build
+  image: ci_mac:latest
+  script:
+    - PACKAGE=MainCU
+    - echo "Building $PACKAGE..."
+    - mkdir -p build/gnucxx11_opt
+    - cd build/gnucxx11_opt
+    - cmake -DBUILD_PACKAGES=$PACKAGE -DUSE_LOG4CPLUS=false -DWINCC_ROOT_DIR=/opt/WinCC_OA/3.14/ -DBLITZ_ROOT_DIR=/opt/blitz/ ../..
+    - make -j 12
+    - make install
+  dependencies:
+    - prepare_ci_mac_docker_image
+  artifacts:
+    expire_in: 6 hours
+    paths:
+      - build/gnucxx11_opt
 
 #
 # UNIT TEST STAGE
@@ -169,25 +167,29 @@ unit_test_LTAIngest:
     when: always
     paths:
       - build/gnucxx11_opt/Testing/Temporary/LastTest.log
-
-# TODO: enable when build_MCU_MAC is fixed
-#unit_test_MCU_MAC:
-#  stage: unit_test
-#  image: ci_mac:latest
-#  script:
-#    - PACKAGE=MCU_MAC
-#    - echo "Testing $PACKAGE..."
-#    - cd build/gnucxx11_opt
-#    - SKIP_INTEGRATION_TESTS=true ctest
-#  dependencies:
-#    - build_MCU_MAC
-#  artifacts:
-#    name: unit-test-report
-#    when: always
-#    paths:
-#      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
-
-
+
+unit_test_MCU_MAC:
+  stage: unit_test
+  image: ci_mac:latest
+  script:
+    - PACKAGE=MainCU
+    - echo "Testing $PACKAGE..."
+    - cd build/gnucxx11_opt
+    - SKIP_INTEGRATION_TESTS=true ctest
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
+  dependencies:
+    - build_MCU_MAC
+  artifacts:
+    name: unit-test-report
+    when: always
+    paths:
+      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+  allow_failure: true # allow failure for now, so an MCU_MAC failure does not block this pipeline and we can deploy TMSS. TODO: fix docker_mac environment and services so the tests pass.
 
 #
 # DOCKERIZE
diff --git a/CMake/FindCurl.cmake b/CMake/FindCurl.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..29be62b035b0552db12d266b5b467674ffa7124d
--- /dev/null
+++ b/CMake/FindCurl.cmake
@@ -0,0 +1,47 @@
+# - Try to find libcurl, a library for doing HTTP calls
+# Variables used by this module:
+#  CURL_ROOT_DIR     - curl root directory
+# Variables defined by this module:
+#  CURL_FOUND        - system has curl
+#  CURL_INCLUDE_DIR  - the curl include directory (cached)
+#  CURL_INCLUDE_DIRS - the curl include directories
+#                      (identical to CURL_INCLUDE_DIR)
+#  CURL_LIBRARY      - the curl library (cached)
+#  CURL_LIBRARIES    - the curl library
+
+# Copyright (C) 2009
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id$
+
+if(NOT CURL_FOUND)
+
+  find_path(CURL_INCLUDE_DIR curl/curl.h
+    HINTS ${CURL_ROOT_DIR} PATH_SUFFIXES include)    # curl headers
+
+  find_library(CURL_LIBRARY curl)                    # libcurl
+  mark_as_advanced(CURL_INCLUDE_DIR CURL_LIBRARY)
+
+  include(FindPackageHandleStandardArgs)
+  find_package_handle_standard_args(curl DEFAULT_MSG
+    CURL_LIBRARY CURL_INCLUDE_DIR)
+
+  set(CURL_INCLUDE_DIRS ${CURL_INCLUDE_DIR})
+  set(CURL_LIBRARIES ${CURL_LIBRARY})
+
+endif(NOT CURL_FOUND)
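# Usage sketch (illustrative only, not part of this patch): a package that
# needs libcurl can consume the module above through LOFAR's find_package
# wrapper, as the MACScheduler CMakeLists.txt further down in this diff does;
# 'mytarget' below is a hypothetical target name.
#
#   lofar_find_package(Curl)                            # runs FindCurl.cmake
#   include_directories(${CURL_INCLUDE_DIRS})
#   target_link_libraries(mytarget ${CURL_LIBRARIES})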
diff --git a/CMake/FindCurlCpp.cmake b/CMake/FindCurlCpp.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..232e8b4df0863fd001660cec9bdd0fed3366ced4
--- /dev/null
+++ b/CMake/FindCurlCpp.cmake
@@ -0,0 +1,51 @@
+# - Try to find curlpp, a C++ library for HTTP calls
+# Variables used by this module:
+#  CURLCPP_ROOT_DIR     - CurlCpp root directory
+# Variables defined by this module:
+#  CURLCPP_FOUND        - system has CurlCpp
+#  CURLCPP_INCLUDE_DIR  - the CurlCpp include directory (cached)
+#  CURLCPP_INCLUDE_DIRS - the CurlCpp include directories
+#                         (identical to CURLCPP_INCLUDE_DIR)
+#  CURLCPP_LIBRARY      - the CurlCpp library (cached)
+#  CURLCPP_LIBRARIES    - the CurlCpp library
+
+# Copyright (C) 2009
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id$
+
+if(NOT CURLCPP_FOUND)
+
+  find_path(CURLCPP_INCLUDE_DIR curlpp/cURLpp.hpp
+    HINTS ${CURLCPP_ROOT_DIR} PATH_SUFFIXES include)
+  find_path(CURL_INCLUDE_DIR curl/curl.h
+    HINTS ${CURL_ROOT_DIR} PATH_SUFFIXES include)    # curlpp depends on curl headers
+
+  find_library(CURLCPP_LIBRARY curlpp
+    HINTS ${CURLCPP_ROOT_DIR} PATH_SUFFIXES lib)
+  find_library(CURL_LIBRARY curl)                    # curlpp depends on libcurl
+  mark_as_advanced(CURLCPP_INCLUDE_DIR CURLCPP_LIBRARY CURL_LIBRARY)
+
+  include(FindPackageHandleStandardArgs)
+  find_package_handle_standard_args(curlcpp DEFAULT_MSG
+    CURLCPP_LIBRARY CURLCPP_INCLUDE_DIR)
+
+  set(CURLCPP_INCLUDE_DIRS ${CURLCPP_INCLUDE_DIR} ${CURL_INCLUDE_DIR})
+  set(CURLCPP_LIBRARIES ${CURLCPP_LIBRARY} ${CURL_LIBRARY})
+
+endif(NOT CURLCPP_FOUND)
diff --git a/CMake/FindJsonCpp.cmake b/CMake/FindJsonCpp.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..1dccebca769c6ed8a89b86df96ce90dde864c283
--- /dev/null
+++ b/CMake/FindJsonCpp.cmake
@@ -0,0 +1,47 @@
+# - Try to find jsoncpp, a library for processing JSON blobs
+# Variables used by this module:
+#  JSONCPP_ROOT_DIR     - JsonCpp root directory
+# Variables defined by this module:
+#  JSONCPP_FOUND        - system has JsonCpp
+#  JSONCPP_INCLUDE_DIR  - the JsonCpp include directory (cached)
+#  JSONCPP_INCLUDE_DIRS - the JsonCpp include directories
+#                         (identical to JSONCPP_INCLUDE_DIR)
+#  JSONCPP_LIBRARY      - the JsonCpp library (cached)
+#  JSONCPP_LIBRARIES    - the JsonCpp library
+
+# Copyright (C) 2009
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id$
+
+if(NOT JSONCPP_FOUND)
+
+  find_path(JSONCPP_INCLUDE_DIR jsoncpp/json/json.h
+    HINTS ${JSONCPP_ROOT_DIR} PATH_SUFFIXES include)
+  find_library(JSONCPP_LIBRARY jsoncpp
+    HINTS ${JSONCPP_ROOT_DIR} PATH_SUFFIXES lib)
+  mark_as_advanced(JSONCPP_INCLUDE_DIR JSONCPP_LIBRARY)
+
+  include(FindPackageHandleStandardArgs)
+  find_package_handle_standard_args(JsonCpp DEFAULT_MSG
+    JSONCPP_LIBRARY JSONCPP_INCLUDE_DIR)
+
+  set(JSONCPP_INCLUDE_DIRS ${JSONCPP_INCLUDE_DIR})
+  set(JSONCPP_LIBRARIES ${JSONCPP_LIBRARY})
+
+endif(NOT JSONCPP_FOUND)
diff --git a/CMake/variants/variants.lcs157 b/CMake/variants/variants.lcs157
index 39e331d9ab3a6e67968aad1d4ef60df21bfa096f..16996501e47d33d05dd108a53270f29261c09b94 100644
--- a/CMake/variants/variants.lcs157
+++ b/CMake/variants/variants.lcs157
@@ -2,8 +2,8 @@
 # AS: put under comment as LOFAR general rule is to use shared libs now.
 #option(BUILD_SHARED_LIBS "Build shared libraries" OFF)
 
-set(WINCC_ROOT_DIR /opt/WinCC_OA/3.14)
-set(CASACORE_ROOT_DIR "/opt/casacore")
+set(WINCC_ROOT_DIR /opt/WinCC_OA/3.16)
+set(CASACORE_ROOT_DIR "/opt/casacore")
 set(CASAREST_ROOT_DIR "/opt/casarest")
 set(PYRAP_ROOT_DIR "/opt/pyrap")
 set(AOFLAGGER_ROOT_DIR "/opt/aoflagger/build")
diff --git a/Docker/lofar-base/Dockerfile.tmpl b/Docker/lofar-base/Dockerfile.tmpl
index eddf712b1b77ecab25156a389d7232af12a53f2b..9ba56ab2373f0094e6ee0194f91790e608f64403 100644
--- a/Docker/lofar-base/Dockerfile.tmpl
+++ b/Docker/lofar-base/Dockerfile.tmpl
@@ -62,7 +62,20 @@ ENV CASACORE_VERSION=v3.1.0 \
 # Allow to specify the number of cpus as --build-arg.
 #
 ARG J=6
-ENV J=${J} CXX_FLAGS="--std=c++11 -W -Wall -Woverloaded-virtual -Wno-unknown-pragmas -D_GLIBCXX_USE_CXX11_ABI=${CXX_ABI} -O3 -march=haswell"
+ENV J=${J}
+
+# Allow to overwrite the default CXX_FLAGS settings for code compilation
+# by specifying --build-arg CXX_FLAGS="blah blah"
+ARG CXX_FLAGS="--std=c++11 -W -Wall -Woverloaded-virtual -Wno-unknown-pragmas -D_GLIBCXX_USE_CXX11_ABI=${CXX_ABI} -O3"
+
+# Allow to overwrite the CPU optimisation default setting by specifying
+# --build-arg CPU_OPTIMISATION="native"
+ARG CPU_OPTIMISATION="haswell"
+ENV CPU_OPTIMISATION="-march=${CPU_OPTIMISATION}"
+
+# Combine CXX_FLAGS and CPU_OPTIMISATION
+ENV CXX_FLAGS="${CXX_FLAGS} ${CPU_OPTIMISATION}"
+
 
 #
 # Base and runtime dependencies
diff --git a/Docker/lofar-ci/Dockerfile_ci_base b/Docker/lofar-ci/Dockerfile_ci_base
index 132ff1719a894e2b3d1068dce3f064b1df146610..83b0b77c28794f6932642c44830df1bee076c28d 100644
--- a/Docker/lofar-ci/Dockerfile_ci_base
+++ b/Docker/lofar-ci/Dockerfile_ci_base
@@ -7,9 +7,8 @@ FROM centos:centos7.6.1810
 
 RUN yum -y groupinstall 'Development Tools' && \
     yum -y install epel-release && \
-    yum -y install cmake gcc git log4cplus-devel python3 python3-devel python3-pip which wget curl atop
-
-RUN pip3 install kombu requests coverage python-qpid-proton
-
-RUN adduser lofarsys
+    yum -y install cmake cmake3 gcc git log4cplus-devel python3 python3-devel python3-pip which wget curl atop valgrind && \
+    pip3 install kombu requests coverage python-qpid-proton && \
+    adduser lofarsys && \
+    mkdir -p /opt && chown -R lofarsys:lofarsys /opt
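# Illustrative use of the new build arguments in Docker/lofar-base/Dockerfile.tmpl
# above (not part of this patch; the generated Dockerfile path and image tag are
# assumptions): the CPU target can now be overridden at image build time, e.g. to
# produce a portable image instead of the -march=haswell default:
#
#   docker build --build-arg CPU_OPTIMISATION=native -t lofar-base:latest \
#       -f Docker/lofar-base/Dockerfile .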
diff --git a/Docker/lofar-ci/Dockerfile_ci_mac b/Docker/lofar-ci/Dockerfile_ci_mac
index 1c9338822a3de0049213d2d3189bde09bc8ddf11..5b48b8c395805b8a024730b56377a67e9b04007c 100644
--- a/Docker/lofar-ci/Dockerfile_ci_mac
+++ b/Docker/lofar-ci/Dockerfile_ci_mac
@@ -6,22 +6,32 @@ FROM ci_base:latest
 
 RUN echo "Installing packages for MAC..." && \
-    yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel && \
+    yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel jsoncpp-devel jsoncpp libcurl-devel libcurl && \
     pip3 install psycopg2 testing.postgresql lxml mock numpy kombu requests python-dateutil fabric
 
-RUN echo "Installing Casacore..." && \
-    git clone https://github.com/casacore/casacore && \
-    mkdir /casacore/build/ && \
-    cd /casacore/build/ && \
-    cmake -DCMAKE_INSTALL_PREFIX=/opt/casacore -DBUILD_PYTHON3=ON -DBUILD_PYTHON=OFF -DPYTHON_EXECUTABLE=/usr/bin/python3 -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCMAKE_BUILD_TYPE=Release .. && \
-    make -j 8 && \
-    make install
+USER lofarsys
+
+#RUN echo "Installing Casacore..." && \
+#    git clone https://github.com/casacore/casacore && \
+#    mkdir /casacore/build/ && \
+#    cd /casacore/build/ && \
+#    cmake -DCMAKE_INSTALL_PREFIX=/opt/casacore -DBUILD_PYTHON3=ON -DBUILD_PYTHON=OFF -DPYTHON_EXECUTABLE=/usr/bin/python3 -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCMAKE_BUILD_TYPE=Release .. && \
+#    make -j 8 && \
+#    make install
 
 RUN echo "Installing Blitz++" && \
-    cd / && \
-    git clone --depth 1 https://github.com/blitzpp/blitz.git && \
-    mkdir -p /blitz/build && \
-    cd /blitz/build && \
-    cmake --prefix=/opt/blitz/ .. && \
+    mkdir -p /opt/3rdparty_sources/ && cd /opt/3rdparty_sources/ && \
+    git clone --depth 1 https://github.com/blitzpp/blitz.git blitz && \
+    cd blitz && mkdir -p build && cd build && \
+    cmake3 -DCMAKE_INSTALL_PREFIX=/opt/blitz/ .. && \
     make -j 8 lib && \
-    make install
\ No newline at end of file
+    make install
+
+RUN echo "Installing WinCC 3.14 from nexus ALTA repo..." && \
+    cd /tmp && \
+    wget https://support.astron.nl/nexus/content/repositories/snapshots/nl/alta/buildWinCC314api.tar.gz && \
+    tar -xvf buildWinCC314api.tar.gz && \
+    cd opt && \
+    mv WinCC_OA /opt/
+
+ENV LD_LIBRARY_PATH /opt/WinCC_OA/3.14/bin:$LD_LIBRARY_PATH
diff --git a/Docker/lofar-ci/Dockerfile_ci_rtcp b/Docker/lofar-ci/Dockerfile_ci_rtcp
index 5c399ec3ecd50ea0354795209f647f55afe4f60f..0d09c62c76e0ec213a7cc70eccf401dad8fa856f 100644
--- a/Docker/lofar-ci/Dockerfile_ci_rtcp
+++ b/Docker/lofar-ci/Dockerfile_ci_rtcp
@@ -19,7 +19,7 @@ RUN echo "Installing Casacore..." && \
     make install
 
 RUN echo "Installing DAL..." && \
-    git clone https://github.com/nextgen-astrodata/DAL.git && \
+    git clone https://git.astron.nl/ro/dal2.git && \
     mkdir /DAL/build && \
     cd /DAL/build/ && \
     cmake -DCMAKE_INSTALL_PREFIX=/opt/DAL -DPYTHON_EXECUTABLE:FILEPATH=/usr/bin/python3 -DPYTHON_LIBRARY:FILEPATH=/usr/lib64/libpython3.6m.so -DPYTHON_INCLUDE_DIR=/usr/include/python3.6m/ ..
&& \ diff --git a/Docker/lofar-outputproc/Dockerfile.tmpl b/Docker/lofar-outputproc/Dockerfile.tmpl index 71c028e5e41cb4387eff9ceee4d108df0b9ee786..495fd3f145425e46e060326d852eaa203ffa54fd 100644 --- a/Docker/lofar-outputproc/Dockerfile.tmpl +++ b/Docker/lofar-outputproc/Dockerfile.tmpl @@ -34,7 +34,7 @@ RUN aptitude install -y libhdf5-${LIBHDF5_VERSION} python3 && \ RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev libhdf5-dev" && \ aptitude install -y ${BUILD_PACKAGES} && \ mkdir -p ${INSTALLDIR}/DAL/build && \ - git clone --branch ${DAL_VERSION//latest/master} https://github.com/nextgen-astrodata/DAL.git ${INSTALLDIR}/DAL/DAL.src && \ + git clone --branch ${DAL_VERSION//latest/master} https://git.astron.nl/ro/dal2.git ${INSTALLDIR}/DAL/DAL.src && \ cd ${INSTALLDIR}/DAL/build && \ cmake -DPYTHON_INCLUDE_DIR=/usr/include/python${PYTHON_VERSION} -DPYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython${PYTHON_VERSION}m.so -DBUILD_TESTING=OFF -DCMAKE_CXX_FLAGS="${CXX_FLAGS} -fpermissive" -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/DAL ${INSTALLDIR}/DAL/DAL.src && \ make -j ${J} && \ diff --git a/Docker/lofar-pipeline/Dockerfile.tmpl b/Docker/lofar-pipeline/Dockerfile.tmpl index 502db64a39feba15556046c1ce5991e044b433a0..b2ac19d162750f877580d75b2849f290019a6acb 100644 --- a/Docker/lofar-pipeline/Dockerfile.tmpl +++ b/Docker/lofar-pipeline/Dockerfile.tmpl @@ -190,7 +190,7 @@ RUN aptitude install -y libhdf5-${LIBHDF5_VERSION} python3 && \ RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev libhdf5-dev" && \ aptitude install -y ${BUILD_PACKAGES} && \ mkdir -p ${INSTALLDIR}/DAL/build && \ - git clone --depth 1 --shallow-submodules --branch ${DAL_VERSION//latest/master} https://github.com/nextgen-astrodata/DAL.git ${INSTALLDIR}/DAL/DAL.src && \ + git clone --depth 1 --shallow-submodules --branch ${DAL_VERSION//latest/master} https://git.astron.nl/ro/dal2.git ${INSTALLDIR}/DAL/DAL.src && \ cd ${INSTALLDIR}/DAL/build && \ cmake -DPYTHON_INCLUDE_DIR=/usr/include/python${PYTHON_VERSION} -DPYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython${PYTHON_VERSION}m.so -DBUILD_TESTING=OFF -DCMAKE_CXX_FLAGS="${CXX_FLAGS} -fpermissive" -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/DAL ${INSTALLDIR}/DAL/DAL.src && \ make -j ${J} && \ diff --git a/Docker/lofar-pipeline/_Dockerfile_ABI0.tmpl_ b/Docker/lofar-pipeline/_Dockerfile_ABI0.tmpl_ index 3654b07026c2a4a22274e43e65450dc4520ba4fa..2128cb5219b9cfe258170704a9f99f940cab3d17 100644 --- a/Docker/lofar-pipeline/_Dockerfile_ABI0.tmpl_ +++ b/Docker/lofar-pipeline/_Dockerfile_ABI0.tmpl_ @@ -185,7 +185,7 @@ RUN apt-get install -y python2.7 RUN export BUILD_PACKAGES="git cmake g++ swig python-setuptools python2.7-dev" && \ apt-get install -y ${BUILD_PACKAGES} && \ mkdir -p ${INSTALLDIR}/DAL/build && \ - git clone --branch ${DAL_VERSION//latest/master} https://github.com/nextgen-astrodata/DAL.git ${INSTALLDIR}/DAL/DAL.src && \ + git clone --branch ${DAL_VERSION//latest/master} https://git.astron.nl/ro/dal2.git ${INSTALLDIR}/DAL/DAL.src && \ cd ${INSTALLDIR}/DAL/build && \ cmake -DCMAKE_CXX_FLAGS="${CXX_FLAGS} -fpermissive" -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/DAL ${INSTALLDIR}/DAL/DAL.src && \ make -j ${J} && \ diff --git a/Docker/lofar-subbandtbbwriter/Dockerfile.tmpl b/Docker/lofar-subbandtbbwriter/Dockerfile.tmpl index cd5aad6dc527988c1a73634e8f635a4e1f39f412..0f75cd4a3ce34c5a5ec274425bf3db84e827931e 100644 --- a/Docker/lofar-subbandtbbwriter/Dockerfile.tmpl +++ b/Docker/lofar-subbandtbbwriter/Dockerfile.tmpl @@ -26,7 +26,7 @@ RUN 
aptitude install -y libhdf5-${LIBHDF5_VERSION} python3 && \
 RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev libhdf5-dev" && \
     aptitude install -y ${BUILD_PACKAGES} && \
     mkdir -p ${INSTALLDIR}/DAL/build && \
-    git clone --branch ${DAL_VERSION//latest/master} https://github.com/nextgen-astrodata/DAL.git ${INSTALLDIR}/DAL/DAL.src && \
+    git clone --branch ${DAL_VERSION//latest/master} https://git.astron.nl/ro/dal2.git ${INSTALLDIR}/DAL/DAL.src && \
     cd ${INSTALLDIR}/DAL/build && \
     cmake -DPYTHON_INCLUDE_DIR=/usr/include/python${PYTHON_VERSION} -DPYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython${PYTHON_VERSION}m.so -DBUILD_TESTING=OFF -DCMAKE_CXX_FLAGS="${CXX_FLAGS} -fpermissive" -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/DAL ${INSTALLDIR}/DAL/DAL.src && \
     make -j ${J} && \
diff --git a/LCS/Messaging/python/messaging/messagebus.py b/LCS/Messaging/python/messaging/messagebus.py
index de4479ed8eb9a730cf1f652c4b127aee26d69625..c5273956ec6e68066192054c9a9261b00b74188e 100644
--- a/LCS/Messaging/python/messaging/messagebus.py
+++ b/LCS/Messaging/python/messaging/messagebus.py
@@ -576,6 +576,10 @@ class _AbstractBus:
             return True
         if isinstance(error, kombu.exceptions.ConnectionError):
             return True
+        if isinstance(error, OSError) or isinstance(error, IOError):
+            msg = str(error).lower()
+            if 'connection' in msg or 'socket' in msg:
+                return True
 
         return False
 
@@ -1381,7 +1385,7 @@ class BusListener:
         """
 
         if not isinstance(handler_type, type):
-            raise TypeError("handler_type should be a ServiceMessageHandler subclass, not an instance!")
+            raise TypeError("handler_type should be an AbstractMessageHandler subclass, not an instance!")
 
         if not issubclass(handler_type, AbstractMessageHandler):
             raise TypeError("handler_type should be an AbstractMessageHandler subclass")
@@ -1395,29 +1399,27 @@ class BusListener:
         self._lock = threading.Lock()
         self._running = threading.Event()
         self._listening = False
-        self.address = self.designated_queue_name(exchange, routing_key)
+        self.routing_key = routing_key
+        self.address = self.designated_queue_name()
 
         # make sure the queue is bound to the exchange
         # any created queue or binding is not removed on exit. See rationale above.
         create_bound_queue(exchange=exchange, queue=self.address, routing_key=routing_key,
                            broker=self.broker, log_level=logging.INFO)
 
-    @staticmethod
-    def designated_queue_name(exchange: str, routing_key: str="#") -> str:
+    def designated_queue_name(self) -> str:
         """
-        create a designated queue name based on the given exchange name, routing_key, and the current running program name.
+        create a designated queue name based on this BusListener's exchange name, routing_key, and the current running program name.
         Like so: <exchange>.for.<program_name>.<listener_type_name>.on.<sanitized_routing_key>
         In case the routing_key filters for wildcards only, then the routing key is replaced by 'all'
-        :param exchange: the exchange name to which the designated queue will bind
-        :param routing_key: the routing_key which is used for the binding. Any wildcards like ".#"/".*" are removed.
         :return: <exchange>.for.<program_name>.<listener_type_name>.on.<sanitized_routing_key>
         """
-        sanitized_routing_key = routing_key.replace(".#","").replace(".*","").replace("#","").replace("*","")
+        sanitized_routing_key = self.routing_key.replace(".#","").replace(".*","").replace("#","").replace("*","")
         if not sanitized_routing_key:
             sanitized_routing_key = "all"
-        return "%s.queue.for.%s.%s.on.%s" % (exchange,
+        return "%s.queue.for.%s.%s.on.%s" % (self.exchange,
                                              program_name(include_extension=False),
-                                             __class__.__name__,
+                                             self.__class__.__name__,
                                              sanitized_routing_key)
 
     def is_running(self) -> bool:
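# Illustrative sketch (not part of this patch) of the instance-based
# designated_queue_name() above; MyListener and the exchange name are
# hypothetical. A subclass now gets its own class name and its own routing
# key baked into the queue name:
#
#   class MyListener(BusListener):
#       pass
#
#   listener = MyListener(AbstractMessageHandler, exchange="test.exchange",
#                         routing_key="task.#")
#   listener.designated_queue_name()
#   # -> "test.exchange.queue.for.<program_name>.MyListener.on.task"
#   # wildcards ".#"/".*" are stripped; a wildcard-only routing key yields "all"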
diff --git a/LCS/Messaging/python/messaging/test/t_RPC.py b/LCS/Messaging/python/messaging/test/t_RPC.py
index 739e4ff9f40b3ddb86298e465064aa7d2260f55e..f91e406b051965f933c5a9b60f7e95e78b097b63 100644
--- a/LCS/Messaging/python/messaging/test/t_RPC.py
+++ b/LCS/Messaging/python/messaging/test/t_RPC.py
@@ -35,6 +35,20 @@ class MyServiceMessageHandler(ServiceMessageHandler):
     def my_public_slow_method(self):
         sleep(2)
 
+
+class RPCServiceTests(unittest.TestCase):
+    def test_designated_queue_name_contains_subclass_name(self):
+        class MyService(RPCService):
+            pass
+
+        with TemporaryExchange(self.__class__.__name__) as tmp_exchange:
+            service = MyService("my service", MyServiceMessageHandler, exchange=tmp_exchange.address)
+            queue_name = service.designated_queue_name()
+
+            self.assertTrue(".MyService." in queue_name)
+            self.assertFalse(".BusListener." in queue_name)
+
+
 class TestRPC(unittest.TestCase):
     @unit_test
diff --git a/LCS/Messaging/python/messaging/test/t_messagebus.py b/LCS/Messaging/python/messaging/test/t_messagebus.py
index bd9b6697ac813911e9d13017ea89616fbaee463c..fab12ff650f8e04201303881b4f3603ef114a7ce 100644
--- a/LCS/Messaging/python/messaging/test/t_messagebus.py
+++ b/LCS/Messaging/python/messaging/test/t_messagebus.py
@@ -626,7 +626,7 @@ class RejectorTester(unittest.TestCase):
         with TemporaryExchange("Rejection") as tmp_exchange:
             tmp_exchange_address = tmp_exchange.address
             with BusListenerJanitor(Rejector(tmp_exchange.address)) as rejector:
-                rejector_address = Rejector.designated_queue_name(tmp_exchange_address)
+                rejector_address = rejector.designated_queue_name()
                 with tmp_exchange.create_tobus() as spammer:
                     for _ in range(number_of_messages):
                         msg = EventMessage(content="ping", subject="spam")
@@ -777,6 +777,18 @@ class PingPongTester(unittest.TestCase):
                 self.assertFalse(queue_exists(player1_address))
                 self.assertFalse(queue_exists(player2_address))
 
+class BusListenerTests(unittest.TestCase):
+    def test_designated_queue_name_contains_subclass_name(self):
+        class MyListener(BusListener):
+            pass
+
+        with TemporaryExchange(self.__class__.__name__) as tmp_exchange:
+            listener = MyListener(AbstractMessageHandler, exchange=tmp_exchange.address)
+            queue_name = listener.designated_queue_name()
+
+            self.assertTrue(".MyListener." in queue_name)
+            self.assertFalse(".BusListener." in queue_name)
+
 
 class MessageHandlerTester(unittest.TestCase):
 
@@ -790,13 +802,13 @@ class MessageHandlerTester(unittest.TestCase):
         # try to start a BusListener using this handler. Should fail and raise a MessagingRuntimeError
         with TemporaryExchange(self.__class__.__name__) as tmp_exchange:
             tmp_exchange_name = tmp_exchange.address
+            listener = BusListener(handler_type=RaisingHandler, exchange=tmp_exchange_name)
             with self.assertRaises(MessagingRuntimeError):
-                with BusListenerJanitor(BusListener(handler_type=RaisingHandler,
-                                                    exchange=tmp_exchange_name)) as listener:
+                with BusListenerJanitor(listener):
                     pass
 
         self.assertFalse(exchange_exists(tmp_exchange_name))
-        self.assertFalse(queue_exists(BusListener.designated_queue_name(tmp_exchange_name)))
+        self.assertFalse(queue_exists(listener.designated_queue_name()))
 
     @unit_test
     def test_empty_template_handler(self):
@@ -860,13 +872,13 @@ class MessageHandlerTester(unittest.TestCase):
         # try to start a BusListener using a BaseTemplateHandler. Should fail and raise a TypeError
         with TemporaryExchange(self.__class__.__name__) as tmp_exchange:
             tmp_exchange_name = tmp_exchange.address
+            listener = BusListener(handler_type=BaseTemplateHandler, exchange=tmp_exchange_name)
             with self.assertRaises(RuntimeError):
-                with BusListenerJanitor(BusListener(handler_type=BaseTemplateHandler,
-                                                    exchange=tmp_exchange_name)) as listener:
+                with BusListenerJanitor(listener):
                     pass
 
         self.assertFalse(exchange_exists(tmp_exchange_name))
-        self.assertFalse(queue_exists(BusListener.designated_queue_name(tmp_exchange_name)))
+        self.assertFalse(queue_exists(listener.designated_queue_name()))
 
 
 class ReconnectOnConnectionLossTests(unittest.TestCase):
diff --git a/LCS/PyCommon/test_utils.py b/LCS/PyCommon/test_utils.py
index 4bcde205eaf45fbc12df89f9ed5ef8545360d203..d4fb731466fd7928d7932e92568853d0494013b3 100644
--- a/LCS/PyCommon/test_utils.py
+++ b/LCS/PyCommon/test_utils.py
@@ -44,8 +44,18 @@ def assertEqualXML(test, expected):
         raise AssertionError(msg)
 
 
+def skip_integration_tests() -> bool:
+    '''returns True if the environment variable SKIP_INTEGRATION_TESTS has been set to a 'true' value'''
+    return os.environ.get('SKIP_INTEGRATION_TESTS', default='False').lower() in ['1', 'true', 'on']
+
+def skip_unit_tests() -> bool:
+    '''returns True if the environment variable SKIP_UNIT_TESTS has been set to a 'true' value'''
+    return os.environ.get('SKIP_UNIT_TESTS', default='False').lower() in ['1', 'true', 'on']
+
+
 # decorators for selective tests
-integration_test = unittest.skipIf(os.environ.get('SKIP_INTEGRATION_TESTS', default='False').lower() in ['1', 'true'],
+integration_test = unittest.skipIf(skip_integration_tests(),
                                    'Integration tests are disabled via env SKIP_INTEGRATION_TESTS')
-unit_test = unittest.skipIf(os.environ.get('SKIP_UNIT_TESTS', default='False').lower() in ['1', 'true'],
+
+unit_test = unittest.skipIf(skip_unit_tests(),
                             'Unit tests are disabled via env SKIP_UNIT_TESTS')
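# Illustrative (not part of this patch): skip_integration_tests() and
# skip_unit_tests() treat '1', 'true' and 'on' (case-insensitive) as enabled,
# so 'on' now works where previously only '1'/'true' did:
#
#   import os
#   from lofar.common.test_utils import skip_integration_tests
#
#   os.environ['SKIP_INTEGRATION_TESTS'] = 'True'
#   assert skip_integration_tests()
#   os.environ['SKIP_INTEGRATION_TESTS'] = 'on'    # newly accepted value
#   assert skip_integration_tests()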
diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py
index 8e8b328f275425c7d8b35dcc33cb6da86f1b9665..019e7262f5a4bc804907ebdb6811fc81d82e757c 100755
--- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py
+++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py
@@ -11,6 +11,11 @@ import fnmatch
 import time
 import logging
 
+from lofar.common.test_utils import skip_integration_tests
+
+if skip_integration_tests():
+    exit(3)
+
 logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 logger = logging.getLogger(__name__)
 
@@ -102,6 +107,8 @@ with TemporaryExchange(testname+"_bus") as tmp_bus:
     manager_thread.daemon = True
     manager_thread.start()
 
+    time.sleep(1.0) #TODO: should not wait fixed amount of time for IngestJobManager to be up and running, but poll with a timeout
+
     assert manager.nrOfUnfinishedJobs() == 3, 'expected 3 jobs unfinished before any job was started'
     assert manager.nrOfJobs() == 3, 'expected 3 jobs in total before any job was started'
diff --git a/MAC/APL/CASATools/include/CASATools/CasaConverter.h b/MAC/APL/CASATools/include/CASATools/CasaConverter.h
index 533840c47e1284dd76b2c57a32f4e0dd57b3f191..3c6b4826d9fb4dad4df739e5d1f34f346e2441e2 100644
--- a/MAC/APL/CASATools/include/CASATools/CasaConverter.h
+++ b/MAC/APL/CASATools/include/CASATools/CasaConverter.h
@@ -67,7 +67,7 @@ public:
     // some functions to exploit the supported conversion types.
     bool isValidType(const string& refType)
         { return (itsDirectionTypes.find(refType) != itsDirectionTypes.end()); }
-    vector<string> validTypes();
+    std::vector<string> validTypes();
 
 private:
     // internal admin structures
@@ -88,10 +88,10 @@ private:
     string itsTargetName;
 
     // name, type map
-    map<string, casacore::MDirection::Types> itsDirectionTypes;
+    std::map<string, casacore::MDirection::Types> itsDirectionTypes;
 
     // type, converter_t map
-    map<casacore::MDirection::Types, converter_t> itsConverters;
+    std::map<casacore::MDirection::Types, converter_t> itsConverters;
 };
 
 // @}
diff --git a/MAC/APL/CASATools/src/CasaConverter.cc b/MAC/APL/CASATools/src/CasaConverter.cc
index eeb2a6f966815c0559596f38a09f9336ff8b1797..d175084b9e13600406a4d2e28ceea62889af642d 100644
--- a/MAC/APL/CASATools/src/CasaConverter.cc
+++ b/MAC/APL/CASATools/src/CasaConverter.cc
@@ -41,6 +41,7 @@ namespace LOFAR {
 using namespace casacore;
 using namespace blitz;
 using namespace RTC;
+using namespace std;
 
 static const char* supportedTypes[] = { "J2000", "ITRF", "B1950", "HADEC", "AZELGEO", "TOPO", "ICRS",
                                         "APP", "GALACTIC", "ECLIPTIC", "COMET",
@@ -89,7 +90,7 @@ CasaConverter::converter_t* CasaConverter::_getConverter(MDirection::Types theT
     string typeName(MDirection::showType(theType));
 
     // try to find the converter.
If it is already there then we are done - map<MDirection::Types, converter_t>::iterator iter(itsConverters.find(theType)); + std::map<MDirection::Types, converter_t>::iterator iter(itsConverters.find(theType)); if (iter != itsConverters.end()) { LOG_INFO_STR("Using existing " << typeName << " to " << itsTargetName << " converter"); return (&(iter->second)); @@ -171,7 +172,7 @@ bool CasaConverter::doConversion(const string& sourceType, } // find converter - map<string, MDirection::Types>::const_iterator iter(itsDirectionTypes.find(sourceType)); + std::map<string, MDirection::Types>::const_iterator iter(itsDirectionTypes.find(sourceType)); if (iter == itsDirectionTypes.end()) { LOG_FATAL_STR("No support for conversion from " << sourceType << " to " << itsTargetName); return (false); @@ -217,9 +218,9 @@ bool CasaConverter::doConversion(const string& sourceType, // vector<string> CasaConverter::validTypes() { - vector<string> result; - map<string, MDirection::Types>::const_iterator iter = itsDirectionTypes.begin(); - map<string, MDirection::Types>::const_iterator end = itsDirectionTypes.end (); + std::vector<string> result; + std::map<string, MDirection::Types>::const_iterator iter = itsDirectionTypes.begin(); + std::map<string, MDirection::Types>::const_iterator end = itsDirectionTypes.end (); while (iter != end) { result.push_back(iter->first); ++iter; diff --git a/MAC/APL/MainCU/docker/MACBuildDockerfile b/MAC/APL/MainCU/docker/MACBuildDockerfile new file mode 100644 index 0000000000000000000000000000000000000000..f520be4da1b34d8f58785dc4d0e1f4b9fc40032b --- /dev/null +++ b/MAC/APL/MainCU/docker/MACBuildDockerfile @@ -0,0 +1,36 @@ +# +# Goal: this dockerfile provides a 'production'-like centos7 system which can be used for building lofar +# MAC MainCU software +# Should be (almost) equivalent to buildhost lcs157 +# +FROM centos:centos7.6.1810 AS builder + +USER root +RUN yum -y groupinstall 'Development Tools' && \ + yum -y install epel-release && \ + yum -y install cmake log4cplus-devel python3 python3-devel python3-pip + +RUN yum install -y cmake gcc-c++ make log4cplus log4cplus-devel python3 python3-libs python3-devel python3-pip \ + boost readline-devel boost-devel binutils-devel boost-python36 boost-python36-devel \ + gettext which openldap-devel npm nodejs git java-11-openjdk + +RUN yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel \ + autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel && \ + pip3 install psycopg2 testing.postgresql lxml mock numpy kombu requests python-dateutil fabric + +RUN echo "Installing Casacore..." && \ + git clone https://github.com/casacore/casacore && \ + mkdir /casacore/build/ && \ + cd /casacore/build/ && \ + cmake -DCMAKE_INSTALL_PREFIX=/opt/casacore -DBUILD_PYTHON3=ON -DBUILD_PYTHON=OFF -DPYTHON_EXECUTABLE=/usr/bin/python3 -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCMAKE_BUILD_TYPE=Release .. && \ + make && \ + make install + +#RUN echo "Installing Blitz++" && \ +# cd / +# git clone --depth 1 https://github.com/blitzpp/blitz.git && \ +# mkdir -p /blitz/build && \ +# cd /blitz/build && \ +# cmake3 --prefix=/opt/blitz/ .. 
&& \
+#    make lib && \
+#    make install
diff --git a/MAC/APL/MainCU/src/MACScheduler/CMakeLists.txt b/MAC/APL/MainCU/src/MACScheduler/CMakeLists.txt
index cb70cfd7ff9af481f8eab5cdb45d945b29efde41..4cea19a099560e54b425b64692b6f559851ab456 100644
--- a/MAC/APL/MainCU/src/MACScheduler/CMakeLists.txt
+++ b/MAC/APL/MainCU/src/MACScheduler/CMakeLists.txt
@@ -1,14 +1,27 @@
 # $Id$
 
+lofar_find_package(JsonCpp)
+lofar_find_package(Curl)
+
+set(DONT_COMPILE_OTDB_AND_PVSS_CODE FALSE CACHE BOOL
+    "Set this var to TRUE and all code references to OTDB and PVSS/WinCC are not compiled. This makes testing of MACScheduler against TMSS less dependent.")
+
+IF(DONT_COMPILE_OTDB_AND_PVSS_CODE)
+    # special compiler definition to keep out tight connections to OTDB/PVSS, so we can test MACScheduler in an isolated environment
+    add_definitions(-DDONT_COMPILE_OTDB_AND_PVSS_CODE)
+    MESSAGE(WARNING "Skipping compilation of OTDB and PVSS/WinCC code in MACScheduler")
+ENDIF(DONT_COMPILE_OTDB_AND_PVSS_CODE)
+
 lofar_add_bin_program(MACScheduler
   MACSchedulerMain.cc
   MACScheduler.cc
+  TMSSBridge.cc
   ObsClaimer.cc)
 
 lofar_add_bin_program(claimTest
   claimTest.cc
   ObsClaimer.cc)
-
+
 configure_file(
   ${CMAKE_CURRENT_SOURCE_DIR}/MACScheduler.conf.in
   ${CMAKE_CURRENT_BINARY_DIR}/MACScheduler.conf)
@@ -16,3 +29,4 @@ configure_file(
 
 install(FILES
   ${CMAKE_CURRENT_BINARY_DIR}/MACScheduler.conf
   DESTINATION etc)
+
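# Illustrative (not part of this patch): configuring an isolated MACScheduler
# test build without OTDB/PVSS via the new cache variable, following the
# build/gnucxx11_opt layout of the CI jobs above:
#
#   cmake -DBUILD_PACKAGES=MainCU -DDONT_COMPILE_OTDB_AND_PVSS_CODE=TRUE ../..
#   make -j 12 && make install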
diff --git a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc
index 7ac95d6472d0ee691cdf2cad57fb9086e13f2206..a402f3bef071ed0c3779e28203c083849a405293 100644
--- a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc
+++ b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc
@@ -38,6 +38,7 @@
 #include <APL/RTDBCommon/CM_Protocol.ph>
 #include <OTDB/TreeStateConv.h>
 #include <signal.h>
+#include <boost/algorithm/string.hpp>
 
 #include "MACScheduler.h"
 #include "PVSSDatapointDefs.h"
@@ -58,7 +59,7 @@ namespace LOFAR {
     using namespace DP_Protocol;
     using namespace CM_Protocol;
     using namespace APLCommon;
-    namespace MainCU {
+    namespace MainCU {
 
 #define MAX_CONCURRENT_OBSERVATIONS    100
 #define MIN2(a,b) (((a) < (b)) ? (a) : (b))
@@ -84,6 +85,7 @@ MACScheduler::MACScheduler() :
     itsNrPlanned        (0),
     itsNrActive         (0),
     itsOTDBconnection   (0),
+    itsTMSSconnection   (0),
     itsMsgQueue         (0)
 {
     LOG_TRACE_OBJ ("MACscheduler construction");
@@ -129,10 +131,12 @@ MACScheduler::MACScheduler() :
     // need port for timers
     itsTimerPort = new GCFTimerPort(*this, "Timerport");
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     // setup MsgQueue
     string queueName = globalParameterSet()->getString("ParsetQueuename");
     ASSERTSTR(!queueName.empty(), "Queuename for distributing parameterSets not specified");
     itsMsgQueue = new ToBus(queueName);
+#endif
 
     registerProtocol(CONTROLLER_PROTOCOL, CONTROLLER_PROTOCOL_STRINGS);
     registerProtocol(DP_PROTOCOL,         DP_PROTOCOL_STRINGS);
@@ -146,6 +150,7 @@ MACScheduler::~MACScheduler()
 {
     LOG_TRACE_OBJ ("~MACscheduler");
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     if (itsPropertySet) {
         delete itsPropertySet;
     }
@@ -153,10 +158,25 @@ MACScheduler::~MACScheduler()
     if (itsOTDBconnection) {
         delete itsOTDBconnection;
     }
+#endif
+
+    if (itsChildPort) {
+        delete itsChildPort;
+    }
+
+    if (itsClaimerPort) {
+        delete itsClaimerPort;
+    }
+
+    if (itsClaimerTask) {
+        delete itsClaimerTask;
+    }
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     if (itsMsgQueue) {
         delete itsMsgQueue;
     }
+#endif
 }
 
 //
@@ -208,7 +228,7 @@ void MACScheduler::_databaseEventHandler(GCFEvent& event)
 //
 GCFEvent::TResult MACScheduler::initial_state(GCFEvent& event, GCFPortInterface& /*port*/)
 {
-    LOG_DEBUG_STR ("initial_state:" << eventName(event));
+    LOG_INFO_STR ("initial_state:" << eventName(event));
 
     GCFEvent::TResult status = GCFEvent::HANDLED;
 
@@ -218,11 +238,18 @@ GCFEvent::TResult MACScheduler::initial_state(GCFEvent& event, GCFPortInterface&
     case F_ENTRY: {
         // Get access to my own propertyset.
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         LOG_INFO_STR ("Activating my propertySet(" << PSN_MAC_SCHEDULER << ")");
         itsPropertySet = new RTDBPropertySet(PSN_MAC_SCHEDULER,
                                              PST_MAC_SCHEDULER,
                                              PSAT_CW,
                                              this);
+#else
+        //HACK: MacScheduler normally waits for an event (DP_CREATED, see below) from PVSS, and then starts the timers.
+        //      Without PVSS, we have to start the timers.
+        itsTimerPort->cancelAllTimers();
+        itsTimerPort->setTimer(0.0);
+#endif
         }
         break;
 
@@ -239,6 +266,7 @@ GCFEvent::TResult MACScheduler::initial_state(GCFEvent& event, GCFPortInterface&
     case F_TIMER: {        // must be timer that PropSet is enabled.
         // update PVSS.
         LOG_TRACE_FLOW ("Updating state to PVSS");
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         itsPropertySet->setValue(PN_FSM_CURRENT_ACTION, GCFPVString ("initial"));
         itsPropertySet->setValue(PN_FSM_ERROR,          GCFPVString (""));
         itsPropertySet->setValue(PN_MS_OTDB_CONNECTED,  GCFPVBool   (false));
@@ -248,10 +276,11 @@ GCFEvent::TResult MACScheduler::initial_state(GCFEvent& event, GCFPortInterface&
         itsPropertySet->setValue(PN_MS_ACTIVE_OBSERVATIONS,   GCFPVDynArr(LPT_STRING, emptyArr));
         itsPropertySet->setValue(PN_MS_PLANNED_OBSERVATIONS,  GCFPVDynArr(LPT_STRING, emptyArr));
         itsPropertySet->setValue(PN_MS_FINISHED_OBSERVATIONS, GCFPVDynArr(LPT_STRING, emptyArr));
-
+#endif
         // Try to connect to the SAS database.
         ParameterSet* pParamSet = globalParameterSet();
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         std::string username;
         try {
@@ -322,11 +351,23 @@ GCFEvent::TResult MACScheduler::initial_state(GCFEvent& event, GCFPortInterface&
                      " using " << username << "," << password);
         LOG_INFO ("Connected to the OTDB");
         itsPropertySet->setValue(PN_MS_OTDB_CONNECTED, GCFPVBool(true));
+#endif
+        std::string tmss_username = pParamSet->getString("TMSSusername", "test");
+        std::string tmss_password = pParamSet->getString("TMSSpassword", "test");
+        std::string tmss_hostname = pParamSet->getString("TMSShostname", "127.0.0.1");
+        int         tmss_port     = pParamSet->getInt("TMSSport", 8008);
+
+        LOG_INFO_STR ("Trying to connect to the TMSS " << tmss_hostname << ":" << tmss_port << " user/pass:" << tmss_username << "/******" );
+        itsTMSSconnection = std::shared_ptr<TMSSBridge>(new TMSSBridge(tmss_hostname, tmss_port, tmss_username, tmss_password));
+        LOG_INFO ("Connected to the TMSSBridge");
+
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         // Start ChildControl task
         LOG_DEBUG ("Enabling ChildControltask");
         itsChildControl->openService(MAC_SVCMASK_SCHEDULERCTRL, 0);
         itsChildControl->registerCompletionPort(itsChildPort);
+#endif
 
         // setup initial schedule: first planned, next run active, second run finished
         itsNextPlannedTime  = time(0);
@@ -370,9 +411,10 @@ GCFEvent::TResult MACScheduler::recover_state(GCFEvent& event, GCFPortInterface&
     case F_ENTRY: {
         // update PVSS
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         itsPropertySet->setValue(string(PN_FSM_CURRENT_ACTION),GCFPVString("recover"));
         itsPropertySet->setValue(string(PN_FSM_ERROR),GCFPVString(""));
-
+#endif
         //
         // TODO: do recovery
@@ -413,9 +455,10 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
         signal (SIGTERM, MACScheduler::sigintHandler);    // kill
 
         // update PVSS
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         itsPropertySet->setValue(string(PN_FSM_CURRENT_ACTION),GCFPVString("active"));
         itsPropertySet->setValue(string(PN_FSM_ERROR),GCFPVString(""));
-
+#endif
         // Start heartbeat timer.
         itsSecondTimer = itsTimerPort->setTimer(1L);
         break;
@@ -447,9 +490,16 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
         int obsID = atoi(cmEvent.nameInAppl.c_str());
         if (cmEvent.result != CM_NO_ERR) {
             LOG_ERROR_STR("Error during checking observation " << obsID);
-            OTDB::TreeMaintenance tm(itsOTDBconnection);
-            TreeStateConv tsc(itsOTDBconnection);
-            tm.setTreeState(obsID, tsc.get("aborted"));
+            if(obsID < 2000000) {
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+                OTDB::TreeMaintenance tm(itsOTDBconnection);
+                TreeStateConv tsc(itsOTDBconnection);
+                tm.setTreeState(obsID, tsc.get("aborted"));
+#endif
+            } else {
+                itsTMSSconnection->setSubtaskState(obsID, "cancelled");
+            }
+
             itsPreparedObs.erase(obsID);
             break;
         }
@@ -497,7 +547,7 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
         // observationController was started (or not)
         CONTROLStartedEvent msg(event);
         if (msg.successful) {
-            LOG_DEBUG_STR("Start of " << msg.cntlrName << " was successful, waiting for connection.");
+            LOG_INFO_STR("Start of " << msg.cntlrName << " was successful, waiting for connection.");
         }
         else {
             LOG_ERROR_STR("Observation controller " << msg.cntlrName << " could not be started");
@@ -519,9 +569,16 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
             LOG_WARN_STR("Cannot find controller " << conEvent.cntlrName << ". Can't update the SAS database");
             break;
         }
-        OTDB::TreeMaintenance tm(itsOTDBconnection);
-        TreeStateConv tsc(itsOTDBconnection);
-        tm.setTreeState(theObs->second, tsc.get("queued"));
+
+        if(theObs->second < 2000000) {
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+            OTDB::TreeMaintenance tm(itsOTDBconnection);
+            TreeStateConv tsc(itsOTDBconnection);
+            tm.setTreeState(theObs->second, tsc.get("queued"));
+#endif
+        } else {
+            itsTMSSconnection->setSubtaskState(theObs->second, "queued");
+        }
         break;
     }
 
@@ -533,9 +590,16 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
             LOG_WARN_STR("Cannot find controller " << msg.cntlrName << ". Can't update the SAS database");
             break;
         }
-        OTDB::TreeMaintenance tm(itsOTDBconnection);
-        TreeStateConv tsc(itsOTDBconnection);
-        tm.setTreeState(theObs->second, tsc.get("active"));
+        if(theObs->second < 2000000) {
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+            OTDB::TreeMaintenance tm(itsOTDBconnection);
+            TreeStateConv tsc(itsOTDBconnection);
+            tm.setTreeState(theObs->second, tsc.get("active"));
+#endif
+        }
+        else {
+            itsTMSSconnection->setSubtaskState(theObs->second, "started");
+        }
         break;
     }
 
@@ -547,9 +611,15 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
             LOG_WARN_STR("Cannot find controller " << msg.cntlrName << ". Can't update the SAS database");
             break;
         }
-        OTDB::TreeMaintenance tm(itsOTDBconnection);
-        TreeStateConv tsc(itsOTDBconnection);
-        tm.setTreeState(theObs->second, tsc.get("completing"));
+        if(theObs->second < 2000000) {
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+            OTDB::TreeMaintenance tm(itsOTDBconnection);
+            TreeStateConv tsc(itsOTDBconnection);
+            tm.setTreeState(theObs->second, tsc.get("completing"));
+#endif
+        } else {
+            itsTMSSconnection->setSubtaskState(theObs->second, "finishing");
+        }
         break;
     }
 
@@ -564,16 +634,32 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
             LOG_WARN_STR("Cannot find controller " << quitedEvent.cntlrName << ". Can't update the SAS database");
             break;
         }
-        OTDB::TreeMaintenance tm(itsOTDBconnection);
-        TreeStateConv tsc(itsOTDBconnection);
+
         // CT_RESULT_: MANUAL_REMOVED, MANUAL_ABORT, LOST_CONNECTION, NO_ERROR
         if (quitedEvent.result == CT_RESULT_NO_ERROR) {
-            tm.setTreeState(theObs->second, tsc.get("finished"));
+            if(theObs->second < 2000000) {
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+                OTDB::TreeMaintenance tm(itsOTDBconnection);
+                TreeStateConv tsc(itsOTDBconnection);
+                tm.setTreeState(theObs->second, tsc.get("finished"));
+#endif
+            }
+            else {
+                itsTMSSconnection->setSubtaskState(theObs->second, "finished");
+            }
         }
         else {
-            tm.setTreeState(theObs->second, tsc.get("aborted"));
+            if(theObs->second < 2000000) {
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+                OTDB::TreeMaintenance tm(itsOTDBconnection);
+                TreeStateConv tsc(itsOTDBconnection);
+                tm.setTreeState(theObs->second, tsc.get("aborted"));
+#endif
+            }
+            else {
+                itsTMSSconnection->setSubtaskState(theObs->second, "cancelled");
+            }
         }
-
         // free claimed observation in PVSS
         itsClaimerTask->freeObservation(observationName(theObs->second));
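// Illustrative sketch (not part of this patch): the OTDB-vs-TMSS dispatch
// above is repeated for every state transition; the same logic written once
// as a hypothetical helper, relying on the convention used here that IDs
// of 2000000 and up are TMSS subtask IDs:
//
//   void MACScheduler::setObservationState(int obsID, const string& otdbState,
//                                          const string& tmssState)
//   {
//       if (obsID < 2000000) {
//   #ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
//           OTDB::TreeMaintenance tm(itsOTDBconnection);
//           TreeStateConv         tsc(itsOTDBconnection);
//           tm.setTreeState(obsID, tsc.get(otdbState));
//   #endif
//       } else {
//           itsTMSSconnection->setSubtaskState(obsID, tmssState);
//       }
//   }
//
//   // e.g. for CONTROL_CONNECTED: setObservationState(theObs->second, "queued", "queued");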
 
@@ -612,10 +698,11 @@ GCFEvent::TResult MACScheduler::finishing_state(GCFEvent& event, GCFPortInterfac
     case F_ENTRY: {
         // update PVSS
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         itsPropertySet->setValue(PN_FSM_CURRENT_ACTION, GCFPVString("finished"));
         itsPropertySet->setValue(PN_FSM_ERROR,          GCFPVString(""));
         itsPropertySet->setValue(PN_MS_OTDB_CONNECTED,  GCFPVBool  (false));
-
+#endif
         itsTimerPort->setTimer(1L);
         break;
     }
@@ -653,8 +740,9 @@ void MACScheduler::_doOTDBcheck()
     // update PVSS database with polltime
     time_t now = time(0);
     ptime currentTime = from_time_t(now);
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     itsPropertySet->setValue(string(PN_MS_OTDB_LAST_POLL), GCFPVString(to_simple_string(currentTime)));
-
+#endif
     // always update planned list because we might need to start some of those
     // (and we assumed that the PlannedItv was the smallest)
     _updatePlannedList();
@@ -688,6 +776,7 @@ void MACScheduler::_updatePlannedList()
     ptime currentTime = from_time_t(now);
     ASSERTSTR (currentTime != not_a_date_time, "Can't determine systemtime, bailing out");
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     // get new list (list is ordered on starttime) of planned observations
     vector<OTDBtree> plannedDBlist = itsOTDBconnection->getTreeGroup(1, itsPlannedPeriod, itsExclPLcluster);
 
@@ -697,6 +786,14 @@ void MACScheduler::_updatePlannedList()
                                   time_duration(plannedDBlist[0].starttime - currentTime).total_seconds()));
     }
     // NOTE: do not exit routine on emptylist: we need to write an empty list to clear the DB
+#endif
+
+    Json::Value upcomingSubTasks = itsTMSSconnection->getSubTasksStartingInThreeMinutes();
+
+    if (!upcomingSubTasks.empty()) {
+        LOG_DEBUG(formatString("TMSSCheck:First planned observation (%s) is at %s",
+                               upcomingSubTasks[0]["url"].asCString(), upcomingSubTasks[0]["start_time"].asCString()));
+    }
 
     // make a copy of the current prepared observations (= observations shown in the navigator in the 'future'
     // list). By eliminating the observations that are in the current SAS list we end up (at the end of this function)
@@ -707,10 +804,12 @@ void MACScheduler::_updatePlannedList()
     // still knows the observation and will use the OLD information of the observation.
     ObsList backupObsList = itsPreparedObs;
 
-    // walk through the list, prepare PVSS for the new obs, update own admin lists.
+    // walk through the plannedDBlist, prepare PVSS for the new obs, update own admin lists.
+    // after walking through the plannedDBlist, do the same for upcomingSubTasks (TMSS)
     GCFPValueArray plannedArr;
-    int32          idx = MIN2(plannedDBlist.size(), itsMaxPlanned) - 1;
+
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+    int32          idx = MIN2(plannedDBlist.size(), itsMaxPlanned) - 1;
     for ( ; idx >= 0; idx--)  {
         if (plannedDBlist[idx].processType=="RESERVATION" || plannedDBlist[idx].processType=="MAINTENANCE") {
             continue;
         }
@@ -790,10 +889,110 @@ void MACScheduler::_updatePlannedList()
                 }
             }
         }
-    } // process all planned obs'
+    } // process all planned obs from OTDB
+#endif
+
+    // now walk through the upcomingSubTasks (TMSS), prepare PVSS for the new obs, update own admin lists.
+    //JS: 20200329: I decided to keep the loop simple at first, and then later add the same steps as in the loop above.
+    //That means, do all the stupid bookkeeping here in MAC as well, with its internal lists etc.
+    int idx2 = MIN2(upcomingSubTasks.size(), itsMaxPlanned) - 1;
+    for ( ; idx2 >= 0; idx2--)  {
+        Json::Value subtask = upcomingSubTasks[idx2];
+
+        // get subtask_id from url. I know, ugly, needs to be in json itself.
+        vector<string> tmp;
+        string url(subtask["url"].asString());
+        boost::split(tmp, url, [](char c){return c == '/';});
+        int subtask_id = stoi(tmp[tmp.size()-2]);
+
+        // construct name and timings info for observation
+        string obsName(observationName(subtask_id));
+        ptime start_time = time_from_string(subtask["start_time"].asString().replace(10, 1, " "));
+        ptime modTime = time_from_string(subtask["updated_at"].asString().replace(10, 1, " "));
+
+        // remove obs from backup of the planned-list (it is in the list again)
+        OLiter oldObsIter = backupObsList.find(subtask_id);
+        if (oldObsIter != backupObsList.end()) {
+            backupObsList.erase(oldObsIter);
+        }
+
+        // must we claim this observation at the claimMgr?
+        OLiter prepIter = itsPreparedObs.find(subtask_id);
+        if ((prepIter == itsPreparedObs.end()) || (prepIter->second.prepReady == false) ||
+            (prepIter->second.modTime != modTime)) {
+            // create a ParameterFile for this Observation
+            string parsetText = itsTMSSconnection->getParsetAsText(subtask_id);
+            if(prepIter == itsPreparedObs.end()) {
+                itsTMSSconnection->getSubTask(subtask_id);
+                LOG_INFO_STR(" *** PARSET for " << subtask_id << " ***" << std::endl << parsetText);
+            }
+
+            string filename(observationParset(subtask_id));
+            ParameterSet obsSpecs(false);
+            obsSpecs.adoptBuffer(parsetText);
+            obsSpecs.writeFile(filename);
+            LOG_INFO_STR("Wrote parset to " << filename);
+
+            // Claim a DP in PVSS and write obssettings to it so the operator can see it.
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+            LOG_INFO_STR("Requesting preparation of PVSS for " << obsName);
+            itsClaimerTask->prepareObservation(obsName);
+#endif
+            itsPreparedObs[subtask_id] = schedInfo(modTime, false);    // requested claim but no answer yet.
+        }
+        else {
+            // only add observations to the PVSS list when the claim was successful
+            // otherwise things will go wrong in the Navigator
+            plannedArr.push_back(new GCFPVString(obsName));
+        }
+
+        // should this observation (have) be(en) started?
+        int timeBeforeStart = time_duration(start_time - currentTime).total_seconds();
+        LOG_INFO(formatString("%s (%s) starts at %s which is in %d seconds",
+                              obsName.c_str(), url.c_str(),
+                              to_simple_string(start_time).c_str(),
+                              timeBeforeStart));
+
+        if (timeBeforeStart > 0 && timeBeforeStart <= (int)itsQueuePeriod) {
+            if (itsPreparedObs[subtask_id].prepReady == false) {
+                LOG_INFO_STR(obsName << " must be started but is not claimed yet.");
+            }
+            else {
+                // starttime of observation lays in queuePeriod. Start the controller-chain,
+                // this will result in CONTROL_STARTED event in our main task
+                // Note: as soon as the ObservationController has reported itself to the MACScheduler
+                //       the observation will not be returned in the 'plannedDBlist' anymore.
+                string cntlrName(controllerName(CNTLRTYPE_OBSERVATIONCTRL, 0, subtask_id));
+                if (itsControllerMap.find(cntlrName) == itsControllerMap.end()) {
+                    LOG_INFO_STR("Requesting start of " << cntlrName << " for subtask_id: " << subtask_id << " url: " << url);
+                    itsChildControl->startChild(CNTLRTYPE_OBSERVATIONCTRL,
+                                                subtask_id,
+                                                0,        // instanceNr
+                                                myHostname(false));
+                    // Note: controller is now in state NO_STATE/CONNECTED (C/R)
+                    LOG_INFO_STR("Requested start of " << cntlrName << " for subtask_id: " << subtask_id << " url: " << url);
+
+                    // add controller to our 'monitor' administration
+                    itsControllerMap[cntlrName] = subtask_id;
+                    LOG_DEBUG_STR("itsControllerMap[" << cntlrName << "]=" << subtask_id);
+                    if (!itsPreparedObs[subtask_id].parsetDistributed) {
+                        _setParsetOnMsgBus(observationParset(subtask_id));
+                        itsPreparedObs[subtask_id].parsetDistributed = true;
+                    }
+                }
+                else {
+                    LOG_DEBUG_STR("Observation " << subtask_id << " is already (being) started");
+                }
+            }
+        }
+    }
 
     // Finally we can pass the list with planned observations to PVSS.
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     itsPropertySet->setValue(PN_MS_PLANNED_OBSERVATIONS, GCFPVDynArr(LPT_DYNSTRING, plannedArr));
+#endif
     itsNrPlanned = plannedArr.size();
 
     // free used memory
@@ -822,15 +1021,17 @@ void MACScheduler::_updateActiveList()
 {
     LOG_DEBUG("_updateActiveList()");
 
+    GCFPValueArray activeArr;
+
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     // get new list (list is ordered on starttime)
     vector<OTDBtree> activeDBlist = itsOTDBconnection->getTreeGroup(2, 0, itsExclPLcluster);
     if (activeDBlist.empty()) {
-        LOG_DEBUG ("No active Observations");
+        LOG_DEBUG ("No active OTDB Observations");
         // NOTE: do not exit routine on emptylist: we need to write an empty list to clear the DB
     }
 
     // walk through the list, prepare PVSS for the new obs, update own admin lists.
-    GCFPValueArray activeArr;
     int32 idx = activeDBlist.size() - 1;
     for ( ; idx >= 0; idx--)  {
         if (activeDBlist[idx].processType=="RESERVATION" || activeDBlist[idx].processType=="MAINTENANCE") {
             continue;
@@ -847,9 +1048,47 @@ void MACScheduler::_updateActiveList()
             itsPreparedObs.erase(prepIter);
         }
     } // for
+#endif
+
+
+    // get new list (list is/should_be ordered on starttime)
+    Json::Value activeSubTasks = itsTMSSconnection->getActiveSubTasks();
+    if (activeSubTasks.empty()) {
+        LOG_DEBUG ("No active TMSS Observations");
+        // NOTE: do not exit routine on emptylist: we need to write an empty list to clear the DB
+    }
+
+    // walk through the list, prepare PVSS for the new obs, update own admin lists.
+ int32 idx2 = activeSubTasks.size() - 1; + for ( ; idx2 >= 0; idx2--) { + Json::Value subtask = activeSubTasks[idx2]; + +// if (subtask.processType=="RESERVATION" || subtask.processType=="MAINTENANCE") { +// continue; +// } + + // get subtask_id from url. I know, ugly, needs to be in json itself. + vector<string> tmp; + string url(subtask["url"].asString()); + boost::split(tmp, url, [](char c){return c == '/';}); + int subtask_id = stoi(tmp[tmp.size()-2]); + + // construct name info for observation + string obsName(observationName(subtask_id)); + activeArr.push_back(new GCFPVString(obsName)); + + // remove obs from planned-list if its still in there. + OLiter prepIter = itsPreparedObs.find(subtask_id); + if (prepIter != itsPreparedObs.end()) { + itsPreparedObs.erase(prepIter); + } + } // for +#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE // Finally we can pass the list with active observations to PVSS. itsPropertySet->setValue(PN_MS_ACTIVE_OBSERVATIONS, GCFPVDynArr(LPT_DYNSTRING, activeArr)); +#endif + itsNrActive = activeArr.size(); // free used memory @@ -865,17 +1104,19 @@ void MACScheduler::_updateFinishedList() { LOG_DEBUG("_updateFinishedList()"); + GCFPValueArray finishedArr; + int32 freeSpace = MAX_CONCURRENT_OBSERVATIONS - itsNrPlanned - itsNrActive; + +#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE // get new list (list is ordered on starttime) vector<OTDBtree> finishedDBlist = itsOTDBconnection->getTreeGroup(3, itsFinishedPeriod, itsExclPLcluster); if (finishedDBlist.empty()) { - LOG_DEBUG ("No finished Observations"); + LOG_DEBUG ("No finishing OTDB Observations"); // NOTE: do not exit routine on emptylist: we need to write an empty list to clear the DB } // walk through the list, prepare PVSS for the new obs, update own admin lists. // We must show the last part of the (optional) limited list. - GCFPValueArray finishedArr; - int32 freeSpace = MAX_CONCURRENT_OBSERVATIONS - itsNrPlanned - itsNrActive; int32 idx = finishedDBlist.size() - 1; int32 limit = idx - (MIN2(MIN2(finishedDBlist.size(), itsMaxFinished), (uint32)freeSpace) - 1); for ( ; idx >= limit ; idx--) { @@ -891,6 +1132,50 @@ void MACScheduler::_updateFinishedList() // Finally we can pass the list with finished observations to PVSS. itsPropertySet->setValue(PN_MS_FINISHED_OBSERVATIONS, GCFPVDynArr(LPT_DYNSTRING, finishedArr)); + // free used memory + for (int i = finishedArr.size()-1; i>=0; --i) { + delete finishedArr[i]; + } +#endif + + //reset for TMSS + finishedArr = GCFPValueArray(); + freeSpace = MAX_CONCURRENT_OBSERVATIONS - itsNrPlanned - itsNrActive; + + // get new list (list is/should_be ordered on starttime) + Json::Value finishingSubTasks = itsTMSSconnection->getFinishingSubTasks(); + if (finishingSubTasks.empty()) { + LOG_DEBUG ("No finishing TMSS Observations"); + // NOTE: do not exit routine on emptylist: we need to write an empty list to clear the DB + } + + // walk through the list, prepare PVSS for the new obs, update own admin lists. + // We must show the last part of the (optional) limited list. + int32 idx2 = finishingSubTasks.size() - 1; + int32 limit2 = idx2 - (MIN2(MIN2(finishingSubTasks.size(), itsMaxFinished), (uint32)freeSpace) - 1); + for ( ; idx2 >= limit2 ; idx2--) { + Json::Value subtask = finishingSubTasks[idx2]; + +// if (subtask.processType=="RESERVATION" || subtask.processType=="MAINTENANCE") { +// continue; +// } + + // get subtask_id from url. I know, ugly, needs to be in json itself. 
+        vector<string> tmp;
+        string url(subtask["url"].asString());
+        boost::split(tmp, url, [](char c){return c == '/';});
+        int subtask_id = stoi(tmp[tmp.size()-2]);
+
+        // construct name info for observation
+        string obsName(observationName(subtask_id));
+        finishedArr.push_back(new GCFPVString(obsName));
+    }    // for
+
+    // Finally we can pass the list with finished observations to PVSS.
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
+    itsPropertySet->setValue(PN_MS_FINISHED_OBSERVATIONS, GCFPVDynArr(LPT_DYNSTRING, finishedArr));
+#endif
+
     // free used memory
     for (int i = finishedArr.size()-1; i>=0; --i) {
         delete finishedArr[i];
     }
@@ -910,7 +1195,9 @@ void MACScheduler::_setParsetOnMsgBus(const string& filename) const
     // from, forUser, summary, protocol, protocolVersion, momID, sasID
     TaskSpecificationSystem    outMsg("LOFAR.MACScheduler", "", "", momID, sasID, obsSpecs);
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     itsMsgQueue->send(outMsg);
+#endif
 }
 
 //
diff --git a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.conf.in b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.conf.in
index fd6dd597002a64673a96b3b0da3ea8272152e647..4678b69660c0b70ce2e88ec96ef65fd99470346a 100644
--- a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.conf.in
+++ b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.conf.in
@@ -3,10 +3,17 @@
 # OTDB connection info
 OTDBdatabasename = LOFAR_4
 OTDBhostname = sasdb
+OTDBport = 5432
 OTDBusername = paulus
 OTDBpassword = boskabouter
 OTDBpollInterval = 5s
 
+# TMSS connection info
+TMSShostname = 127.0.0.1 # tmss-ua.control.lofar
+TMSSport = 8000 #8008
+TMSSusername = test # TODO: replace test user/pass with secret user/pass which is not stored in git
+TMSSpassword = test # TODO: replace test user/pass with secret user/pass which is not stored in git
+
 # startup periods of Observations
 QueuePeriod = 3m
 
diff --git a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.h b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.h
index 188d01364883ed3649aa4a234e3233eda771c34c..a318a89a992df60719a9a67223f7289ae667ece0 100644
--- a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.h
+++ b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.h
@@ -23,6 +23,8 @@
 #ifndef MACScheduler_H
 #define MACScheduler_H
 
+#include <memory>
+
 //# GCF Includes
 #include <MACIO/GCF_Event.h>
 #include <GCF/TM/GCF_Control.h>
@@ -48,6 +50,7 @@
 #include <Common/ParameterSet.h>
 
 #include "ObsClaimer.h"
+#include "TMSSBridge.h"
 
 #include <boost/date_time/posix_time/posix_time.hpp>
@@ -168,6 +171,9 @@ private:
     // OTDB related variables.
     OTDB::OTDBconnection*    itsOTDBconnection;    // connection to the database
 
+    // TMSS Bridge
+    std::shared_ptr<TMSSBridge> itsTMSSconnection;    // connection to TMSS
+
     // Cluster to exclude for pipelines. Key is used in the getTreeGroup stored-procedure in OTDB.
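+    // (Note, sketch: this exclusion currently appears to be applied only in the
+    // OTDB getTreeGroup calls; the TMSS queries in TMSSBridge do not filter on
+    // cluster yet, see the TODOs there.)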
     string itsExclPLcluster;    // like !CEP2 or !CEP4
 
diff --git a/MAC/APL/MainCU/src/MACScheduler/MACSchedulerMain.cc b/MAC/APL/MainCU/src/MACScheduler/MACSchedulerMain.cc
index 6d8caa91d240823ee307ba586b453bf28ae0091a..ab1915b7428152dc71cfef0562c7ca29cbc20d4e 100644
--- a/MAC/APL/MainCU/src/MACScheduler/MACSchedulerMain.cc
+++ b/MAC/APL/MainCU/src/MACScheduler/MACSchedulerMain.cc
@@ -41,8 +41,10 @@ int main(int argc, char* argv[])
 
     MessageBus::init();
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     ChildControl*    cc = ChildControl::instance();
     cc->start();    // make initial transition
+#endif
 
     MACScheduler    ms;
     ms.start();    // make initial transition
diff --git a/MAC/APL/MainCU/src/MACScheduler/ObsClaimer.cc b/MAC/APL/MainCU/src/MACScheduler/ObsClaimer.cc
index 6614dd202eb6a7e9ddc8358bb6762b30ec7b2f33..996862fd4815c63b423ca9dd97c8a42d9d237317 100644
--- a/MAC/APL/MainCU/src/MACScheduler/ObsClaimer.cc
+++ b/MAC/APL/MainCU/src/MACScheduler/ObsClaimer.cc
@@ -79,8 +79,10 @@ ObsClaimer::ObsClaimer(GCFTask* mainTask) :
     itsITCPort = new GCFITCPort(*mainTask, *this, "ITCPort", GCFPortInterface::SAP, CM_PROTOCOL);
     ASSERTSTR(itsITCPort, "Can't construct an ITC port");
 
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
     itsClaimMgrTask = ClaimMgrTask::instance();
     ASSERTSTR(itsClaimMgrTask, "Can't construct a claimMgrTask");
+#endif
 
     registerProtocol(CM_PROTOCOL, CM_PROTOCOL_STRINGS);
 }
@@ -173,7 +175,9 @@ GCFEvent::TResult ObsClaimer::idle_state (GCFEvent& event, GCFPortInterface& por
     while (iter != end) {
         if (iter->second->state == OS_NEW) {
             iter->second->state = OS_CLAIMING;
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
             itsClaimMgrTask->claimObject("Observation", "LOFAR_ObsSW_"+iter->second->obsName, *itsITCPort);
+#endif
             // will result in CM_CLAIM_RESULT event
             break;    // claim one at the time.
         }
@@ -182,11 +186,13 @@ GCFEvent::TResult ObsClaimer::idle_state (GCFEvent& event, GCFPortInterface& por
     if (iter == end) {        // nothing to claim. Something to free?
         FMiter    FreeIter = itsFreeMap.begin();
         FMiter    FreeEnd  = itsFreeMap.end();
+#ifndef DONT_COMPILE_OTDB_AND_PVSS_CODE
         while (FreeIter != FreeEnd) {
             itsClaimMgrTask->freeObject("Observation", "LOFAR_ObsSW_"+FreeIter->second->obsName);    // will not result in an event
             ++FreeIter;
         }
+#endif
         itsFreeMap.clear();
     }
 }
diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc
new file mode 100644
index 0000000000000000000000000000000000000000..3083e6c8496c42c4cc6867059255bf1ce40ff426
--- /dev/null
+++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc
@@ -0,0 +1,261 @@
+// TMSSBridge.cc: Implementation of the TMSS Bridge, interface between MAC Scheduler and TMSS
+//
+// Copyright (C) 2020
+// ASTRON (Netherlands Foundation for Research in Astronomy)
+// P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, softwaresupport@astron.nl
+//
+// This program is free software; you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation; either version 2 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+//
+// $Id$
+
+#include <lofar_config.h>
+#include <Common/LofarLogger.h>
+
+#include "TMSSBridge.h"
+
+#include <boost/date_time/posix_time/posix_time.hpp>
+#include <boost/algorithm/string.hpp>
+#include <cstdlib>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <curl/curl.h>
+#include <jsoncpp/json/json.h>
+
+using namespace std;
+
+
+namespace LOFAR {
+    namespace MainCU {
+
+using namespace boost::posix_time;
+using namespace std;
+
+//
+// TMSSBridge Constructor
+//
+TMSSBridge::TMSSBridge(const std::string &hostname, int port, const std::string &username, const std::string &password):
+    itsUser(username),
+    itsPassword(password),
+    itsHost(hostname),
+    itsPort(port)
+{
+}
+
+//
+// TMSSBridge Destructor
+//
+TMSSBridge::~TMSSBridge()
+{
+}
+
+
+Json::Value TMSSBridge::getSubTask(int subtask_id)
+{
+    string queryStr = "/api/subtask/" + to_string(subtask_id) + "/";
+
+    Json::Value result;
+    if(httpGETAsJson(queryStr, result))
+        return result;
+    return Json::Value();    // null value: .empty() is true, unlike Json::Value("")
+}
+
+//
+// get all subtask IDs that should run within three minutes (ordered in time if multiple are found)
+// for the given cluster
+//
+Json::Value TMSSBridge::getSubTasksStartingInThreeMinutes()
+{
+    time_t now = time(0);
+    ptime lower_limit = from_time_t(now);
+    ptime upper_limit = from_time_t(now+3*60);
+
+    //TODO: make exact query as in SAS/OTDB/sql/getTreeGroup_func.sql with OR'd states and exact timewindow
+    string queryStr = "/api/subtask/?state__value=scheduled&start_time__gt=" + to_iso_extended_string(lower_limit) + "&start_time__lt=" + to_iso_extended_string(upper_limit) + "&ordering=start_time";
+
+    Json::Value result;
+    if(httpGETAsJson(queryStr, result))
+        return result["results"];
+    return Json::Value();
+}
+
+Json::Value TMSSBridge::getActiveSubTasks()
+{
+    ptime now = from_time_t(time(0));
+    //TODO: make exact query as in SAS/OTDB/sql/getTreeGroup_func.sql with OR'd states and exact timewindow
+    string queryStr = "/api/subtask/?state__value=started&start_time__lt=" + to_iso_extended_string(now) + "&stop_time__gt=" + to_iso_extended_string(now) + "&ordering=start_time";
+
+    Json::Value result;
+    if(httpGETAsJson(queryStr, result))
+        return result["results"];
+    return Json::Value();
+}
+
+Json::Value TMSSBridge::getFinishingSubTasks()
+{
+    ptime justnow = from_time_t(time(0)-3*60);
+    //TODO: make exact query as in SAS/OTDB/sql/getTreeGroup_func.sql with OR'd states and exact timewindow
+    string queryStr = "/api/subtask/?state__value=finishing&stop_time__gt=" + to_iso_extended_string(justnow) + "&ordering=start_time";
+
+    Json::Value result;
+    if(httpGETAsJson(queryStr, result))
+        return result["results"];
+    return Json::Value();
+}
+
+std::string TMSSBridge::getParsetAsText(int subtask_id)
+{
+    string queryStr = "/api/subtask/" + to_string(subtask_id) + "/parset";
+    string result;
+    if(httpQuery(queryStr, result, "GET"))
+        return result;
+    return "";
+}
+
+bool TMSSBridge::setSubtaskState(int subtask_id, const string& state)
+{
+    string queryStr = "/api/subtask/" + to_string(subtask_id) + "/";
+    string result;
+    if(httpQuery(queryStr, result, "PATCH", "{ \"state\": \"/api/subtask_state/" + state +"/\" }")) {
+        LOG_INFO_STR("Updated subtask id=" << subtask_id << " to status=" << state);
+        return true;
+    }
+
+    LOG_ERROR_STR("Could not update subtask id=" << subtask_id << " to status=" << state
+                  << " response=" << result);
+    return false;
+}
+
+// libcurl write callback: appends the received chunk to the std::string
+// that was registered via CURLOPT_WRITEDATA.
+static std::size_t callback(const char* in,
+                            std::size_t size,
+                            std::size_t num,
+                            std::string* out)
+{
+    const std::size_t totalBytes(size * num);
+    out->append(in, totalBytes);
+    return totalBytes;
+}
+
+
+//
+// Performs an HTTP query and returns the response body in 'result'.
+// Succeeds only when the HTTP response status code is 200.
+// Inspired by https://gist.github.com/connormanning/41efa6075515019e499c
+// Example:
+//   httpQuery("/api/subtask/?start_time__lt=2020-03-04T12:03:00")
+//   puts the JSON response text in 'result'
+//
+bool TMSSBridge::httpQuery(const string& target, string &result, const string& query_method, const string& data)
+{
+    const std::string url(std::string("http://") + itsHost + std::string(":") + std::to_string(itsPort) + target);
+
+    CURL* curl = curl_easy_init();
+    if (!curl) {
+        LOG_ERROR_STR("curl_easy_init() failed for " << url);
+        return false;
+    }
+
+    // header list for PUT/PATCH requests; owned here, freed after the request
+    struct curl_slist *headers = NULL;
+
+    // setup authentication
+    curl_easy_setopt(curl, CURLOPT_USERNAME, itsUser.c_str());
+    curl_easy_setopt(curl, CURLOPT_PASSWORD, itsPassword.c_str());
+    curl_easy_setopt(curl, CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
+
+    // Set remote URL.
+    curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
+
+    // Set HTTP method
+    if (query_method == "GET")
+    {
+        curl_easy_setopt(curl, CURLOPT_HTTPGET, 1L);
+    }
+    else if (query_method == "POST")
+    {
+        curl_easy_setopt(curl, CURLOPT_POST, 1L);
+        curl_easy_setopt(curl, CURLOPT_POSTFIELDS, data.c_str());
+    }
+    else if (query_method == "PUT" || query_method == "PATCH" )
+    {
+        LOG_DEBUG_STR("[" << query_method << "] url=" << url << " data=" << data);
+
+        curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, query_method.c_str());
+        curl_easy_setopt(curl, CURLOPT_USERAGENT, "TMSSBridge using libcurl");
+
+        headers = curl_slist_append(headers, "Expect:");
+        headers = curl_slist_append(headers, "Content-Type: application/json");
+        curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
+
+        curl_easy_setopt(curl, CURLOPT_POSTFIELDS, data.c_str());
+        curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, -1L);
+    }
+
+    // Don't bother trying IPv6, which would increase DNS resolution time.
+    curl_easy_setopt(curl, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4);
+
+    // Don't wait forever, time out after 10 seconds.
+    curl_easy_setopt(curl, CURLOPT_TIMEOUT, 10);
+
+    // Follow HTTP redirects if necessary.
+    curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
+
+    // Response information.
+    long httpCode(0);
+    std::unique_ptr<std::string> httpData(new std::string());
+
+    // Hook up data handling function.
+    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, callback);
+
+    // Hook up data container (will be passed as the last parameter to the
+    // callback handling function). Can be any pointer type, since it will
+    // internally be passed as a void pointer.
+    curl_easy_setopt(curl, CURLOPT_WRITEDATA, httpData.get());
+
+    // Run the request, capture the HTTP response code, and clean up.
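+    // Note: curl_easy_perform() returns a CURLcode; on transport errors (e.g.
+    // connection refused) no HTTP status is received, httpCode stays 0 and the
+    // query is reported as failed below.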
+    const CURLcode res = curl_easy_perform(curl);
+    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &httpCode);
+    result = *httpData;
+
+    // cleanup (curl_global_init()/curl_global_cleanup() are program-wide and
+    // intentionally not called per request; curl_easy_init() does a lazy
+    // global init when needed)
+    curl_slist_free_all(headers);
+    curl_easy_cleanup(curl);
+
+    LOG_INFO_STR(string("[") << query_method << "] code=" << httpCode << " " << url);
+    if (res == CURLE_OK && httpCode == 200) {
+        return true;
+    }
+
+    LOG_ERROR_STR(string("Couldn't ") << query_method << " " << url << " curl=" << curl_easy_strerror(res) << " code=" << to_string(httpCode) << " result: " << result);
+    return false;
+}
+
+bool TMSSBridge::httpGETAsJson(const string& target, Json::Value &result)
+{
+    result = Json::Value();
+
+    std::string text_result;
+    if(this->httpQuery(target, text_result)) {
+        Json::Reader jsonReader;
+        if (jsonReader.parse(text_result, result))
+        {
+            if(result["count"] != 0) {
+                LOG_DEBUG_STR(string("JSON data for ") << target << std::endl << result.toStyledString());
+            }
+            return true;
+        }
+    }
+
+    LOG_ERROR_STR(string("Could not parse HTTP response from ") << target << " as JSON. response=\n" << text_result);
+    return false;
+}
+
+
+    };//MainCU
+};//LOFAR
diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.h b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.h
new file mode 100644
index 0000000000000000000000000000000000000000..3658bd59f75134036c05156b5d46a6c50da952f6
--- /dev/null
+++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.h
@@ -0,0 +1,68 @@
+// TMSSBridge.h: TMSS Bridge, the interface between the MAC Scheduler and TMSS
+//
+// Copyright (C) 2020
+// ASTRON (Netherlands Foundation for Research in Astronomy)
+// P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, softwaresupport@astron.nl
+//
+// This program is free software; you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation; either version 2 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+//
+// $Id$
+
+#ifndef TMSSBRIDGE_H
+#define TMSSBRIDGE_H
+
+#include <Common/LofarTypes.h>
+#include <boost/date_time/posix_time/posix_time.hpp>
+#include <jsoncpp/json/json.h>
+#include <string>
+#include <vector>
+
+namespace LOFAR {
+    namespace MainCU {
+
+class TMSSBridge
+{
+public:
+    TMSSBridge (const std::string &hostname, int port, const std::string &username, const std::string &password);
+    ~TMSSBridge ();
+
+    Json::Value getSubTask(int subtask_id);
+
+    Json::Value getSubTasksStartingInThreeMinutes();
+    Json::Value getActiveSubTasks();
+    Json::Value getFinishingSubTasks();
+    std::string getParsetAsText(int subtask_id);
+    bool setSubtaskState(int subtask_id, const std::string& state);
+
+    // Conceptually private; public only so it can be called from the UnitTest++ tests.
+    std::vector<std::string> translateHttpResultToSortedUrlList(Json::Value result);
+
+protected:
+    // http request to TMSS
+    bool httpQuery(const std::string& target, std::string &result, const std::string& query_method="GET", const std::string& data="");
+    bool httpGETAsJson(const std::string& target, Json::Value &result);
+
+private:
+    // Copying is not allowed
+    TMSSBridge(const TMSSBridge&);
+    TMSSBridge& operator=(const TMSSBridge&);
+
+    std::string itsUser;
+    std::string itsPassword;
+    std::string itsHost;
+    int itsPort;
+};
+
+    };//MainCU
+};//LOFAR
+#endif
diff --git a/MAC/CMakeLists.txt b/MAC/CMakeLists.txt
index c1c9d2dcfb965a6dbdd6572cc3300a12fdf420e8..62b099b2e98fc9e324fcb81fe168bbc12793f743 100644
--- a/MAC/CMakeLists.txt
+++ b/MAC/CMakeLists.txt
@@ -11,7 +11,7 @@ lofar_add_package(WinCC_Datapoints Deployment/data/PVSS)
 lofar_add_package(OTDB_Comps Deployment/data/OTDB)
 lofar_add_package(StaticMetaData Deployment/data/StaticMetaData)
 lofar_add_package(WinCCPublisher WinCCPublisher)
-lofar_add_package(WinCCREST)
+# RGOE: does not build on the buildhost, so skip for now...
lofar_add_package(WinCCREST) lofar_add_package(WinCCDBBridge) diff --git a/MAC/Deployment/data/PVSS/License/471_3031_Astron_Central_5_2.log b/MAC/Deployment/data/PVSS/License/471_3031_Astron_Central_5_2.log new file mode 100644 index 0000000000000000000000000000000000000000..1b8815223bdd9fdd951cf7bf63752825c4df41da --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/471_3031_Astron_Central_5_2.log @@ -0,0 +1,34 @@ + +--------------------------------------------------- +[license] +code = "mcu001 90351139072" +version = 31600002 +sn = "471_3031_Astron_Central_5_2/4" +date = 2020.04.09;12:05:23,000 +comment = "Centos7 MCUmain System" +expire = 0000.00.00;00:00:00,000 +redundancy = 1 +ui = 14 +para = 4 +dde = 5 +event = 1 +ios = 100000 +ssi = 0 +api = 80 +excelreport = 5 +http = 19 +infoserver = 5000 +comcenter = 5 +maintenance = 0 +scheduler = 0 +s7 = 1 +distributed = 255 +ultralight = 5 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/471_3031_Astron_Station_5_2.log b/MAC/Deployment/data/PVSS/License/471_3031_Astron_Station_5_2.log new file mode 100644 index 0000000000000000000000000000000000000000..c2e12c952ee1d3c36801b42503cf4b6c20178de9 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/471_3031_Astron_Station_5_2.log @@ -0,0 +1,1890 @@ + +--------------------------------------------------- +[license] +code = "ccu199.control.lofar 80507640383" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/1" +date = 2020.04.08;14:58:31,000 +comment = "Centos7 test CCU System" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS001C 10456784610" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/2" +date = 2020.04.09;09:07:02,000 +comment = "Centos7 CS001" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS002C 80447254084" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/3" +date = 2020.04.09;09:15:10,000 +comment = "Centos7 CS001" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS003C 30475116407" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/4" +date = 2020.04.09;10:16:38,000 +comment = "Centos7 CS003" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 
+distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS004C 00342489231" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/5" +date = 2020.04.09;11:13:31,000 +comment = "Centos7 CS004" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS005C 50556624003" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/6" +date = 2020.04.09;11:29:37,000 +comment = "Centos7 CS005" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS006C 00106307845" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/7" +date = 2020.04.09;12:11:49,000 +comment = "Centos7 CS006" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "ccu001 40470136751" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/8" +date = 2020.04.09;12:15:11,000 +comment = "Centos7 CCU System" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS007C 20239096812" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/9" +date = 2020.04.09;13:48:48,000 +comment = "Centos7 CS007" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS011C 40450842975" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/10" +date = 2020.04.09;13:59:13,000 +comment = "Centos7 CS011" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 
+ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS013C 30019775961" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/11" +date = 2020.04.09;14:09:29,000 +comment = "Centos7 CS013" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS017C 10217011942" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/12" +date = 2020.04.09;14:47:46,000 +comment = "Centos7 CS017" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS021C 30140646590" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/13" +date = 2020.04.09;14:56:03,000 +comment = "Centos7 CS021" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS024C 90444348222" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/14" +date = 2020.04.09;15:07:08,000 +comment = "Centos7 CS024" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS026C 70038930262" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/15" +date = 2020.04.09;15:40:17,000 +comment = "Centos7 CS026" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS028C 20194119711" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/16" +date = 2020.04.10;08:16:00,000 +comment = "Centos7 CS028" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = 
"CS030C 60016247642" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/17" +date = 2020.04.10;08:22:34,000 +comment = "Centos7 CS030" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS031C 00348849291" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/18" +date = 2020.04.10;08:27:07,000 +comment = "Centos7 CS031" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS032C 50245891237" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/19" +date = 2020.04.10;08:31:47,000 +comment = "Centos7 CS032" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS101C 50569632637" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/20" +date = 2020.04.10;08:35:44,000 +comment = "Centos7 CS101" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS103C 90538656435" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/21" +date = 2020.04.10;08:39:46,000 +comment = "Centos7 CS103" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS201C 10095958003" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/22" +date = 2020.04.10;08:43:42,000 +comment = "Centos7 CS201" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS301C 30434578596" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/23" +date = 
2020.04.10;08:55:41,000 +comment = "Centos7 CS301" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS302C 20172722652" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/24" +date = 2020.04.10;09:00:01,000 +comment = "Centos7 CS302" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS401C 40191825945" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/25" +date = 2020.04.10;09:04:35,000 +comment = "Centos7 CS401" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "CS501C 20385608525" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/26" +date = 2020.04.10;09:08:48,000 +comment = "Centos7 CS501" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS106C 20591522242" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/27" +date = 2020.04.10;10:04:13,000 +comment = "Centos7 RS106" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS205C 00273952013" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/28" +date = 2020.04.10;10:08:06,000 +comment = "Centos7 RS205" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS208C 30488082003" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/29" +date = 2020.04.10;10:12:07,000 +comment = "Centos7 RS208" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui 
= 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS210C 10414998697" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/30" +date = 2020.04.10;10:16:03,000 +comment = "Centos7 RS210" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS305C 70237252554" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/31" +date = 2020.04.10;10:19:58,000 +comment = "Centos7 RS305" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS306C 80535741009" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/32" +date = 2020.04.10;10:30:23,000 +comment = "Centos7 RS306" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS307C 40113807615" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/33" +date = 2020.04.10;10:34:59,000 +comment = "Centos7 RS307" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS310C 10499420922" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/34" +date = 2020.04.10;10:39:00,000 +comment = "Centos7 RS310" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS406C 00211690373" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/35" +date = 2020.04.10;13:07:57,000 +comment = "Centos7 RS406" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver 
= 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS407C 50203225436" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/36" +date = 2020.04.10;13:11:02,000 +comment = "Centos7 RS407" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS409C 00109942962" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/37" +date = 2020.04.10;13:14:24,000 +comment = "Centos7 RS409" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS503C 20107013190" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/38" +date = 2020.04.10;13:23:37,000 +comment = "Centos7 RS503" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS508C 10210866275" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/39" +date = 2020.04.10;13:49:32,000 +comment = "Centos7 RS508" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "RS509C 40209098529" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/40" +date = 2020.04.10;14:04:47,000 +comment = "Centos7 RS509" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "DE601C 10033383939" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/41" +date = 2020.04.10;14:16:47,000 +comment = "Centos7 DE601" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight 
= 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "DE602C 30072366803" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/42" +date = 2020.04.10;14:20:43,000 +comment = "Centos7 DE602" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "DE603C 10146080536" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/43" +date = 2020.04.10;14:24:25,000 +comment = "Centos7 DE603" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "DE604C 20000172152" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/44" +date = 2020.04.10;14:28:13,000 +comment = "Centos7 DE604" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "DE605C 30270172639" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/45" +date = 2020.04.10;14:32:47,000 +comment = "Centos7 DE605" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "FR606C 50523294260" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/46" +date = 2020.04.10;14:37:40,000 +comment = "Centos7 FR606" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "SE607C 10547597978" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/47" +date = 2020.04.10;14:42:43,000 +comment = "Centos7 SE607" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + 
+--------------------------------------------------- +[license] +code = "UK608C 20316744537" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/48" +date = 2020.04.10;14:59:34,000 +comment = "Centos7 UK608" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "DE609C 90330703195" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/49" +date = 2020.04.10;15:16:43,000 +comment = "Centos7 DE609" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "PL610C 60546656721" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/50" +date = 2020.04.10;15:24:03,000 +comment = "Centos7 PL610" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "PL611C 90272092612" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/51" +date = 2020.04.10;15:29:42,000 +comment = "Centos7 PL611" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "PL612C 50519699790" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/52" +date = 2020.04.10;15:36:37,000 +comment = "Centos7 PL612" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "LV614C 20245203426" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/53" +date = 2020.04.10;15:44:54,000 +comment = "Centos7 LV614" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + + +--------------------------------------------------- +[license] +code = "IE613C 80092027996" +version = 
31600002 +sn = "471_3031_Astron_Station_5_2/54" +date = 2020.04.12;19:29:09,000 +comment = "Centos7 IE613" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/Astron_Central_1_shield.txt b/MAC/Deployment/data/PVSS/License/Astron_Central_1_shield.txt index a1c5a1625f3fbf6835ba1c94db32e190a9a7dfd8..0d1621234c3ecfa5ba32e68afdd9e184fb4a5596 100644 --- a/MAC/Deployment/data/PVSS/License/Astron_Central_1_shield.txt +++ b/MAC/Deployment/data/PVSS/License/Astron_Central_1_shield.txt @@ -1,5 +1,5 @@ [license] -code = "dongleHost 90272035228" +code = "dongleHost 40169511539" version = 31600002 sn = "471_3031_Astron_Central_5_2" expire = 0000.00.00;00:00:00,000 @@ -28,5 +28,5 @@ pararemote = 0 ctrlext = 1 update = 0 licenseMax = 8 -licenseLeft = 6 +licenseLeft = 4 diff --git a/MAC/Deployment/data/PVSS/License/Astron_Station_1_shield.txt b/MAC/Deployment/data/PVSS/License/Astron_Station_1_shield.txt index 3ea11580e68c8a5ad0088ef338d8e1ee64bf7a6e..10551a0dccf3ebce0756c6dff0ac18fb4345ba62 100644 --- a/MAC/Deployment/data/PVSS/License/Astron_Station_1_shield.txt +++ b/MAC/Deployment/data/PVSS/License/Astron_Station_1_shield.txt @@ -1,7 +1,7 @@ [license] -code = "dongleHost 40215168481" +code = "dongleHost 60439300821" version = 31600002 -sn = "471_3031_Astron_Station_5" +sn = "471_3031_Astron_Station_5_2" expire = 0000.00.00;00:00:00,000 redundancy = 0 ui = 1 @@ -29,5 +29,5 @@ pararemote = 0 ctrlext = 1 update = 0 licenseMax = 100 -licenseLeft = 99 +licenseLeft = 46 diff --git a/MAC/Deployment/data/PVSS/License/Central.rtu b/MAC/Deployment/data/PVSS/License/Central.rtu index 36c8e105136a17f4722558e9d3062f9e9a9ecda4..cc44d9b81d7665451a8637487077238e75ff054d 100644 --- a/MAC/Deployment/data/PVSS/License/Central.rtu +++ b/MAC/Deployment/data/PVSS/License/Central.rtu @@ -5,5 +5,5 @@ Version=1.00 [Contents] ; 1 command for WIBU-BOX with Serial Number 9-5510484: -N5qGR 010NG NNNGq KNNNh 2B3qM y324w -5FTGN M0 +N5qGR 010NG NNNGw NNNN0 8Zu1s 75TB5 +46FhT L8 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CCU001.txt b/MAC/Deployment/data/PVSS/License/HWCode_CCU001.txt new file mode 100644 index 0000000000000000000000000000000000000000..2d3bbf0bfc7d4ba9ec57ed10eb06b3d0b2b22ab3 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CCU001.txt @@ -0,0 +1,3 @@ +[license] +code = "ccu001 14006546712" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CCU199.txt b/MAC/Deployment/data/PVSS/License/HWCode_CCU199.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ce5e4dbc67a2b32e70736acf8d84efd20a7bbfb --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CCU199.txt @@ -0,0 +1 @@ +ccu199.control.lofar 61426158946 \ No newline at end of file diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS001.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS001.txt new file mode 100644 index 0000000000000000000000000000000000000000..8698efacac0eb941ac7ed9d6676bc4367bd5732b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS001.txt @@ -0,0 +1,3 @@ +[license] +code = "CS001C 43942334873" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS002.txt 
b/MAC/Deployment/data/PVSS/License/HWCode_CS002.txt new file mode 100644 index 0000000000000000000000000000000000000000..59976ca9cbe1f4f1a645c4cc822803372a332ffc --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS002.txt @@ -0,0 +1,4 @@ +[license] +code = "CS002C 31235135861" +version = 31600002 + diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS003.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS003.txt new file mode 100644 index 0000000000000000000000000000000000000000..677c703adfe4f8356f7012d10877fb6a1137bb17 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS003.txt @@ -0,0 +1,3 @@ +[license] +code = "CS003C 14221960083" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS004.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS004.txt new file mode 100644 index 0000000000000000000000000000000000000000..fad49d718378dac13aeb60e2dc272817e13faa33 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS004.txt @@ -0,0 +1,3 @@ +[license] +code = "CS004C 21132892246" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS005.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS005.txt new file mode 100644 index 0000000000000000000000000000000000000000..73814edc86731d3cee0f5393ed7f442809e7352c --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS005.txt @@ -0,0 +1,3 @@ +[license] +code = "CS005C 10492674919" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS006.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS006.txt new file mode 100644 index 0000000000000000000000000000000000000000..b5e6429d36db90ad1cba6c4f355028297ecc440c --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS006.txt @@ -0,0 +1,3 @@ +[license] +code = "CS006C 42774937483" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS007.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS007.txt new file mode 100644 index 0000000000000000000000000000000000000000..d5b700f77808d8298c259a35c210d4673cfa1f18 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS007.txt @@ -0,0 +1,3 @@ +[license] +code = "CS007C 31443220661" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS011.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS011.txt new file mode 100644 index 0000000000000000000000000000000000000000..d1e5d568f04ec4684c5dd07f29e2426b61c9ff09 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS011.txt @@ -0,0 +1,3 @@ +[license] +code = "CS011C 00064008033" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS013.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS013.txt new file mode 100644 index 0000000000000000000000000000000000000000..171a336e44c54ca3526f92e24ccf96fb7538691a --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS013.txt @@ -0,0 +1,3 @@ +[license] +code = "CS013C 41872072962" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS017.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS017.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c1fee1c4c8d37e3242126e2fe2c38d86769936e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS017.txt @@ -0,0 +1,3 @@ +[license] +code = "CS017C 13559240397" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS021.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS021.txt new file mode 100644 index 0000000000000000000000000000000000000000..b0c6f6baa24ab0daae0d974c11ae79c23081622b --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/HWCode_CS021.txt @@ -0,0 +1,4 @@ +[license] +code = "CS021C 50629572310" +version = 31600002 + diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS024.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS024.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd0854dd9437dcf77f84a216dbcbab28c7df6410 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS024.txt @@ -0,0 +1,3 @@ +[license] +code = "CS024C 23187337798" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS026.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS026.txt new file mode 100644 index 0000000000000000000000000000000000000000..878efa25ac987ffc74fbed4dcfaa2a0d82b57b73 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS026.txt @@ -0,0 +1,4 @@ +[license] +code = "CS026C 24188583191" +version = 31600002 + diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS028.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS028.txt new file mode 100644 index 0000000000000000000000000000000000000000..3e9225b03f46d302e11e5c468cf7d02d5f2861cd --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS028.txt @@ -0,0 +1,3 @@ +[license] +code = "CS028C 30453505320" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS030.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS030.txt new file mode 100644 index 0000000000000000000000000000000000000000..e353c10358c9665318da5a6bd7789e23d1da5ad2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS030.txt @@ -0,0 +1,3 @@ +[license] +code = "CS030C 11609048933" +version = 31600002 \ No newline at end of file diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS031.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS031.txt new file mode 100644 index 0000000000000000000000000000000000000000..a8f527de3d9126a4068f80ed338f1af8525afc7d --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS031.txt @@ -0,0 +1,3 @@ +[license] +code = "CS031C 13220147749" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS032.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS032.txt new file mode 100644 index 0000000000000000000000000000000000000000..38bba2e68af2b2e1e5507ad4fae256e824525d85 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS032.txt @@ -0,0 +1,3 @@ +[license] +code = "CS032C 00071147248" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS101.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS101.txt new file mode 100644 index 0000000000000000000000000000000000000000..7a432b0c5d1c3897e5286c72549b664201d05e6a --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS101.txt @@ -0,0 +1,3 @@ +[license] +code = "CS101C 80881277975" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS103.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS103.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab3473728b2f0a81660ded0931cde012db58bb68 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS103.txt @@ -0,0 +1,3 @@ +[license] +code = "CS103C 02132205423" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS201.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS201.txt new file mode 100644 index 0000000000000000000000000000000000000000..9cec43e163e830751d900ad3df52a8a15fb112fd --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS201.txt @@ -0,0 +1,3 @@ +[license] +code = "CS201C 00208803188" +version = 31600002 diff --git 
a/MAC/Deployment/data/PVSS/License/HWCode_CS301.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS301.txt new file mode 100644 index 0000000000000000000000000000000000000000..5b8e70d93153ad04b91e45ef5201b3350ba1a04b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS301.txt @@ -0,0 +1,3 @@ +[license] +code = "CS301C 92439461547" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS302.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS302.txt new file mode 100644 index 0000000000000000000000000000000000000000..77f3f09b2f6104519565e413ae5e0557e2e74e44 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS302.txt @@ -0,0 +1,3 @@ +[license] +code = "CS302C 32748977553" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS401.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS401.txt new file mode 100644 index 0000000000000000000000000000000000000000..6e53797e39a72293b80377a21466556841f1b938 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS401.txt @@ -0,0 +1,3 @@ +[license] +code = "CS401C 90578161027" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_CS501.txt b/MAC/Deployment/data/PVSS/License/HWCode_CS501.txt new file mode 100644 index 0000000000000000000000000000000000000000..d672966471fe64e618bec6ce37e8b54e5c320822 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_CS501.txt @@ -0,0 +1,3 @@ +[license] +code = "CS501C 52647876265" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_DE601.txt b/MAC/Deployment/data/PVSS/License/HWCode_DE601.txt new file mode 100644 index 0000000000000000000000000000000000000000..4af88cc680144fdba40d55ba2de1bcb21cd89775 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_DE601.txt @@ -0,0 +1,3 @@ +[license] +code = "DE601C 41152944617" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_DE602.txt b/MAC/Deployment/data/PVSS/License/HWCode_DE602.txt new file mode 100644 index 0000000000000000000000000000000000000000..9d244c9bd9bba04bde8de8b6f06e504558c6386c --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_DE602.txt @@ -0,0 +1,3 @@ +[license] +code = "DE602C 94024951745" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_DE603.txt b/MAC/Deployment/data/PVSS/License/HWCode_DE603.txt new file mode 100644 index 0000000000000000000000000000000000000000..5d09e017bbc46bf26f18682b6f533f5373a68512 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_DE603.txt @@ -0,0 +1,3 @@ +[license] +code = "DE603C 32749300152" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_DE604.txt b/MAC/Deployment/data/PVSS/License/HWCode_DE604.txt new file mode 100644 index 0000000000000000000000000000000000000000..557b7bd637b0699407aa31235e72335144efc9e9 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_DE604.txt @@ -0,0 +1,3 @@ +[license] +code = "DE604C 40567813462" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_DE605.txt b/MAC/Deployment/data/PVSS/License/HWCode_DE605.txt new file mode 100644 index 0000000000000000000000000000000000000000..52e46bfdab993f94a5247c641c2fb50ca03fc119 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_DE605.txt @@ -0,0 +1,3 @@ +[license] +code = "DE605C 20494965203" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_DE609.txt b/MAC/Deployment/data/PVSS/License/HWCode_DE609.txt new file mode 100644 index 0000000000000000000000000000000000000000..0fd09d3f3bd490ff88a9905811635c6127f631d4 
--- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_DE609.txt @@ -0,0 +1,3 @@ +[license] +code = "DE609C 10606997699" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_FR606.txt b/MAC/Deployment/data/PVSS/License/HWCode_FR606.txt new file mode 100644 index 0000000000000000000000000000000000000000..d7efcefaec1365ecc984b7ed1e9315ce5c3948cd --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_FR606.txt @@ -0,0 +1,3 @@ +[license] +code = "FR606C 50217647730" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_IE613.txt b/MAC/Deployment/data/PVSS/License/HWCode_IE613.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2a36455f6bf4bce3ea0e7936dfb36e19ebaf21e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_IE613.txt @@ -0,0 +1,3 @@ +[license] +code = "IE613C 20366292679" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_LV614.txt b/MAC/Deployment/data/PVSS/License/HWCode_LV614.txt new file mode 100644 index 0000000000000000000000000000000000000000..0536ff3b0e546ddad4a107fef8799ee5670d3340 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_LV614.txt @@ -0,0 +1,3 @@ +[license] +code = "LV614C 33179529551" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_MCU001.txt b/MAC/Deployment/data/PVSS/License/HWCode_MCU001.txt new file mode 100644 index 0000000000000000000000000000000000000000..26f3ee2fc77826b470ea1e31db858dabce53e02b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_MCU001.txt @@ -0,0 +1,3 @@ +[license] +code = "mcu001 51476810367" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_MCU199.txt b/MAC/Deployment/data/PVSS/License/HWCode_MCU199.txt new file mode 100644 index 0000000000000000000000000000000000000000..c80d5ce7500513a90d6da64e4daa8cc64fce6378 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_MCU199.txt @@ -0,0 +1 @@ +mcu199.control.lofar 42457734275 \ No newline at end of file diff --git a/MAC/Deployment/data/PVSS/License/HWCode_PL610.txt b/MAC/Deployment/data/PVSS/License/HWCode_PL610.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69d75952b6ddcb262b9fdf773d75d2abc1460c1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_PL610.txt @@ -0,0 +1,3 @@ +[license] +code = "PL610C 42422077796" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_PL611.txt b/MAC/Deployment/data/PVSS/License/HWCode_PL611.txt new file mode 100644 index 0000000000000000000000000000000000000000..1813a6c6299b1d175dc0bc55a0fc2dabe9bb64d8 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_PL611.txt @@ -0,0 +1,3 @@ +[license] +code = "PL611C 13047757712" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_PL612.txt b/MAC/Deployment/data/PVSS/License/HWCode_PL612.txt new file mode 100644 index 0000000000000000000000000000000000000000..88f53cf467d0e6c01558ad1709922f7cdba2a377 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_PL612.txt @@ -0,0 +1,3 @@ +[license] +code = "PL612C 04248823065" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS106.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS106.txt new file mode 100644 index 0000000000000000000000000000000000000000..285d9e93449b3c2a16a69ae9b97f73bcc0a2dfec --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS106.txt @@ -0,0 +1,3 @@ +[license] +code = "RS106C 41388487490" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS205.txt 
b/MAC/Deployment/data/PVSS/License/HWCode_RS205.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2fbf2f487ab4ba746c23072cd45049caa188077 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS205.txt @@ -0,0 +1,3 @@ +[license] +code = "RS205C 73301862695" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS208.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS208.txt new file mode 100644 index 0000000000000000000000000000000000000000..9ea93136684e2e072aae073fc3869bd1f3055658 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS208.txt @@ -0,0 +1,3 @@ +[license] +code = "RS208C 32152374898" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS210.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS210.txt new file mode 100644 index 0000000000000000000000000000000000000000..f04f976526340454363a0210abbdbd362d5708d3 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS210.txt @@ -0,0 +1,3 @@ +[license] +code = "RS210C 34190877979" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS305.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS305.txt new file mode 100644 index 0000000000000000000000000000000000000000..bcc041e8de02e9b8cb15b965db47cf1d21205b0d --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS305.txt @@ -0,0 +1,3 @@ +[license] +code = "RS305C 13083419258" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS306.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS306.txt new file mode 100644 index 0000000000000000000000000000000000000000..2abde13e5a56473fbbaae451eebacf821304a73d --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS306.txt @@ -0,0 +1,3 @@ +[license] +code = "RS306C 33768362806" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS307.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS307.txt new file mode 100644 index 0000000000000000000000000000000000000000..6ed109f6c9724d01bf4c82a7681b2a5d88e41dbb --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS307.txt @@ -0,0 +1,3 @@ +[license] +code = "RS307C 01042565652" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS310.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS310.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c0cdc81f21164c1ef996988f96e22866e19d52f --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS310.txt @@ -0,0 +1,3 @@ +[license] +code = "RS310C 23920274281" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS406.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS406.txt new file mode 100644 index 0000000000000000000000000000000000000000..982f1eea676652e3ecfb484c6782b6ccb968b266 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS406.txt @@ -0,0 +1,3 @@ +[license] +code = "RS406C 51252723418" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS407.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS407.txt new file mode 100644 index 0000000000000000000000000000000000000000..b350788ef2d215a8604f9f91a02e06de1ef33722 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS407.txt @@ -0,0 +1,3 @@ +[license] +code = "RS407C 00023207235" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS409.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS409.txt new file mode 100644 index 0000000000000000000000000000000000000000..d958107de98d4b3e726a4acab12403717936b83c --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/HWCode_RS409.txt @@ -0,0 +1,3 @@ +[license] +code = "RS409C 34160248527" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS503.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS503.txt new file mode 100644 index 0000000000000000000000000000000000000000..e42c2fcbb930a1fd4b8d8528a1343d5022f58d19 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS503.txt @@ -0,0 +1,3 @@ +[license] +code = "RS503C 02456868944" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS508.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS508.txt new file mode 100644 index 0000000000000000000000000000000000000000..13e512f4975fa1695fe2bdcb93cff23137571dd1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS508.txt @@ -0,0 +1,3 @@ +[license] +code = "RS508C 54198918182" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_RS509.txt b/MAC/Deployment/data/PVSS/License/HWCode_RS509.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2d23ced9f163f1f00fe69dcfadde938e876e29e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_RS509.txt @@ -0,0 +1,3 @@ +[license] +code = "RS509C 20468846507" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_SE607.txt b/MAC/Deployment/data/PVSS/License/HWCode_SE607.txt new file mode 100644 index 0000000000000000000000000000000000000000..9f976fd66dffb95098b931f7c0961b9eaa77bc35 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_SE607.txt @@ -0,0 +1,3 @@ +[license] +code = "SE607C 12902783523" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/HWCode_UK608.txt b/MAC/Deployment/data/PVSS/License/HWCode_UK608.txt new file mode 100644 index 0000000000000000000000000000000000000000..642a068a841434b10e539384c866201346023bc2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/HWCode_UK608.txt @@ -0,0 +1,3 @@ +[license] +code = "UK608C 50519361682" +version = 31600002 diff --git a/MAC/Deployment/data/PVSS/License/Readme.txt b/MAC/Deployment/data/PVSS/License/Readme.txt index 0242960de84860f735a9d563e70cbfbe96357afe..37d66de0c8057a4854a405ea63ccdafc3e0de303 100644 --- a/MAC/Deployment/data/PVSS/License/Readme.txt +++ b/MAC/Deployment/data/PVSS/License/Readme.txt @@ -1,3 +1,4 @@ Example command: -WCCILtoolLicense.exe -shield Astron_Central_1_shield.txt wop64-Apertif-3_16_option.txt > shield.wop64.txt \ No newline at end of file +WCCILtoolLicense.exe -shield Astron_Central_1_shield.txt options_MCU199_3.16.txt > shield_MCU199_3.16.txt +WCCILtoolLicense.exe -shield Astron_Station_1_shield.txt options_CS301_3.16.txt > shield_CS301_3.16.txt \ No newline at end of file diff --git a/MAC/Deployment/data/PVSS/License/Stations.rtu b/MAC/Deployment/data/PVSS/License/Stations.rtu new file mode 100644 index 0000000000000000000000000000000000000000..cc89ad5c85b494175b290bcab0eb687a2561c323 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/Stations.rtu @@ -0,0 +1,9 @@ +[WIBU-SYSTEMS Control File] +Guid={00070001-0000-1100-8002-0000C06B5161} +Specification=WIBU-KEY Remote Programming Update File +Version=1.00 + +[Contents] +; 1 command for WIBU-BOX with Serial Number 9-5510497: +N5qqe 010NG NNNG9 5NNNX FXCK2 V21C1 +98F1V hX
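The Readme.txt update above captures the license workflow: WCCILtoolLicense.exe combines a base shield file with a host's options_*_3.16.txt and writes the shield_*_3.16.txt file that is installed on that host. Below is a minimal sketch of running that step for every options file in this directory; it assumes Python, and it generalizes the two Readme examples by assuming MCU hosts take the Astron_Central_1_shield.txt base while all other hosts take Astron_Station_1_shield.txt.

# gen_shields.py -- illustrative sketch only, not part of this change set.
# Runs the WCCILtoolLicense.exe command from Readme.txt once per options file.
import subprocess
from pathlib import Path

LICENSE_DIR = Path("MAC/Deployment/data/PVSS/License")

for options_file in sorted(LICENSE_DIR.glob("options_*_3.16.txt")):
    host = options_file.name.split("_")[1]   # e.g. "CS301" or "MCU199"
    # Assumption: MCU hosts use the Central base shield, everything else
    # the Station one (matching the two Readme examples above).
    if host.startswith("MCU"):
        shield_base = "Astron_Central_1_shield.txt"
    else:
        shield_base = "Astron_Station_1_shield.txt"
    out_file = LICENSE_DIR / f"shield_{host}_3.16.txt"
    with open(out_file, "w") as out:
        subprocess.run(
            ["WCCILtoolLicense.exe", "-shield", shield_base, options_file.name],
            cwd=LICENSE_DIR, stdout=out, check=True)

diff --git a/MAC/Deployment/data/PVSS/License/options_CCU001_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CCU001_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..466fceec35bd9693ed0484f1027d87de06f2c879 --- /dev/null +++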
b/MAC/Deployment/data/PVSS/License/options_CCU001_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "ccu001 14006546712" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CCU System" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CCU199_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CCU199_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d7157199186b339a6e1376ad80c5ba0572f9b49 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CCU199_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "ccu199.control.lofar 61426158946" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 test CCU System" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS001_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS001_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..134f5a60b420cf2cffcb2cfe2fbda2623fa2966f --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS001_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS001C 43942334873" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS001" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS002_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS002_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..a307f6d584e5d15d630d3762a2901e48a645fef2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS002_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS002C 31235135861" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS002" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS003_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS003_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a194a52c11ea5cbdf12f04ecf5ae3c54a735f85 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS003_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS003C 14221960083" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS003" +redundancy = 0 +ui = 1 +para 
= 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS004_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS004_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..1fa73ec4d23ef57824f05858c3fcbe5e5c6a23c9 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS004_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS004C 21132892246" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS004" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS005_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS005_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..fcd810858b0e68556334fa2962ca144d3c630cac --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS005_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS005C 10492674919" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS005" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS006_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS006_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e4df90e78fd04aa4ba5dd66ed79ca9bd2770ee5 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS006_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS006C 42774937483" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS006" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS007_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS007_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..5c2768b84a3f4855c0d49fac4c10d151e1f964ab --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS007_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS007C 31443220661" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS007" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 
0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS011_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS011_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca9f7e8cf841e0da25b6eeb971fc84b338d8a22e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS011_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS011C 00064008033" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS011" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS013_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS013_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..2f5377b30be57b3973f78a253cedea85e4228cfc --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS013_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS013C 41872072962" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS013" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS017_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS017_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee0a896faa43b337718da4be27be959e1fc353be --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS017_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS017C 13559240397" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS017" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS021_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS021_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..4cc39d7c9293d3efdfb365c5b4d25a67fe1493df --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS021_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS021C 50629572310" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS021" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS024_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS024_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..9406493721c136680eb9a87bfd1f66a9c842dffa --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/options_CS024_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS024C 23187337798" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS024" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS026_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS026_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..0731424b8c5838fbcf58bd78833491bdaf6f7377 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS026_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS026C 24188583191" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS026" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS028_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS028_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..d619b52d7915697f8d98c14252d3c2355368c2cc --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS028_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS028C 30453505320" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS028" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS030_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS030_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..7295e29264f428173205cc52582cf5d9952c56f5 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS030_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS030C 11609048933" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS030" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS031_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS031_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..70e6ffa43c3458b79b910c1f572e789ed185563f --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS031_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS031C 13220147749" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS031" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 
80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS032_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS032_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..fb3591c9d082761c0889cb3e29cab2ff18d06a5b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS032_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS032C 00071147248" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS032" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS101_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS101_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2281bdb451e729826927057e2dceb647a36a8fc --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS101_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS101C 80881277975" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS101" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS103_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS103_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..519dcb0b4ffdbbaebcc78a3e3fbb6e41247e146e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS103_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS103C 02132205423" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS103" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS201_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS201_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..77375ebe5f365ccbf8fa8950013f75bc130d0118 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS201_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS201C 00208803188" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS201" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git 
a/MAC/Deployment/data/PVSS/License/options_CS301_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS301_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..a0ec569dc987cd5a237f27228baa9977ab1ef7b2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS301_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS301C 92439461547" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS301" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS302_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS302_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..58b9a5eef126b8e67f16506b233dcbd3d22cf1de --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS302_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS302C 32748977553" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS302" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS401_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS401_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ade009c65cadacf24f86709c99c3372b04c20bd0 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS401_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS401C 90578161027" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS401" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_CS501_3.16.txt b/MAC/Deployment/data/PVSS/License/options_CS501_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..25122a1b545c3ec0ae5b04cecc6e26a58ba2f096 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_CS501_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "CS501C 52647876265" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 CS501" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_DE601_3.16.txt b/MAC/Deployment/data/PVSS/License/options_DE601_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..74c419e0dd671c85a10cab4dbed55cf380c4afe4 --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/options_DE601_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "DE601C 41152944617" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 DE601" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_DE602_3.16.txt b/MAC/Deployment/data/PVSS/License/options_DE602_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..802f25a80e5301d3571243d7b93c4a9514a49941 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_DE602_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "DE602C 94024951745" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 DE602" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_DE603_3.16.txt b/MAC/Deployment/data/PVSS/License/options_DE603_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c24358fb1251822548933376039f047f00f2c2aa --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_DE603_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "DE603C 32749300152" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 DE603" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_DE604_3.16.txt b/MAC/Deployment/data/PVSS/License/options_DE604_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..b73b6afb2c2614c51cba40c07b20bca3e56ef43c --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_DE604_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "DE604C 40567813462" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 DE604" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_DE605_3.16.txt b/MAC/Deployment/data/PVSS/License/options_DE605_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..b8aef42249209d6eca9dcef4f917f8462b1ca9f9 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_DE605_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "DE605C 20494965203" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 DE605" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 
80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_DE609_3.16.txt b/MAC/Deployment/data/PVSS/License/options_DE609_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..21c192771fcc8fbf0852490b1b02de51c961b141 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_DE609_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "DE609C 10606997699" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 DE609" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_FR606_3.16.txt b/MAC/Deployment/data/PVSS/License/options_FR606_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..895c011f649df355552d18dd8b853c1c6d5988d2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_FR606_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "FR606C 50217647730" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 FR606" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_IE613_3.16.txt b/MAC/Deployment/data/PVSS/License/options_IE613_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ff5216253579a2d53defc9acbbd32d2a860e0c29 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_IE613_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "IE613C 20366292679" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 IE613" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_LV614_3.16.txt b/MAC/Deployment/data/PVSS/License/options_LV614_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ad86c9ab040e31f5c283725d6b6178ba2699ef61 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_LV614_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "LV614C 33179529551" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 LV614" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + 
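All station options files above share one template (redundancy = 0, ui = 1, ios = 4000, maintenance = 1, scheduler = 1), with only the code and comment varying per host, and each code is expected to match the one in the corresponding HWCode_<HOST>.txt; the MCU options files that follow switch to the central-server profile (sn "471_3031_Astron_Central_5_2", redundancy = 1, ui = 14, ios = 100000, maintenance = 0, scheduler = 0). The following cross-check of the code values is only a sketch under the same path assumptions as above; note that HWCode_MCU199.txt is a bare host/code line rather than an INI file, so it is reported and skipped.

# check_codes.py -- illustrative sketch only, not part of this change set.
# Asserts that each options_<HOST>_3.16.txt and HWCode_<HOST>.txt agree
# on the 'code' value ("<host tag> <hardware code>").
import configparser
from pathlib import Path

LICENSE_DIR = Path("MAC/Deployment/data/PVSS/License")

def read_code(path):
    # inline_comment_prefixes=None (the default) keeps the ';' inside
    # values such as 'expire = 0000.00.00;00:00:00,000' intact.
    parser = configparser.ConfigParser(inline_comment_prefixes=None)
    parser.read(path)
    return parser.get("license", "code").strip('"')

for options_file in sorted(LICENSE_DIR.glob("options_*_3.16.txt")):
    host = options_file.name.split("_")[1]
    hwcode_file = LICENSE_DIR / f"HWCode_{host}.txt"
    if not hwcode_file.exists():
        print(f"{host}: no HWCode file committed")
        continue
    try:
        same = read_code(options_file) == read_code(hwcode_file)
        status = "OK" if same else "MISMATCH"
    except configparser.Error:   # e.g. the bare-line HWCode_MCU199.txt
        status = "skipped (HWCode file is not INI-formatted)"
    print(f"{host}: {status}")

diff --git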
a/MAC/Deployment/data/PVSS/License/options_MCU001_3.16.txt b/MAC/Deployment/data/PVSS/License/options_MCU001_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..f30afd205397e817b11b3a8fa7ea088f65050881 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_MCU001_3.16.txt @@ -0,0 +1,31 @@ +[license] +code = "mcu001 51476810367" +version = 31600002 +sn = "471_3031_Astron_Central_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 MCUmain System" +redundancy = 1 +ui = 14 +para = 4 +dde = 5 +event = 1 +ios = 100000 +ssi = 0 +api = 80 +excelreport = 5 +http = 19 +infoserver = 5000 +comcenter = 5 +maintenance = 0 +scheduler = 0 +s7 = 1 +distributed = 255 +ultralight = 5 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_MCU199_3.16.txt b/MAC/Deployment/data/PVSS/License/options_MCU199_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..b07aed681f5dd056ff49de12786bb51ae4756c5d --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_MCU199_3.16.txt @@ -0,0 +1,31 @@ +[license] +code = "mcu199.control.lofar 42457734275" +version = 31600002 +sn = "471_3031_Astron_Central_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 test MCUmain System" +redundancy = 1 +ui = 14 +para = 4 +dde = 5 +event = 1 +ios = 100000 +ssi = 0 +api = 80 +excelreport = 5 +http = 19 +infoserver = 5000 +comcenter = 5 +maintenance = 0 +scheduler = 0 +s7 = 1 +distributed = 255 +ultralight = 5 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_PL610_3.16.txt b/MAC/Deployment/data/PVSS/License/options_PL610_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..61967f330939b918a5f012f5e99aef64e1af5936 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_PL610_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "PL610C 42422077796" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 PL610" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_PL611_3.16.txt b/MAC/Deployment/data/PVSS/License/options_PL611_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..986857ad4833dd84f9d0fc92fb291e0354995cd4 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_PL611_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "PL611C 13047757712" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 PL611" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_PL612_3.16.txt b/MAC/Deployment/data/PVSS/License/options_PL612_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..90d6a3d7f4828cf385fdd165da650243bef1af9f --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/options_PL612_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "PL612C 04248823065" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 PL612" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS106_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS106_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8a4d7c256da8a67be8cdf44dcf4a5b027fa1f38 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS106_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS106C 41388487490" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS106" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS205_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS205_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..5e099c7a6044aa433b33adcca5714fdbbc983e06 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS205_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS205C 73301862695" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS205" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS208_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS208_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..61580efa50b746d774c084c6378c890ef53e5df0 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS208_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS208C 32152374898" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS208" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS210_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS210_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..7bd32b023ae69679261fd87e7e4edee55cb2aa02 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS210_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS210C 34190877979" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS210" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 
80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS305_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS305_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..25236f01e8336ad682ad861748f6eb5ad4af331b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS305_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS305C 13083419258" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS305" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS306_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS306_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..7663b88e907ac1e96281f122f8de8ea12be07d70 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS306_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS306C 33768362806" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS306" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS307_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS307_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..a70d75ff731376932c9282b57a60ec42a59d064b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS307_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS307C 01042565652" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS307" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS310_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS310_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c242bbd4baef8daa07f11d6f3610f25e42f9bcec --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS310_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS310C 23920274281" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS310" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git 
a/MAC/Deployment/data/PVSS/License/options_RS406_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS406_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..6f7e2a2c43e1ce65cde06d53e2767e48080beb86 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS406_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS406C 51252723418" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS406" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS407_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS407_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..79416968f0512b40cd0748580b8f1ddbda00fc2a --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS407_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS407C 00023207235" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS407" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS409_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS409_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..e540355dd2d0ba34acc5de060fb6178d65bea8da --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS409_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS409C 34160248527" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS409" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS503_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS503_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca6982703ada2c91796bf408ba28f391da821638 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS503_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS503C 02456868944" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS503" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS508_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS508_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b8b18b4249faeae3acc375c9eb2014397260980 --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/options_RS508_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS508C 54198918182" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS508" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_RS509_3.16.txt b/MAC/Deployment/data/PVSS/License/options_RS509_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..36b5c359a3cae4e3dc8e5d2a61075d18f2d109ae --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_RS509_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "RS509C 20468846507" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 RS509" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_SE607_3.16.txt b/MAC/Deployment/data/PVSS/License/options_SE607_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3e352e02856465a5aa17be64573c93384ca87a2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_SE607_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "SE607C 12902783523" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 SE607" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/options_UK608_3.16.txt b/MAC/Deployment/data/PVSS/License/options_UK608_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ba4d36ed6d31ca3dc68a0786a92ca177cb691fa --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/options_UK608_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "UK608C 50519361682" +version = 31600002 +sn = "471_3031_Astron_Station_5_2" +expire = 0000.00.00;00:00:00,000 +comment = "Centos7 UK608" +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +ios = 4000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +ssi = 0 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CCU001_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CCU001_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..241e2f9abfe1216c81e6cb42f37cf6a1527e4ef1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CCU001_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "ccu001 40470136751" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/8" +date = 2020.04.09;12:15:11,000 +comment = "Centos7 CCU System" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 
+para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CCU199_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CCU199_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b1c875d1ed8a92659f863a811bc7a0cab2f5a3d --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CCU199_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "ccu199.control.lofar 80507640383" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/1" +date = 2020.04.08;14:58:31,000 +comment = "Centos7 test CCU System" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS001_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS001_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..d7cadcb9bc53419c86a8da53211dc949050b7262 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS001_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS001C 10456784610" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/2" +date = 2020.04.09;09:07:02,000 +comment = "Centos7 CS001" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS002_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS002_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea04a47ee43683b2ff35d691c7b0baa62fd1b4b3 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS002_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS002C 80447254084" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/3" +date = 2020.04.09;09:15:10,000 +comment = "Centos7 CS002" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS003_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS003_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..23b9ed58a804d06948b5cfb7a36935011d6362f3 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS003_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS003C 30475116407" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/4" +date = 2020.04.09;10:16:38,000 +comment = "Centos7 CS003" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 
+scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS004_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS004_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..d1777ff50c8fb493dac248fee14a67ea9fc85a60 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS004_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS004C 00342489231" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/5" +date = 2020.04.09;11:13:31,000 +comment = "Centos7 CS004" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS005_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS005_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..1e45bcfaf0e13a70db9008a185f52112484c78c5 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS005_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS005C 50556624003" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/6" +date = 2020.04.09;11:29:37,000 +comment = "Centos7 CS005" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS006_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS006_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..13a67e12898dfc0ec83d6c8aaa1b3087c759adee --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS006_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS006C 00106307845" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/7" +date = 2020.04.09;12:11:49,000 +comment = "Centos7 CS006" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS007_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS007_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c5be8733bce352e7e9378933d5b93bbdfd276015 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS007_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS007C 20239096812" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/9" +date = 2020.04.09;13:48:48,000 +comment = "Centos7 CS007" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff 
--git a/MAC/Deployment/data/PVSS/License/shield_CS011_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS011_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc4898cb472f5d3ca5e33054878dcb2d580ef76e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS011_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS011C 40450842975" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/10" +date = 2020.04.09;13:59:13,000 +comment = "Centos7 CS011" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS013_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS013_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..36a6b5496a47b605c7512bc9a248d776ceb2e29b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS013_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS013C 30019775961" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/11" +date = 2020.04.09;14:09:29,000 +comment = "Centos7 CS013" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS017_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS017_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c6e364ee5c14b5a5864d3a0ab4ec589be1c68273 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS017_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS017C 10217011942" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/12" +date = 2020.04.09;14:47:46,000 +comment = "Centos7 CS017" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS021_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS021_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..23d932228ff4f1838637d9b53949399c83cff2b1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS021_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS021C 30140646590" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/13" +date = 2020.04.09;14:56:03,000 +comment = "Centos7 CS021" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS024_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS024_3.16.txt new file mode 100644 index 
0000000000000000000000000000000000000000..451b972b2961a7a7a9b1492b4d6a357de90244da --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS024_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS024C 90444348222" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/14" +date = 2020.04.09;15:07:08,000 +comment = "Centos7 CS024" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS026_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS026_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..07d2ca5429c68e0ee1d846a1792c25568042d9da --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS026_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS026C 70038930262" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/15" +date = 2020.04.09;15:40:17,000 +comment = "Centos7 CS026" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS028_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS028_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..2aab065c21aa2684edca86d3a34049e9ec11ff26 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS028_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS028C 20194119711" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/16" +date = 2020.04.10;08:16:00,000 +comment = "Centos7 CS028" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS030_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS030_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..5495f9ec8c21711705910f1eb72e109a41746756 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS030_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS030C 60016247642" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/17" +date = 2020.04.10;08:22:34,000 +comment = "Centos7 CS030" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS031_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS031_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..9da8f6da60c785a2f2db574e6966d18f18a380ce --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS031_3.16.txt @@ -0,0 
+1,33 @@ +[license] +code = "CS031C 00348849291" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/18" +date = 2020.04.10;08:27:07,000 +comment = "Centos7 CS031" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS032_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS032_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..100674a8c497bbf169ad3742aca60e0e9f3df865 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS032_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS032C 50245891237" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/19" +date = 2020.04.10;08:31:47,000 +comment = "Centos7 CS032" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS101_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS101_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..f07a468514c669d92f99268dd9c9f6d72481b1f5 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS101_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS101C 50569632637" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/20" +date = 2020.04.10;08:35:44,000 +comment = "Centos7 CS101" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS103_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS103_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c996ad229d4606702b8b8bc4daa7a127e75b2ed7 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS103_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS103C 90538656435" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/21" +date = 2020.04.10;08:39:46,000 +comment = "Centos7 CS103" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS201_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS201_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..5cd19641107592567b90c036a843358356d91eeb --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS201_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS201C 10095958003" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/22" +date = 2020.04.10;08:43:42,000 +comment = "Centos7 CS201" 
+expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS301_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS301_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3674d3bba5747b0e11e88e8b10ad1402f0aacbb --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS301_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS301C 30434578596" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/23" +date = 2020.04.10;08:55:41,000 +comment = "Centos7 CS301" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS302_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS302_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae3a2fe4cff7df4a968069cfe170904b630d9416 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS302_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS302C 20172722652" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/24" +date = 2020.04.10;09:00:01,000 +comment = "Centos7 CS302" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS401_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS401_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..737499db06c619723791a77214359b8f97852f7c --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS401_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS401C 40191825945" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/25" +date = 2020.04.10;09:04:35,000 +comment = "Centos7 CS401" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_CS501_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_CS501_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..1f8905efae108ce05eaad812a05bd86bb8c3c188 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_CS501_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "CS501C 20385608525" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/26" +date = 2020.04.10;09:08:48,000 +comment = "Centos7 CS501" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 
+comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_DE601_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_DE601_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..87a9233a3124f00f5d61509bbd46cc8755f33705 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_DE601_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "DE601C 10033383939" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/41" +date = 2020.04.10;14:16:47,000 +comment = "Centos7 DE601" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_DE602_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_DE602_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..85d6396219f7e20b5da201b3a21d39e69042dcd8 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_DE602_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "DE602C 30072366803" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/42" +date = 2020.04.10;14:20:43,000 +comment = "Centos7 DE602" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_DE603_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_DE603_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..2772a772df58d59d8f79868a615bf7c4ab94e6f2 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_DE603_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "DE603C 10146080536" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/43" +date = 2020.04.10;14:24:25,000 +comment = "Centos7 DE603" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_DE604_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_DE604_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..7c60f2bfbb634cf0f11a2f3211a49c1d5eb2b539 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_DE604_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "DE604C 20000172152" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/44" +date = 2020.04.10;14:28:13,000 +comment = "Centos7 DE604" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote 
= 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_DE605_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_DE605_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..66a6302a2c795743a6fbf37babc221346d21cf83 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_DE605_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "DE605C 30270172639" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/45" +date = 2020.04.10;14:32:47,000 +comment = "Centos7 DE605" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_DE609_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_DE609_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc84d65bae2d1dbd9c3e738c5be965ae43cf333a --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_DE609_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "DE609C 90330703195" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/49" +date = 2020.04.10;15:16:43,000 +comment = "Centos7 DE609" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_FR606_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_FR606_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..053f9948016146169ef992a43936dbeaa2dca439 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_FR606_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "FR606C 50523294260" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/46" +date = 2020.04.10;14:37:40,000 +comment = "Centos7 FR606" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_IE613_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_IE613_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a5f7c31e71122d79027811f026ff320ba3fef2f --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_IE613_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "IE613C 80092027996" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/54" +date = 2020.04.12;19:29:09,000 +comment = "Centos7 IE613" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_LV614_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_LV614_3.16.txt new file 
mode 100644 index 0000000000000000000000000000000000000000..c72098324e8bed655ea64261c39a245cf218b0d7 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_LV614_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "LV614C 20245203426" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/53" +date = 2020.04.10;15:44:54,000 +comment = "Centos7 LV614" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_MCU001_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_MCU001_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..0b40b9d28169e6ecfddb2b4de4a10212c7055e86 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_MCU001_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "mcu001 90351139072" +version = 31600002 +sn = "471_3031_Astron_Central_5_2/4" +date = 2020.04.09;12:05:23,000 +comment = "Centos7 MCUmain System" +expire = 0000.00.00;00:00:00,000 +redundancy = 1 +ui = 14 +para = 4 +dde = 5 +event = 1 +ios = 100000 +ssi = 0 +api = 80 +excelreport = 5 +http = 19 +infoserver = 5000 +comcenter = 5 +maintenance = 0 +scheduler = 0 +s7 = 1 +distributed = 255 +ultralight = 5 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_MCU199_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_MCU199_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..63af75ba28ce5bf8220a010e9df6e8ba5a1b8637 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_MCU199_3.16.txt @@ -0,0 +1,32 @@ +[license] +code = "mcu199.control.lofar 00464183161" +version = 31600002 +sn = "471_3031_Astron_Central_5_2/3" +date = 2020.04.08;13:18:16,000 +comment = "Centos7 test MCUmain System" +expire = 0000.00.00;00:00:00,000 +redundancy = 1 +ui = 14 +para = 4 +dde = 5 +event = 1 +ios = 100000 +ssi = 0 +api = 80 +excelreport = 5 +http = 19 +infoserver = 5000 +comcenter = 5 +maintenance = 0 +scheduler = 0 +s7 = 1 +distributed = 255 +ultralight = 5 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_PL610_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_PL610_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c70946ae9086f7b13d84c514d0885a3ad558e73b --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_PL610_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "PL610C 60546656721" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/50" +date = 2020.04.10;15:24:03,000 +comment = "Centos7 PL610" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_PL611_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_PL611_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..28ace4a94c6d2673cfca6a231775fc1ed007c7e2 --- /dev/null +++ 
b/MAC/Deployment/data/PVSS/License/shield_PL611_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "PL611C 90272092612" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/51" +date = 2020.04.10;15:29:42,000 +comment = "Centos7 PL611" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_PL612_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_PL612_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..78c76dbc56954add0a19f6bcd969febe3746416f --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_PL612_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "PL612C 50519699790" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/52" +date = 2020.04.10;15:36:37,000 +comment = "Centos7 PL612" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS106_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS106_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..c983e6141cb46e8f6e926d75c3ea86eda275ead1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS106_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS106C 20591522242" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/27" +date = 2020.04.10;10:04:13,000 +comment = "Centos7 RS106" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS205_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS205_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..e27f7018284751ba7e670e110da0cc5bca44615d --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS205_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS205C 00273952013" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/28" +date = 2020.04.10;10:08:06,000 +comment = "Centos7 RS205" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS208_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS208_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..17fe2e0bb57fd309fd1a420cbc9e27a102885687 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS208_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS208C 30488082003" +version = 31600002 +sn = 
"471_3031_Astron_Station_5_2/29" +date = 2020.04.10;10:12:07,000 +comment = "Centos7 RS208" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS210_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS210_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a97dec1641e691650d8cd120e832873ac903421 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS210_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS210C 10414998697" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/30" +date = 2020.04.10;10:16:03,000 +comment = "Centos7 RS210" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS305_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS305_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..cbd95a93f41f572f12e446e254167d1966e64327 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS305_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS305C 70237252554" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/31" +date = 2020.04.10;10:19:58,000 +comment = "Centos7 RS305" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS306_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS306_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..14f3dbf3c73b87de661c0b24f4285e9523e4b9f1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS306_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS306C 80535741009" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/32" +date = 2020.04.10;10:30:23,000 +comment = "Centos7 RS306" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS307_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS307_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..f1174b33b7a68354574ef46b260d506412de1f2e --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS307_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS307C 40113807615" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/33" +date = 2020.04.10;10:34:59,000 +comment = "Centos7 RS307" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 
5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS310_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS310_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..90f26ba2c9f80fc6662fa280bdf40444cdbdbe16 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS310_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS310C 10499420922" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/34" +date = 2020.04.10;10:39:00,000 +comment = "Centos7 RS310" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS406_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS406_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..cafd0b5115c1065a74dacaaeaf5182d490fa2221 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS406_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS406C 00211690373" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/35" +date = 2020.04.10;13:07:57,000 +comment = "Centos7 RS406" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS407_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS407_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..bc29a1dd798cd3005c85109d8da4147e4e71916f --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS407_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS407C 50203225436" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/36" +date = 2020.04.10;13:11:02,000 +comment = "Centos7 RS407" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS409_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS409_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee2414d515f2c58507e0c4306fba8fd3bf658ab1 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS409_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS409C 00109942962" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/37" +date = 2020.04.10;13:14:24,000 +comment = "Centos7 RS409" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 
+distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS503_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS503_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb5cbd3773b65ec315e560c12ce473bc43226b68 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS503_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS503C 20107013190" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/38" +date = 2020.04.10;13:23:37,000 +comment = "Centos7 RS503" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_RS508_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS508_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MAC/Deployment/data/PVSS/License/shield_RS509_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_RS509_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..d2943ac7831eb1391fbdbf919e0daeb035729fd4 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_RS509_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "RS509C 40209098529" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/40" +date = 2020.04.10;14:04:47,000 +comment = "Centos7 RS509" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_SE607_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_SE607_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c60a5e72a36cde6ee339a919652841043ec1367 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_SE607_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "SE607C 10547597978" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/47" +date = 2020.04.10;14:42:43,000 +comment = "Centos7 SE607" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 1000 +comcenter = 5 +maintenance = 1 +scheduler = 1 +s7 = 1 +recipe = 1 +distributed = 255 +ultralight = 1 +mobile_app = 1 +s7plus = 8 +uifix = 0 +parafix = 0 +pararemote = 0 +ctrlext = 1 +update = 0 + diff --git a/MAC/Deployment/data/PVSS/License/shield_UK608_3.16.txt b/MAC/Deployment/data/PVSS/License/shield_UK608_3.16.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbc09840693f5062a879283315484963a9556653 --- /dev/null +++ b/MAC/Deployment/data/PVSS/License/shield_UK608_3.16.txt @@ -0,0 +1,33 @@ +[license] +code = "UK608C 20316744537" +version = 31600002 +sn = "471_3031_Astron_Station_5_2/48" +date = 2020.04.10;14:59:34,000 +comment = "Centos7 UK608" +expire = 0000.00.00;00:00:00,000 +redundancy = 0 +ui = 1 +para = 1 +dde = 5 +event = 1 +ios = 4000 +ssi = 0 +api = 80 +excelreport = 5 +http = 0 +infoserver = 
1000
+comcenter = 5
+maintenance = 1
+scheduler = 1
+s7 = 1
+recipe = 1
+distributed = 255
+ultralight = 1
+mobile_app = 1
+s7plus = 8
+uifix = 0
+parafix = 0
+pararemote = 0
+ctrlext = 1
+update = 0
+
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py b/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py
index 337790b19326a5977ca35f001defd1e3c75b2da6..ec65936f4e1dc6276ceac195e6fd88eaf737a54b 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py
@@ -903,6 +903,66 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         predecessor_task = self.radb.getTask(otdb_id=predecessor_otdb_id)
         self.assertEqual([successor_task['id']], predecessor_task['successor_ids'], )
 
+    def test_scheduling_of_trigger_observation_when_running_observation_is_killed(self):
+        '''SW-907: Trigger observation cannot be scheduled when it needs to abort a running observation.
+        When a trigger observation is set to prescheduled, enters the whole do-schedule logic in the resourceassigner,
+        and has to kill a running observation, then the (stupidly implemented) conflict resolution causes the trigger
+        observation to be set back to approved.
+        The resource assigner should be able to handle that, or prevent it.'''
+
+        # prepare: insert a blocking task with a huge claim on storage (directly via the radb, not via the resource_assigner)
+        task_id = self.radb.insertOrUpdateSpecificationAndTask(9876, 9876, 'approved', 'observation',
+                                                               datetime.datetime.utcnow()-datetime.timedelta(days=1),
+                                                               datetime.datetime.utcnow()+datetime.timedelta(days=1),
+                                                               "", "CEP4")['task_id']
+        task = self.radb.getTask(task_id)
+        self.assertEqual('approved', task['status'])
+        cep_storage_resource = next(r for r in self.radb.getResources(resource_types='storage', include_availability=True) if 'CEP4' in r['name'])
+        claim_id = self.radb.insertResourceClaim(cep_storage_resource['id'], task_id, task['starttime'], task['endtime'],
+                                                 0.75*cep_storage_resource['total_capacity'], "", 0)
+        self.assertEqual('approved', self.radb.getTask(task_id)['status'])
+        self.radb.updateTaskAndResourceClaims(task_id, claim_status='claimed', task_status='prescheduled')
+        self.assertEqual('prescheduled', self.radb.getTask(task_id)['status'])
+        self.radb.updateTask(task_id, task_status='scheduled')
+        self.assertEqual('scheduled', self.radb.getTask(task_id)['status'])
+
+        # simulate that the task is running...
+        self.radb.updateTask(task_id, task_status='queued')
+        self.radb.updateTask(task_id, task_status='active')
+        self.assertEqual('active', self.radb.getTask(task_id)['status'])
+
+        # create a second task (caused by a trigger)
+        task2_id = self.radb.insertOrUpdateSpecificationAndTask(8765, 8765, 'approved', 'observation',
+                                                                datetime.datetime.utcnow(),
+                                                                datetime.datetime.utcnow()+datetime.timedelta(hours=1),
+                                                                "", "CEP4")['task_id']
+        task2 = self.radb.getTask(task2_id)
+        self.assertEqual('approved', self.radb.getTask(task2_id)['status'])
+
+        # mimic that a trigger comes in and sets the observation to prescheduled...
+        self.radb.updateTaskAndResourceClaims(task2_id, task_status='prescheduled')
+        self.assertEqual('prescheduled', self.radb.getTask(task2_id)['status'])
+
+        # try to claim some resources (more than available, causing a conflict)
+        claim2_id = self.radb.insertResourceClaim(cep_storage_resource['id'], task2_id, task2['starttime'], task2['endtime'],
+                                                  0.75*cep_storage_resource['total_capacity'], "", 0)
+
+        # this 2nd (trigger) task should not be schedulable (because the running task is in the way)
+        self.assertEqual('conflict', self.radb.getResourceClaims(claim2_id)[0]['status'])
+        self.assertEqual('conflict', self.radb.getTask(task2_id)['status'])
+
+        # now mimic the PriorityScheduler's behaviour, and kill task1, ending it now.
+        self.radb.updateTaskAndResourceClaims(task_id, task_status='aborted', endtime=task2['starttime'])
+
+        # as a result task2 should now be schedulable with a tentative claim, and in prescheduled state.
+        self.assertEqual('tentative', self.radb.getResourceClaims(claim2_id)[0]['status'])
+
+        # THE ROOT CAUSE OF BUG SW-907 is that task2 used to get the approved state via an RADB trigger function.
+        # That has unforeseen side effects in the resourceassigner.
+        # So, let's test here whether the status of task2 is now the expected 'prescheduled', as it was before it went to conflict.
+        self.assertEqual('prescheduled', self.radb.getTask(task2_id)['status'])
+
 
 if __name__ == '__main__':
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql
index a24822796b807ce3f5963758bd38dd0034a77566..0cefd81819af6562b35efb468da71e8ffc2bb361 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql
@@ -11,6 +11,7 @@ $BODY$
 DECLARE
     claim_tentative_status_id int := 0; --beware: hard coded instead of lookup for performance
     claim_claimed_status_id int := 1; --beware: hard coded instead of lookup for performance
+    claim_conflict_status_id int := 2; --beware: hard coded instead of lookup for performance
     task_approved_status_id int := 300; --beware: hard coded instead of lookup for performance
     task_conflict_status_id int := 335; --beware: hard coded instead of lookup for performance
     task_prescheduled_status_id int := 350; --beware: hard coded instead of lookup for performance
@@ -19,13 +20,20 @@ DECLARE
     task_aborted_status_id int := 1100; --beware: hard coded instead of lookup for performance
 BEGIN
     IF NEW.status_id <> OLD.status_id THEN
+        IF OLD.status_id = task_conflict_status_id AND NEW.status_id <> task_conflict_status_id THEN
+            -- bookkeeping: clean up the task_status_before_conflict table for this task
+            DELETE FROM resource_allocation.task_status_before_conflict WHERE task_id = NEW.id;
+        END IF;
+
         IF NEW.status_id = task_scheduled_status_id AND OLD.status_id <> task_prescheduled_status_id THEN
             -- tasks can only be scheduled from the prescheduled state
             RAISE EXCEPTION 'Cannot update task status from % to %', OLD.status_id, NEW.status_id;
         END IF;
 
-        IF OLD.status_id = task_conflict_status_id AND NEW.status_id <> task_approved_status_id THEN
-            RAISE EXCEPTION 'When a task has the conflict status it can has to be set to approved status first by making sure all its claims have no conflict status anymore.';
+        IF OLD.status_id = task_conflict_status_id AND
+           NEW.status_id <> task_approved_status_id AND
+           EXISTS (SELECT id FROM resource_allocation.resource_claim rc WHERE rc.task_id = NEW.id AND rc.status_id = claim_conflict_status_id) THEN
+            RAISE EXCEPTION 'When a task has the conflict status and it has claims in conflict, it has to be set to approved status first by making sure all its claims have no conflict status anymore.';
         END IF;
 
         IF NEW.status_id = task_approved_status_id OR NEW.status_id = task_conflict_status_id THEN
@@ -54,7 +62,7 @@ $BODY$
 ALTER FUNCTION resource_allocation.on_task_updated()
   OWNER TO resourceassignment;
 COMMENT ON FUNCTION resource_allocation.on_task_updated()
-  IS 'function which is called by task table update trigger, which updates all the tasks resource claims to tentative state.';
+  IS 'function which is called by task table update trigger.';
 
 DROP TRIGGER IF EXISTS T_on_task_updated ON resource_allocation.task CASCADE;
 CREATE TRIGGER T_on_task_updated
@@ -62,8 +70,38 @@ CREATE TRIGGER T_on_task_updated
   ON resource_allocation.task
   FOR EACH ROW
   EXECUTE PROCEDURE resource_allocation.on_task_updated();
-COMMENT ON TRIGGER T_on_task_updated ON resource_allocation.task
-  IS 'task table update trigger, calls the resource_allocation.on_task_updated() function.';
+
+---------------------------------------------------------------------------------------------------------------------
+
+CREATE OR REPLACE FUNCTION resource_allocation.on_before_task_status_updated()
+  RETURNS trigger AS
+$BODY$
+DECLARE
+    task_approved_status_id int := 300; --beware: hard coded instead of lookup for performance
+    task_conflict_status_id int := 335; --beware: hard coded instead of lookup for performance
+    task_prescheduled_status_id int := 350; --beware: hard coded instead of lookup for performance
+BEGIN
+    IF NEW.status_id = task_conflict_status_id AND
+       (OLD.status_id = task_approved_status_id OR OLD.status_id = task_prescheduled_status_id) THEN
+        -- bookkeeping: log the previous status_id in the task_status_before_conflict table for this task
+        INSERT INTO resource_allocation.task_status_before_conflict (task_id, status_id) VALUES (OLD.id, OLD.status_id);
+    END IF;
+RETURN NEW;
+END;
+$BODY$
+  LANGUAGE plpgsql VOLATILE
+  COST 100;
+ALTER FUNCTION resource_allocation.on_before_task_status_updated()
+  OWNER TO resourceassignment;
+COMMENT ON FUNCTION resource_allocation.on_before_task_status_updated()
+  IS 'function which is called by T_on_before_task_status_updated trigger.';
+
+DROP TRIGGER IF EXISTS T_on_before_task_status_updated ON resource_allocation.task CASCADE;
+CREATE TRIGGER T_on_before_task_status_updated
+  BEFORE UPDATE OF status_id
+  ON resource_allocation.task
+  FOR EACH ROW
+  EXECUTE PROCEDURE resource_allocation.on_before_task_status_updated();
 
 ---------------------------------------------------------------------------------------------------------------------
 
@@ -828,7 +866,8 @@ BEGIN
                         WHERE id = NEW.task_id
                         AND status_id = task_approved_status_id) THEN
             -- update tasks which were in conflict, but which are not anymore due this claim-update to the approved status
-            UPDATE resource_allocation.task SET status_id=task_approved_status_id
+            UPDATE resource_allocation.task
+            SET status_id=COALESCE((SELECT status_id from resource_allocation.task_status_before_conflict WHERE task_id=NEW.task_id), task_approved_status_id)
             WHERE id=NEW.task_id AND status_id = task_conflict_status_id;
         END IF;
     END IF;
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql
index a82dfe3779bc0bd495cdef58acc7d4601daeb4ca..489b3f6a4d48126a96be4986a18161f92c1f56a5 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql
@@ -197,6 +197,15 @@ CREATE INDEX task_status_id_idx
 CREATE INDEX task_type_id_idx
   ON resource_allocation.task (type_id);
 
+
+CREATE TABLE resource_allocation.task_status_before_conflict (
+    task_id integer NOT NULL REFERENCES resource_allocation.task(id) ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE,
+    status_id integer NOT NULL REFERENCES resource_allocation.task_status ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE,
+    PRIMARY KEY (task_id)
+) WITH (OIDS=FALSE);
+ALTER TABLE resource_allocation.task_status_before_conflict
+  OWNER TO resourceassignment;
+
 CREATE TABLE resource_allocation.task_predecessor (
   id serial NOT NULL,
   task_id integer NOT NULL REFERENCES resource_allocation.task(id) ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE,
diff --git a/SAS/TMSS/bin/tmss_manage_django b/SAS/TMSS/bin/tmss_manage_django
index 08d8fce5623e9ffab5c7485504b31df6c03cc61c..df98fec8dc00f7ee87e04cfdccceedad0ecc1e93 100755
--- a/SAS/TMSS/bin/tmss_manage_django
+++ b/SAS/TMSS/bin/tmss_manage_django
@@ -21,4 +21,6 @@
 from lofar.sas.tmss.manage import main
 
 if __name__ == "__main__":
-    main()
+    # override default settings to manage the build / installed version instead of the source
+    # Note: run manage.py in the source tree to manage the source (which is typically what you want)
+    main('lofar.sas.tmss.tmss.settings')
diff --git a/SAS/TMSS/docker-compose-scu199.yml b/SAS/TMSS/docker-compose-scu199.yml
index f6ed576c99a0da4d4ae87346861832e0619ecfa6..f2aa2b2c357fe81ae4724db31d79b31ed8702762 100644
--- a/SAS/TMSS/docker-compose-scu199.yml
+++ b/SAS/TMSS/docker-compose-scu199.yml
@@ -6,7 +6,7 @@ services:
     restart: on-failure
     env_file:
       - ./.env
-    command: bash -c 'source /opt/lofar/lofarinit.sh && python3 lib64/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8008'
+    command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -p 8008'
     ports:
       - "8008:8008"
   testprovider:
diff --git a/SAS/TMSS/src/Dockerfile-tmss b/SAS/TMSS/src/Dockerfile-tmss
index 8af44538aa0aaa703b74711bf979caaeef442e4e..f5e6d92b5543b2bd178ff01e4e5fa4b2659a16d3 100644
--- a/SAS/TMSS/src/Dockerfile-tmss
+++ b/SAS/TMSS/src/Dockerfile-tmss
@@ -1,14 +1,15 @@
-# Use an official Python runtime as a parent image
-FROM python:3.6
-
-RUN apt-get -y update && apt-get -y upgrade
-
-# LOFAR checkout and compile dependencies
-RUN apt-get -y update && apt-get -y install make cmake g++ subversion python3 git
-
-# LOFAR build dependencies
-RUN apt-get -y update && apt-get -y install liblog4cplus-dev python3-dev libldap2-dev libsasl2-dev
-RUN apt-get -y update && apt-get -y install python3-pip && pip3 install django djangorestframework django-filter django-auth-ldap coreapi python-ldap-test django-jsonforms django-json-widget "git+git://github.com/nnseva/django-jsoneditor.git" psycopg2-binary markdown ldap3 drf-yasg flex swagger-spec-validator mozilla_django_oidc
+#
+# This builds an image that can be used to run TMSS with all the needed lofar programs.
+# It is based on the ci_sas image that is used for the build for now. Later we can build a
+# smaller image for it.
+#
+# This image assumes TMSS is built and make install was done.
+#
+# docker build [-t image_name:tag] -f docker/Dockerfile-tmss .
+# +FROM ci_sas:latest + +RUN mkdir -p /opt/lofar # Adding backend directory to make absolute filepaths consistent across services WORKDIR /opt/lofar @@ -16,15 +17,11 @@ ENV LOFARROOT=/opt/lofar # Add the rest of the code -COPY ./installed /opt/lofar +COPY --chown=lofarsys:lofarsys ./installed /opt/lofar +RUN sed -i "s/lfr_root=.*/lfr_root=\/opt\/lofar/g" /opt/lofar/lofarinit.sh -# Make port 8000 available for the app +# Make ports 8000 and 8008 available for the app EXPOSE 8000 +EXPOSE 8008 -RUN sed -i "s/lfr_root=.*/lfr_root=\/opt\/lofar/g" /opt/lofar/lofarinit.sh -# Be sure to use 0.0.0.0 for the host within the Docker container, -# otherwise the browser won't be able to find it -# CMD python3 manage.py runserver 0.0.0.0:8000 -# CMD bash -c 'source /opt/lofar/lofarinit.sh && tmss' -CMD bash -c 'source /opt/lofar/lofarinit.sh && python3 lib/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8000' \ No newline at end of file diff --git a/SAS/TMSS/src/manage.py b/SAS/TMSS/src/manage.py index d828e31836196fec9ecad451f58233b07319c055..27949848438a55b9daea9fe27d3749bdf80ee061 100755 --- a/SAS/TMSS/src/manage.py +++ b/SAS/TMSS/src/manage.py @@ -2,6 +2,8 @@ import os import sys import signal +import importlib +import argparse def subscribe_to_signals(): # raise SignalException when a signal is caught so django will exit gracefully @@ -15,13 +17,32 @@ def subscribe_to_signals(): signal.signal(s, signal_handler) -def main(): +def main(settings_module="tmss.settings"): + # we typically use manage.py to manage the source (not the product), so this should point to the settings + # module relative to this script in the src tree. But we allow overriding it for tmss_manage_django. + + parser = argparse.ArgumentParser() + parser.add_argument("-C", action="store", dest="dbcredentials", + help="use database specified in this credentials file") + parser.add_argument("-L", action="store", dest="ldapcredentials", + help="use LDAP service specified in this credentials file") + args, unknownargs = parser.parse_known_args() + if args.dbcredentials: + os.environ["TMSS_DBCREDENTIALS"] = args.dbcredentials + if args.ldapcredentials: + os.environ["TMSS_LDAPCREDENTIALS"] = args.ldapcredentials + # do subscribe to more signals than django does for proper exits during testing if os.environ.get('TMSS_RAISE_ON_SIGNALS', "False").lower() in ["true", "1", "on"]: subscribe_to_signals() # normal django startup. Specify the DJANGO_SETTINGS_MODULE, and run it. - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lofar.sas.tmss.tmss.settings") + + os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module) + spec = importlib.util.find_spec(os.environ['DJANGO_SETTINGS_MODULE']) + settings_path = spec.origin + + print("Using settings module %s" % settings_path) try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?"
) from exc - execute_from_command_line(sys.argv) + + execute_from_command_line([sys.argv[0]] + unknownargs) if __name__ == "__main__": - main() \ No newline at end of file + main() + diff --git a/SAS/TMSS/src/remakemigrations.py b/SAS/TMSS/src/remakemigrations.py index 9c7bd57ed55088d90c08f8277525114811a3938e..10c7ac1b29588ed394da76a70ec06e50d925b3bb 100755 --- a/SAS/TMSS/src/remakemigrations.py +++ b/SAS/TMSS/src/remakemigrations.py @@ -1,15 +1,68 @@ #!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id: $ + +# This script automates the procedure to replace the existing migrations on the source tree with initial migrations +# based on the current datamodel. Django offers a call 'makemigrations' through manage.py, which creates new migrations +# after the datamodel implementation has changed. These additional migrations apply those changes to an existing +# database reflecting the previous datamodel. +# This is a very nice feature for production, but there are a few downsides that this script tackles: +# +# 1. During development, where the datamodel constantly changes, we typically don't want a ton of iterative migrations; +# just having a clean start with a fresh initial database state, without the whole provenance, is perfectly fine. (We +# start up a fresh database anyway for every test or test deployment.) This can be achieved by removing all existing +# migrations prior to creating new ones. +# A difficulty with this approach is that we do have a manual migration to populate the database with fixtures. +# This migration needs to be restored or re-created after Django has created fresh migrations for the database itself. +# +# 2. Since in settings.py we refer to the tmss app in the lofar environment, Django uses the built or installed version. +# A consequence is that the created migrations are placed in there and need to be copied to the source tree. +# +# This script requires a running postgres database instance to work against. +# To use specific database credentials, run e.g. ./remakemigrations.py -C b5f881c4-d41a-4f24-b9f5-23cd6a7f37d0 + + import os -import sys from glob import glob import subprocess as sp import logging - +import argparse +from shutil import copy +import lofar.sas.tmss logger = logging.getLogger(__file__) -here = os.path.dirname(__file__) -relapath = '/tmss/tmssapp/migrations/' + +# set up paths +tmss_source_directory = os.path.dirname(__file__) +if tmss_source_directory == '': + tmss_source_directory = '.'
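+# (illustrative, the paths are examples only: tmss_source_directory typically resolves to <checkout>/SAS/TMSS/src, while tmss_env_directory, set below, resolves to something like <prefix>/lib64/python3.6/site-packages/lofar/sas/tmss; the migrations Django writes into that environment are copied back into the source tree by copy_migrations_to_source())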
+tmss_env_directory = os.path.dirname(lofar.sas.tmss.__file__) +relative_migrations_directory = '/tmss/tmssapp/migrations/' + +# template for manual changes and fixture (applied last): template = """ +# +# auto-generated by remakemigrations.py +# +# ! Please make sure to apply any changes to the template in that script ! +# from django.db import migrations from lofar.sas.tmss.tmss.tmssapp.populate import * @@ -43,15 +96,17 @@ def delete_old_migrations(): logger.info('Removing old migrations...') files = glob_migrations() - for f in [path for path in files if ("auto" in path or "populate" in path)]: + for f in [path for path in files if ("initial" in path or "auto" in path or "populate" in path)]: logger.info('Deleting: %s' % f) os.remove(f) -def make_django_migrations(): +def make_django_migrations(dbcredentials=None): logger.info('Making Django migrations...') - execute_and_log('/usr/bin/env python3 %s/manage.py makemigrations' % here) + if dbcredentials: + os.environ['TMSS_DBCREDENTIALS'] = dbcredentials + execute_and_log('/usr/bin/env python3 %s/manage.py makemigrations' % tmss_source_directory) def make_populate_migration(): @@ -60,17 +115,27 @@ def make_populate_migration(): last_migration = determine_last_migration() migration = template % last_migration - path = here + relapath + '%s_populate.py' % str(int(last_migration.split('_')[0])+1).zfill(4) + path = tmss_env_directory + relative_migrations_directory + '%s_populate.py' % str(int(last_migration.split('_')[0])+1).zfill(4) logger.info('Writing to: %s' % path) with open(path,'w') as f: f.write(migration) -def glob_migrations(): - paths = glob(here + '/' + relapath + '0*_*') +def glob_migrations(directories=(tmss_source_directory, tmss_env_directory)): + paths = [] + for directory in directories: + paths += glob(directory + '/' + relative_migrations_directory + '0*_*') return paths +def copy_migrations_to_source(): + logger.info('Copying over migrations to source directory...') + files = glob_migrations(directories=[tmss_env_directory]) + for file in files: + logger.info('Copying %s to %s' % (file, tmss_source_directory + '/' + relative_migrations_directory)) + copy(file, tmss_source_directory + '/' + relative_migrations_directory) + + def determine_last_migration(): logger.info('Determining last migration...') files = glob_migrations() @@ -81,17 +146,11 @@ def determine_last_migration(): return last_migration -def put_migrations_under_version_control(): - logger.info('Putting migrations under version control...') - files = glob_migrations() - for f in files: - execute_and_log('git add %s' % f) - - -def remake_migrations(): +def remake_migrations(dbcredentials=None): delete_old_migrations() - make_django_migrations() + make_django_migrations(dbcredentials) make_populate_migration() + copy_migrations_to_source() if __name__ == "__main__": @@ -102,4 +161,7 @@ if __name__ == "__main__": handler.setLevel(logging.INFO) logger.addHandler(handler) - remake_migrations() + parser = argparse.ArgumentParser() + parser.add_argument("-C", action="store", dest="dbcredentials", help="use database specified in these dbcredentials") + args = parser.parse_args() + remake_migrations(args.dbcredentials) diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py index 7d100f0daa3413dd501f51e2c66c46032022e81c..0ea1ea394479bb5b63e10ce3a689f739c135ab7c 100644 --- a/SAS/TMSS/src/tmss/exceptions.py +++ b/SAS/TMSS/src/tmss/exceptions.py @@ -2,7 +2,7 @@ class TMSSException(Exception): pass -class SpecificationException(TMSSException): 
+class SchemaValidationException(TMSSException): pass class ConversionException(TMSSException): diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index 617fd939b6451fa38870b7474f49a2a052e04bbc..63a0c0b291c9b3012bca9b43947b8c5ec796a482 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -64,6 +64,11 @@ LOGGING = { 'handlers': ['console'], 'level': 'INFO', }, + 'django.request': { + 'handlers': ['console'], + 'level': 'DEBUG', # change debug level as appropriate + 'propagate': False, + }, } } @@ -103,7 +108,7 @@ MIDDLEWARE = [ 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware' ] ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls' @@ -301,4 +306,4 @@ STATICFILES_DIRS = ( # Setup support for proxy headers USE_X_FORWARDED_HOST = True -SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') \ No newline at end of file +SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') diff --git a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt index aee876a483d8ce2b972e2e008fe883c6269d0a7d..2d831505833f3c1d4380e83f51abe22ec62f10fa 100644 --- a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt @@ -7,6 +7,7 @@ set(_py_files apps.py views.py populate.py + validation.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py index e906cb22c94b2199922af61516f30603d55e69fd..d41f989284fec83a237b2935cba7355146168415 100644 --- a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py @@ -29,17 +29,22 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset = dict() # parameterset has no proper assignment operators, so take detour via dict... parset["Observation.ObsID"] = subtask.pk - parset["Observation.momID"] = -1 # Needed by MACScheduler - parset["Observation.otdbID"] = -1 # Needed by MACScheduler; should/can this be the same as subtask.pk? + parset["Observation.momID"] = 0 # Needed by MACScheduler + parset["Observation.otdbID"] = 0 # Needed by MACScheduler; should/can this be the same as subtask.pk? parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize() - parset["Observation.processSubtype"] = subtask.specifications_template.type.value.capitalize() # TODO: where to derive the processSubtype from? + parset["Observation.processSubtype"] = "Beam Observation" # TODO: where to derive the processSubtype from? + parset["Observation.Campaign.name"] = "TMSS_test" # TODO: replace by project name parset["Observation.startTime"] = formatDatetime(subtask.start_time) parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) - parset["Observation.VirtualInstrument.stationList"] = spec["stations"]["station_list"] + parset["Observation.VirtualInstrument.minimalNrStations"] = 1 # maybe not mandatory? + parset["Observation.VirtualInstrument.stationSet"] = "Custom" # maybe not mandatory? + parset["Observation.VirtualInstrument.stationList"] = "[%s]" % ','.join(s for s in spec["stations"]["station_list"]) + parset["Observation.antennaArray"] = "HBA" if "HBA" in spec["stations"]["antenna_set"] else "LBA" # maybe not mandatory?
parset["Observation.antennaSet"] = spec["stations"]["antenna_set"] parset["Observation.bandFilter"] = spec["stations"]["filter"] parset["Observation.sampleClock"] = 200 # why is this not part of the schema? for example as a required setting with a single allowed value. parset["Observation.nrBitsPerSample"] = 8 # why is this not part of the schema? for example as a required setting with a single allowed value. + parset["Observation.strategy"] = "default" # maybe not mandatory? digi_beams = spec['stations']['digital_pointings'] parset["Observation.nrBeams"] = len(digi_beams) @@ -68,27 +73,53 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset[beam_prefix+"angle1"] = analog_beam['angle1'] parset[beam_prefix+"angle2"] = analog_beam['angle2'] - parset["Cobalt.realTime"] = True - parset["Cobalt.blockSize"] = spec['COBALT']['blocksize'] - parset["Cobalt.correctBandPass"] = spec['COBALT']['bandpass_correction'] - parset["Cobalt.delayCompensation"] = spec['COBALT']['delay_compensation'] + for prefix in ["", "Observation.ObservationControl.OnlineControl."]: + parset[prefix+"Cobalt.realTime"] = True + parset[prefix+"Cobalt.blockSize"] = spec['COBALT']['blocksize'] + parset[prefix+"Cobalt.correctBandPass"] = spec['COBALT']['bandpass_correction'] + parset[prefix+"Cobalt.delayCompensation"] = spec['COBALT']['delay_compensation'] - parset["Cobalt.Correlator.nrChannelsPerSubband"] = spec['COBALT']['correlator']['channels_per_subband'] - parset["Cobalt.Correlator.nrBlocksPerIntegration"] = spec['COBALT']['correlator']['blocks_per_integration'] - parset["Cobalt.Correlator.nrIntegrationsPerBlock"] = spec['COBALT']['correlator']['integrations_per_block'] + parset[prefix+"Cobalt.Correlator.nrChannelsPerSubband"] = spec['COBALT']['correlator']['channels_per_subband'] + parset[prefix+"Cobalt.Correlator.nrBlocksPerIntegration"] = spec['COBALT']['correlator']['blocks_per_integration'] + parset[prefix+"Cobalt.Correlator.nrIntegrationsPerBlock"] = spec['COBALT']['correlator']['integrations_per_block'] + parset["Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name + parset["Observation.DataProducts.Output_Correlated.enabled"] = True - parset["Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster or "CEP4" - + parset["Observation.DataProducts.Output_Correlated.storageClusterName"] = subtask.cluster.name + parset["Observation.DataProducts.Output_Correlated.storageClusterPartition"] = "/data/test-projects" parset["Observation.DataProducts.Output_Correlated.filenames"] = [] parset["Observation.DataProducts.Output_Correlated.locations"] = [] # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id)) for subtask_output in subtask_outputs: dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) - for dataproduct in dataproducts: - parset["Observation.DataProducts.Output_Correlated.filenames"].append(dataproduct.filename) - parset["Observation.DataProducts.Output_Correlated.locations"].append(dataproduct.directory) + parset["Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ','.join(dp.filename for dp in dataproducts) + parset["Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ','.join(dp.directory for dp in dataproducts) + + # various additional 'Control' settings which seem to be needed for MAC + parset["prefix"] = "LOFAR." 
+ parset["Observation.claimPeriod"] = 35 + parset["Observation.preparePeriod"] = 20 + for prefix in ["", "Observation."]: + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.CorrProc._executable"] = "CN_Processing" + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.CorrProc._hostname"] = "cbmmaster" + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.CorrProc._nodes"] = [] + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.CorrProc._startstopType"] = "bgl" + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.CorrProc.workingdir"] = "/opt/lofar/bin/" + parset[prefix+"ObservationControl.OnlineControl.CorrAppl._hostname"] = "cbmmaster" + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.extraInfo"] = '["PIC","Cobalt"]' + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.procesOrder"] = [] + parset[prefix+"ObservationControl.OnlineControl.CorrAppl.processes"] = '["CorrProc"]' + parset[prefix+"ObservationControl.OnlineControl._hostname"] = 'CCU001' + parset[prefix+"ObservationControl.OnlineControl.applOrder"] = '["CorrAppl"]' + parset[prefix+"ObservationControl.OnlineControl.applications"] = '["CorrAppl"]' + parset[prefix+"ObservationControl.OnlineControl.inspectionHost"] = 'head01.cep4.control.lofar' + parset[prefix+"ObservationControl.OnlineControl.inspectionProgram"] = 'inspection-plots-observation.sh' + parset[prefix+"ObservationControl.StationControl._hostname"] = parset["Observation.VirtualInstrument.stationList"] + parset[prefix+"ObservationControl.StationControl.aartfaacPiggybackAllowed"] = False + parset[prefix+"ObservationControl.StationControl.tbbPiggybackAllowed"] = False + # convert dict to real parameterset, and return it parset = parameterset(parset) diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 054fee4f510ebd4a3dbe170d2f9dcb5b42fba946..eacc7a839abe6e06dcff9f37faeab3285a44ede8 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.3 on 2020-03-24 18:30 +# Generated by Django 2.2.10 on 2020-04-17 08:37 from django.conf import settings import django.contrib.postgres.fields @@ -177,6 +177,19 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='ResourceUnit', + fields=[ + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='Role', fields=[ @@ -527,7 +540,7 @@ class Migration(migrations.Migration): ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), 
- ('user_identifier', models.CharField(editable=False, help_text='The ID of the user who changed the state of the subtask.', null=True, max_length=128)), + ('user_identifier', models.CharField(editable=False, help_text='The ID of the user who changed the state of the subtask.', max_length=128, null=True)), ('new_state', models.ForeignKey(editable=False, help_text='Subtask state after update (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='is_new_state_of', to='tmssapp.SubtaskState')), ('old_state', models.ForeignKey(editable=False, help_text='Subtask state before update (see Subtask State Machine).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='is_old_state_of', to='tmssapp.SubtaskState')), ('subtask', models.ForeignKey(editable=False, help_text='Subtask to which this state change refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), @@ -635,6 +648,29 @@ class Migration(migrations.Migration): name='project', field=models.ForeignKey(help_text='Project to which this scheduling set belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_sets', to='tmssapp.Project'), ), + migrations.CreateModel( + name='ResourceType', + fields=[ + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), + ('resource_unit', models.ForeignKey(help_text='Unit of current resource.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_types', to='tmssapp.ResourceUnit')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ProjectQuota', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('value', models.FloatField(help_text='Resource Quota value')), + ('project', models.ForeignKey(help_text='Project to which this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.Project')), + ('resource_type', models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_type', to='tmssapp.ResourceType')), + ], + ), migrations.CreateModel( name='Filesystem', fields=[ diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py index 72f8e4d289d59a71e05f6c98682fcd430e197dc4..7a25dab177badfe0cab6eeaecee51d3d1fdfee19 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py @@ -1,4 +1,9 @@ +# +# auto-generated by remakemigrations.py +# +# ! Please make sure to apply any changes to the template in that script !
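+# (The populate operations below run in order: populate_choices, populate_resources, populate_misc, populate_lofar_json_schemas.)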
+# from django.db import migrations from lofar.sas.tmss.tmss.tmssapp.populate import * @@ -12,6 +17,6 @@ class Migration(migrations.Migration): # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'), migrations.RunPython(populate_choices), + migrations.RunPython(populate_resources), migrations.RunPython(populate_misc), migrations.RunPython(populate_lofar_json_schemas) ] - diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index 3ff978a222e02e6d909c64bc2fc2ba20a6a245fe..36cdc05269d5276798ec6163448b3f889ed24eac 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -11,10 +11,7 @@ from enum import Enum from rest_framework.serializers import HyperlinkedRelatedField from django.dispatch import receiver -from lofar.sas.tmss.tmss.exceptions import * - -import json -import jsonschema +from lofar.sas.tmss.tmss.tmssapp.validation import validate_json_against_schema # # I/O @@ -132,7 +129,6 @@ class DataproductFeedbackTemplate(Template): # todo: do we need to specify a default? - # # Instance Objects # @@ -157,30 +153,17 @@ class Subtask(BasicCommon): created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.') def __init__(self, *args, **kwargs): - super(Subtask, self).__init__(*args, **kwargs) - self.__original_state = self.state - - def validate_specification_against_schema(self): - if self.specifications_doc is None or self.specifications_template_id is None: - return + super().__init__(*args, **kwargs) - try: - # ensure the specification and schema are both valid json in the first place - spec = json.loads(self.specifications_doc) if type(self.specifications_doc) == str else self.specifications_doc - schema = json.loads(self.specifications_template.schema) if type(self.specifications_template.schema) == str else self.specifications_template.schema - except json.decoder.JSONDecodeError as e: - raise SpecificationException("Invalid JSON: %s" % str(e)) - - try: - jsonschema.validate(spec, schema) - except jsonschema.ValidationError as e: - raise SpecificationException(str(e)) + # keep original state for logging + self.__original_state = self.state def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - '''override of normal save method, doing a validation of the specification against the schema first - :raises SpecificationException in case the specification does not validate against the schema''' - self.validate_specification_against_schema() creating = self._state.adding # True on create, False on update + + if self.specifications_doc and self.specifications_template_id and self.specifications_template.schema: + validate_json_against_schema(self.specifications_doc, self.specifications_template.schema) + super().save(force_insert, force_update, using, update_fields) # log if either state update or new entry: @@ -221,6 +204,12 @@ class SubtaskInput(BasicCommon): selection_doc = JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.') selection_template = ForeignKey('SubtaskInputSelectionTemplate', on_delete=PROTECT, help_text='Schema used for selection_doc.') + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.selection_doc 
and self.selection_template_id and self.selection_template.schema: + validate_json_against_schema(self.selection_doc, self.selection_template.schema) + + super().save(force_insert, force_update, using, update_fields) + class SubtaskOutput(BasicCommon): subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, help_text='Subtask to which this output specification refers.') @@ -248,6 +237,14 @@ class Dataproduct(BasicCommon): feedback_doc = JSONField(help_text='Dataproduct properties, as reported by the producing process.') feedback_template = ForeignKey('DataproductFeedbackTemplate', on_delete=PROTECT, help_text='Schema used for feedback_doc.') + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.specifications_doc and self.specifications_template_id and self.specifications_template.schema: + validate_json_against_schema(self.specifications_doc, self.specifications_template.schema) + + if self.feedback_doc and self.feedback_template_id and self.feedback_template.schema: + validate_json_against_schema(self.feedback_doc, self.feedback_template.schema) + + super().save(force_insert, force_update, using, update_fields) class AntennaSet(NamedCommon): station_type = ForeignKey('StationType', null=False, on_delete=PROTECT) diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 86f621bfa1e2cf6e3130c2a83f8c270ac7ab0330..6dd29ad59246cb26fc7b44fa87baed01897ffc4c 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -2,11 +2,13 @@ This file contains the database models """ -from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, SET_NULL, PROTECT, ManyToManyField +from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField from django.contrib.postgres.fields import ArrayField, JSONField from django.contrib.postgres.indexes import GinIndex from enum import Enum +from lofar.sas.tmss.tmss.tmssapp.validation import validate_json_against_schema + # # Common # @@ -192,7 +194,6 @@ class DefaultWorkRelationSelectionTemplate(BasicCommon): # class Cycle(NamedCommonPK): - start = DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.') stop = DateTimeField(help_text='Moment at which the cycle officially ends.') number = IntegerField(help_text='Cycle number.') @@ -202,7 +203,7 @@ class Cycle(NamedCommonPK): class Project(NamedCommonPK): - # cycle is protected since we hav<e to manually decide to clean up projects with a cycle or keep them without cycle + # cycle is protected since we have to manually decide to clean up projects with a cycle or keep them without cycle cycle = ForeignKey('Cycle', related_name='projects', on_delete=PROTECT, null=True, help_text='Cycle(s) to which this project belongs (NULLable).') priority = IntegerField(default=0, help_text='Priority of this project w.r.t. other projects. 
Projects can interrupt observations of lower-priority projects.') # todo: define a value for the default priority can_trigger = BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).') @@ -211,12 +212,31 @@ class Project(NamedCommonPK): filler = BooleanField(default=False, help_text='Use this project to fill up idle telescope time.') +class ProjectQuota(Model): + project = ForeignKey('Project', related_name="project_quota", on_delete=PROTECT, help_text='Project to which this quota belongs.') # protected to avoid accidents + value = FloatField(help_text='Resource Quota value') + resource_type = ForeignKey('ResourceType', related_name="resource_type", on_delete=PROTECT, help_text='Resource type.') # protected to avoid accidents + + +class ResourceType(NamedCommonPK): + resource_unit = ForeignKey('ResourceUnit', related_name="resource_types", on_delete=PROTECT, help_text='Unit of current resource.') + + +class ResourceUnit(NamedCommonPK): + pass + class SchedulingSet(NamedCommon): generator_doc = JSONField(null=True, help_text='Parameters for the generator (NULLable).') generator_template = ForeignKey('GeneratorTemplate', on_delete=SET_NULL, null=True, help_text='Generator for the scheduling units in this set (NULLable).') generator_source = ForeignKey('SchedulingUnitDraft', on_delete=SET_NULL, null=True, help_text='Reference for the generator to an existing collection of specifications (NULLable).') project = ForeignKey('Project', related_name="scheduling_sets", on_delete=PROTECT, help_text='Project to which this scheduling set belongs.') # protected to avoid accidents + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.generator_doc and self.generator_template_id and self.generator_template.schema: + validate_json_against_schema(self.generator_doc, self.generator_template.schema) + + super().save(force_insert, force_update, using, update_fields) + class SchedulingUnitDraft(NamedCommon): requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this run.') @@ -226,6 +246,11 @@ class SchedulingUnitDraft(NamedCommon): scheduling_set = ForeignKey('SchedulingSet', related_name='scheduling_unit_drafts', on_delete=CASCADE, help_text='Set to which this scheduling unit draft belongs.') requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc.') # todo: 'schema'?
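+ # note: the save() overrides added to the models in this module all follow the same pattern: if the JSON document, its template reference, and the template's schema are all set, the document is validated against the schema before delegating to the regular Django save().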
+ def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.requirements_doc and self.requirements_template_id and self.requirements_template.schema: + validate_json_against_schema(self.requirements_doc, self.requirements_template.schema) + + super().save(force_insert, force_update, using, update_fields) class SchedulingUnitBlueprint(NamedCommon): requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).') @@ -233,6 +258,11 @@ class SchedulingUnitBlueprint(NamedCommon): requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc (IMMUTABLE).') draft = ForeignKey('SchedulingUnitDraft', related_name='related_scheduling_unit_blueprint', on_delete=CASCADE, help_text='Scheduling Unit Draft which this run instantiates.') + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.requirements_doc and self.requirements_template_id and self.requirements_template.schema: + validate_json_against_schema(self.requirements_doc, self.requirements_template.schema) + + super().save(force_insert, force_update, using, update_fields) class TaskDraft(NamedCommon): specifications_doc = JSONField(help_text='Specifications for this task.') @@ -241,6 +271,12 @@ class TaskDraft(NamedCommon): scheduling_unit_draft = ForeignKey('SchedulingUnitDraft', related_name='task_drafts', on_delete=CASCADE, help_text='Scheduling Unit draft to which this task draft belongs.') specifications_template = ForeignKey('TaskTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc.') # todo: 'schema'? + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.specifications_doc and self.specifications_template_id and self.specifications_template.schema: + validate_json_against_schema(self.specifications_doc, self.specifications_template.schema) + + super().save(force_insert, force_update, using, update_fields) + class TaskBlueprint(NamedCommon): specifications_doc = JSONField(help_text='Schedulings for this task (IMMUTABLE).') @@ -249,6 +285,12 @@ class TaskBlueprint(NamedCommon): draft = ForeignKey('TaskDraft', related_name='related_task_blueprint', on_delete=CASCADE, help_text='Task Draft which this task instantiates.') scheduling_unit_blueprint = ForeignKey('SchedulingUnitBlueprint', on_delete=CASCADE, help_text='Scheduling Unit Blueprint to which this task belongs.') + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.specifications_doc and self.specifications_template_id and self.specifications_template.schema: + validate_json_against_schema(self.specifications_doc, self.specifications_template.schema) + + super().save(force_insert, force_update, using, update_fields) + class TaskRelationDraft(BasicCommon): selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.') @@ -259,6 +301,12 @@ class TaskRelationDraft(BasicCommon): output = ForeignKey('TaskConnectors', related_name='outputs_task_relation_draft', on_delete=CASCADE, help_text='Output connector of producer.') selection_template = ForeignKey('WorkRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'? 
+ def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.selection_doc and self.selection_template_id and self.selection_template.schema: + validate_json_against_schema(self.selection_doc, self.selection_template.schema) + + super().save(force_insert, force_update, using, update_fields) + class TaskRelationBlueprint(BasicCommon): selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.') @@ -269,3 +317,10 @@ class TaskRelationBlueprint(BasicCommon): output = ForeignKey('TaskConnectors', related_name='outputs_task_relation_blueprint', on_delete=CASCADE, help_text='Output connector of producer.') draft = ForeignKey('TaskRelationDraft', on_delete=CASCADE, related_name='related_task_relation_blueprint', help_text='Task Relation Draft which this work request instantiates.') selection_template = ForeignKey('WorkRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'? + + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.selection_doc and self.selection_template_id and self.selection_template.schema: + validate_json_against_schema(self.selection_doc, self.selection_template.schema) + + super().save(force_insert, force_update, using, update_fields) + diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index e7a39909a37ae2e9296d81d8d63c87c7e664699f..b11731904350d2dafffc8fc26f6253756dbc5547 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -15,7 +15,7 @@ class Migration(migrations.Migration): """ import json -from lofar.sas.tmss.tmss.tmssapp.models.specification import Role, Datatype, Dataformat, CopyReason, TaskTemplate +from lofar.sas.tmss.tmss.tmssapp.models.specification import Role, Datatype, Dataformat, CopyReason, TaskTemplate, ResourceType, ResourceUnit from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, \ StationType, Algorithm, ScheduleMethod, Cluster, Filesystem from lofar.common.json_utils import * @@ -38,6 +38,14 @@ def populate_lofar_json_schemas(apps, schema_editor): _populate_example_data() +def populate_resources(apps, schema_editor): + ru_bytes = ResourceUnit.objects.create(name="bytes", description="Bytes") + ru_hours = ResourceUnit.objects.create(name="hours", description="duration in hours") + + ResourceType.objects.create(name="lta_storage", description="Amount of storage in LTA", resource_unit=ru_bytes) + ResourceType.objects.create(name="cep_storage", description="Amount of storage at CEP processing cluster", resource_unit=ru_bytes) + ResourceType.objects.create(name="cep_processing_hours", description="Number of processing hours for CEP processing cluster", resource_unit=ru_hours) + def populate_misc(apps, schema_editor): cluster = Cluster.objects.create(name="CEP4", location="CIT") fs = Filesystem.objects.create(name="LustreFS", cluster=cluster, capacity=3.6e15) @@ -170,37 +178,43 @@ def _populate_example_data(): from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data, TaskBlueprint_test_data, SubtaskOutput_test_data, Dataproduct_test_data, Subtask_test_data - task_template = models.TaskTemplate.objects.get(name='correlator schema') - task_draft_data = TaskDraft_test_data(name="my test obs", specifications_template=task_template) - task_draft = 
models.TaskDraft.objects.create(**task_draft_data) + cluster = Cluster.objects.get(name="CEP4") - task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft) - task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data) + for i in range(10): + task_template = models.TaskTemplate.objects.get(name='correlator schema') + task_draft_data = TaskDraft_test_data(name="my test obs", specifications_template=task_template) + task_draft = models.TaskDraft.objects.create(**task_draft_data) - subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema') - specifications_doc = { - "stations": {"station_list": ["CS001", "CS002"], - "antenna_set": "HBA_DUAL", - "filter": "HBA_110_190", - "analog_pointing": {"direction_type": "J2000", - "angle1": 45, - "angle2": 20}, - "digital_pointings": [{"name": "beam01", - "pointing": {"direction_type": "J2000", - "angle1": 45, - "angle2": 20}, - "subbands": list(range(0, 16)) - }] - } - } + task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft) + task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data) + + subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema') + specifications_doc = { + "stations": {"station_list": ["RS106","RS205"], + "antenna_set": "HBA_DUAL_INNER", + "filter": "HBA_110_190", + "analog_pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, + "angle2": 0.5787463318245085 }, + "digital_pointings": [{"name": "3C48", + "pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, + "angle2": 0.5787463318245085 }, + "subbands": list(range(0, 244)) + }] + } + } - specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema) - subtask_data = Subtask_test_data(task_blueprint=task_blueprint, subtask_template=subtask_template, specifications_doc=specifications_doc) - subtask = models.Subtask.objects.create(**subtask_data) + specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema) + subtask_data = Subtask_test_data(task_blueprint=task_blueprint, subtask_template=subtask_template, + specifications_doc=specifications_doc, cluster=cluster) + subtask = models.Subtask.objects.create(**subtask_data) - subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask)) - for sb_nr in specifications_doc['stations']['digital_pointings'][0]['subbands']: - dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output, filename="dataproduct_SB_%03d.h5"%sb_nr)) + subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask)) + for sb_nr in specifications_doc['stations']['digital_pointings'][0]['subbands']: + models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output, + directory="CEP4:/data/test-projects/TMSS_test/L%d/uv/" % (subtask.id,), + filename="L%d_SB%03d_uv.MS"%(subtask.id, sb_nr))) except ImportError: pass diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index f71570d16e7c67697aad85d18adbc6c5af54defb..e241bb9d02ffd4c9230fa78a454fe842f84fe264 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ -103,6 +103,7 @@ class CopyReasonSerializer(serializers.ModelSerializer): model = models.CopyReason fields = '__all__' + class 
TaskConnectorsSerializer(serializers.HyperlinkedModelSerializer): class Meta: @@ -121,6 +122,24 @@ class ProjectSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Project fields = '__all__' + extra_fields = ['name','project_quota'] + +class ProjectQuotaSerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.ProjectQuota + fields = '__all__' + extra_fields = ['resource_type'] + +class ResourceUnitSerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.ResourceUnit + fields = '__all__' + extra_fields = ['name'] + +class ResourceTypeSerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.ResourceType + fields = '__all__' extra_fields = ['name'] class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer): diff --git a/SAS/TMSS/src/tmss/tmssapp/validation.py b/SAS/TMSS/src/tmss/tmssapp/validation.py new file mode 100644 index 0000000000000000000000000000000000000000..2908a80cad68da0c71aea006a2aa9b6787768033 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/validation.py @@ -0,0 +1,32 @@ +import json +import jsonschema +from lofar.sas.tmss.tmss.exceptions import * + +def validate_json_against_schema(json_string: str, schema: str): + '''validate the given json_string against the given schema. + If no exception is thrown, then the given json_string validates against the given schema. + :raises SchemaValidationException if the json_string does not validate against the schema + ''' + + # ensure the given arguments are strings + if type(json_string) != str: + json_string = json.dumps(json_string) + if type(schema) != str: + schema = json.dumps(schema) + + # ensure the specification and schema are both valid json in the first place + try: + json_object = json.loads(json_string) + except json.decoder.JSONDecodeError as e: + raise SchemaValidationException("Invalid JSON: %s\n%s" % (str(e), json_string)) + + try: + schema_object = json.loads(schema) + except json.decoder.JSONDecodeError as e: + raise SchemaValidationException("Invalid JSON: %s\n%s" % (str(e), schema)) + + # now do the actual validation + try: + jsonschema.validate(json_object, schema_object) + except jsonschema.ValidationError as e: + raise SchemaValidationException(str(e)) diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 8eb8e847c4e599521e5cb63cf2947ea26a05e9f9..d744569426c9a096c353d187e8fc9f42e36dbb8c 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -9,12 +9,15 @@ from .. import models from .. import serializers from django_filters import rest_framework as filters from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask +# Don't use the OrderingFilter class from django_filters; use the one from rest_framework instead!!
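+# (With rest_framework's OrderingFilter enabled on the subtask viewset below, results can be sorted via the 'ordering' query parameter, e.g. GET /api/subtask/?ordering=-start_time for descending start_time; the ordering = ('start_time',) attribute sets the default sort order.)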
+from rest_framework.filters import OrderingFilter class subTaskFilter(filters.FilterSet): class Meta: model = Subtask fields = { + 'state__value': ['exact'], 'start_time': ['lt', 'gt'], 'stop_time': ['lt', 'gt'], 'cluster__name': ['exact', 'icontains'], @@ -28,7 +31,9 @@ from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp import serializers +from datetime import datetime from lofar.common.json_utils import get_default_json_object_for_schema +from lofar.common.datetimeutils import formatDatetime from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset @@ -199,8 +204,9 @@ class DataproductHashViewSet(LOFARViewSet): class SubtaskViewSetJSONeditorOnline(LOFARViewSet): queryset = models.Subtask.objects.all() serializer_class = serializers.SubtaskSerializerJSONeditorOnline - filter_backends = (filters.DjangoFilterBackend,) + filter_backends = (filters.DjangoFilterBackend, OrderingFilter,) filter_class = subTaskFilter + ordering = ('start_time',) def get_view_name(self): # override name because DRF auto-naming dot_tmssapp_scheduling_django does not produce something usable here name = "Subtask" @@ -223,5 +229,10 @@ class SubtaskViewSetJSONeditorOnline(LOFARViewSet): def parset(self, request, pk=None): subtask = get_object_or_404(models.Subtask, pk=pk) parset = convert_to_parset(subtask) - return HttpResponse(str(parset), content_type='text/plain') + parset_str = "# THIS PARSET WAS GENERATED BY TMSS FROM THE SPECIFICATION OF SUBTASK ID=%d ON %s url: %s\n%s" % ( + subtask.pk, + formatDatetime(datetime.utcnow()), + request._request.get_raw_uri(), + parset,) + return HttpResponse(parset_str, content_type='text/plain') diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 2d7b11a7fcfccc72700ee9f6e48eaf78c388d2f6..16f25c43c2a58715bfba2ed1fc87ac401db3a410 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -100,7 +100,10 @@ class CopyReasonViewSet(LOFARViewSet): queryset = models.CopyReason.objects.all() serializer_class = serializers.CopyReasonSerializer - +class ResourceUnitViewSet(LOFARViewSet): + queryset = models.ResourceUnit.objects.all() + serializer_class = serializers.ResourceUnitSerializer + class TaskConnectorsViewSet(LOFARViewSet): queryset = models.TaskConnectors.objects.all() serializer_class = serializers.TaskConnectorsSerializer @@ -123,6 +126,15 @@ class ProjectViewSet(LOFARViewSet): else: return models.Project.objects.all() +class ProjectQuotaViewSet(LOFARViewSet): + queryset = models.ProjectQuota.objects.all() + serializer_class = serializers.ProjectQuotaSerializer + + +class ResourceTypeViewSet(LOFARViewSet): + queryset = models.ResourceType.objects.all() + serializer_class = serializers.ResourceTypeSerializer + class SchedulingSetViewSet(LOFARViewSet): queryset = models.SchedulingSet.objects.all() diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 5d9fadf734c44a153e7174d5b017e79bb6c3a9dd..41011e9bba9604b9d4427f8d8cbd3c89af0a188c 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -86,6 +86,10 @@ router.register(r'default_work_relation_selection_template', viewsets.DefaultWor # instances router.register(r'cycle', viewsets.CycleViewSet) router.register(r'project', viewsets.ProjectViewSet) +router.register(r'resource_unit', viewsets.ResourceUnitViewSet)
+router.register(r'resource_type', viewsets.ResourceTypeViewSet) +router.register(r'project_quota', viewsets.ProjectQuotaViewSet) + router.register(r'scheduling_set', viewsets.SchedulingSetViewSet) router.register(r'scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) router.register(r'scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintViewSet) diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index 980f937556504a4b8ad1ca13cc28dfab705fdf9f..19041224c4f4e355b3eaa0ba112e3c17bed1c56e 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -20,14 +20,14 @@ if(BUILD_TESTING) DESTINATION lofar/sas/tmss/test) lofar_add_test(t_tmss_test_database) - lofar_add_test(t_tmssapp_specification_django) - lofar_add_test(t_tmssapp_specification_functional) - lofar_add_test(t_tmssapp_scheduling_django) - lofar_add_test(t_tmssapp_scheduling_functional) + lofar_add_test(t_tmssapp_specification_django_API) + lofar_add_test(t_tmssapp_specification_REST_API) + lofar_add_test(t_tmssapp_scheduling_django_API) + lofar_add_test(t_tmssapp_scheduling_REST_API) lofar_add_test(t_subtask_validation) lofar_add_test(t_tmssapp_specification_permissions) lofar_add_test(t_tmss_session_auth) - set_tests_properties(t_tmssapp_scheduling_functional PROPERTIES TIMEOUT 300) - set_tests_properties(t_tmssapp_specification_functional PROPERTIES TIMEOUT 300) + set_tests_properties(t_tmssapp_scheduling_REST_API PROPERTIES TIMEOUT 300) + set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 300) endif() diff --git a/SAS/TMSS/test/t_subtask_validation.py b/SAS/TMSS/test/t_subtask_validation.py index 80b562af61ce3956a03ddc231ef59fbe93aa1c9e..0083c1acad2b9d47f1e5915bbc7bbe1987a2f24a 100755 --- a/SAS/TMSS/test/t_subtask_validation.py +++ b/SAS/TMSS/test/t_subtask_validation.py @@ -40,7 +40,7 @@ from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) from lofar.sas.tmss.tmss.tmssapp import models -from lofar.sas.tmss.tmss.exceptions import SpecificationException +from lofar.sas.tmss.tmss.exceptions import SchemaValidationException import requests @@ -63,7 +63,7 @@ class SubtaskValidationTest(unittest.TestCase): specifications_doc = '42' subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc) - with self.assertRaises(SpecificationException): + with self.assertRaises(SchemaValidationException): models.Subtask.objects.create(**subtask_data) @@ -77,7 +77,7 @@ class SubtaskValidationTest(unittest.TestCase): # updating the specification with an invalid should fail invalid_spec = '42' - with self.assertRaises(SpecificationException): + with self.assertRaises(SchemaValidationException): subtask.specifications_doc = invalid_spec subtask.save() self.assertEqual(invalid_spec, subtask.specifications_doc) @@ -86,20 +86,12 @@ class SubtaskValidationTest(unittest.TestCase): subtask.refresh_from_db() self.assertEqual(valid_spec, subtask.specifications_doc) - def test_validate_simple_string_schema_with_valid_specification(self): - subtask_template = self.create_subtask_template('{"type": "string"}') - specifications_doc = '"a random string"' - subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc) - - subtask = models.Subtask.objects.create(**subtask_data) - self.assertIsNotNone(subtask) - def test_validate_flawed_json_schema(self): subtask_template = self.create_subtask_template('{ this is not a 
json object }') specifications_doc = '"a random string"' subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc) - with self.assertRaises(SpecificationException) as context: + with self.assertRaises(SchemaValidationException) as context: models.Subtask.objects.create(**subtask_data) self.assertTrue('invalid json' in str(context.exception).lower()) @@ -108,7 +100,7 @@ class SubtaskValidationTest(unittest.TestCase): specifications_doc = '{ this is not a json object }' subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc) - with self.assertRaises(SpecificationException) as context: + with self.assertRaises(SchemaValidationException) as context: models.Subtask.objects.create(**subtask_data) self.assertTrue('invalid json' in str(context.exception).lower()) @@ -129,13 +121,13 @@ class SubtaskValidationTest(unittest.TestCase): self.assertIsNotNone(subtask_template) # test with invalid json - with self.assertRaises(SpecificationException) as context: + with self.assertRaises(SchemaValidationException) as context: subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc="bogus spec") models.Subtask.objects.create(**subtask_data) self.assertTrue('invalid json' in str(context.exception).lower()) # test with valid json, but not according to schema - with self.assertRaises(SpecificationException) as context: + with self.assertRaises(SchemaValidationException) as context: specifications_doc = '''{ "duration": -10 }''' subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc) models.Subtask.objects.create(**subtask_data) @@ -151,7 +143,7 @@ class SubtaskValidationTest(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 201, subtask_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, subtask_test_data) + GET_OK_and_assert_equal_expected_response(self, url, subtask_test_data) def test_validate_simple_string_schema_with_invalid_specification_via_rest(self): template = rest_data_creator.SubtaskTemplate(schema='{"type": "string"}') @@ -163,7 +155,7 @@ class SubtaskValidationTest(unittest.TestCase): # POST and GET a new item and assert correctness response_content = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 500, {}) - self.assertTrue("SpecificationException at /api/subtask/" in response_content) + self.assertTrue("SchemaValidationException at /api/subtask/" in response_content) self.assertTrue("42 is not of type 'string'" in response_content) def test_validate_correlator_schema_with_valid_specification_via_rest(self): @@ -183,7 +175,7 @@ class SubtaskValidationTest(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 201, subtask_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, subtask_test_data) + GET_OK_and_assert_equal_expected_response(self, url, subtask_test_data) def test_validate_correlator_schema_with_invalid_specification_via_rest(self): # fetch correlator_schema for Dupplo UC1 which should be in the initially populated database @@ -193,7 +185,7 @@ class SubtaskValidationTest(unittest.TestCase): self.assertEqual(1, json_response.get('count')) template = json_response['results'][0] - schema_url = 
template['url'] + schema_url = template['url'].split('?')[0] specifications_doc = "bogus spec" subtask_test_data = rest_data_creator.Subtask(specifications_template_url=schema_url, specifications_doc=specifications_doc) diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py similarity index 74% rename from SAS/TMSS/test/t_tmssapp_scheduling_functional.py rename to SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py index 5fdef254a72ea722334f86c85ff296ca29325d45..1b7495c610e311eeea6f23b243bd6cf8688c5494 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py @@ -53,7 +53,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): self.assertTrue("Subtask Template List" in r.content.decode('utf8')) def test_subtask_template_template_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/subtask_template/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_template/1234321/', 404) def test_subtask_template_POST_and_GET(self): st_test_data = test_data_creator.SubtaskTemplate() @@ -61,7 +61,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, st_test_data) + GET_OK_and_assert_equal_expected_response(self, url, st_test_data) def test_subtask_template_PUT_invalid_raises_error(self): st_test_data = test_data_creator.SubtaskTemplate() @@ -74,11 +74,11 @@ class SubtaskTemplateTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, st_test_data) + GET_OK_and_assert_equal_expected_response(self, url, st_test_data) # PUT new values, verify PUT_and_assert_expected_response(self, url, st_test_data2, 200, st_test_data2) - GET_and_assert_expected_response(self, url, 200, st_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, st_test_data2) def test_subtask_template_PATCH(self): st_test_data = test_data_creator.SubtaskTemplate() @@ -86,7 +86,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, st_test_data) + GET_OK_and_assert_equal_expected_response(self, url, st_test_data) test_patch = {"type": BASE_URL + '/subtask_type/inspection/', "version": 'v6.28318530718', @@ -97,7 +97,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(st_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_subtask_template_DELETE(self): st_test_data = test_data_creator.SubtaskTemplate() @@ -105,7 +105,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, st_test_data) + 
@@ -61,7 +61,7 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

     def test_subtask_template_PUT_invalid_raises_error(self):
         st_test_data = test_data_creator.SubtaskTemplate()
@@ -74,11 +74,11 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, st_test_data2, 200, st_test_data2)
-        GET_and_assert_expected_response(self, url, 200, st_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data2)

     def test_subtask_template_PATCH(self):
         st_test_data = test_data_creator.SubtaskTemplate()
@@ -86,7 +86,7 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         test_patch = {"type": BASE_URL + '/subtask_type/inspection/',
                       "version": 'v6.28318530718',
@@ -97,7 +97,7 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(st_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_template_DELETE(self):
         st_test_data = test_data_creator.SubtaskTemplate()
@@ -105,7 +105,7 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -122,14 +122,32 @@ class SubtaskTemplateTestCase(unittest.TestCase):
         test_data = dict(st_test_data)
         test_data['type'] = type_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(type_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, type_url, 200, type_data)
+        GET_OK_and_assert_equal_expected_response(self, type_url, type_data)
+
+    def test_GET_SubtaskTemplate_list_view_shows_entry(self):
+
+        test_data_1 = SubtaskTemplate_test_data()
+        models.SubtaskTemplate.objects.create(**test_data_1)
+        nbr_results = models.SubtaskTemplate.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask_template/', test_data_1, nbr_results)
+
+    def test_GET_SubtaskTemplate_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = SubtaskTemplate_test_data()
+        test_data_2 = SubtaskTemplate_test_data()
+        id1 = models.SubtaskTemplate.objects.create(**test_data_1).id
+        id2 = models.SubtaskTemplate.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_template/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_template/%s/' % id2, test_data_2)


 class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
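The new *_list_view_shows_entry tests rely on one more helper, GET_and_assert_in_expected_response_result_list, also not shown in this diff. A plausible sketch, assuming the list views use the DRF-style paginated layout with count/results keys:

    def GET_and_assert_in_expected_response_result_list(test, url, expected_data, expected_count):
        # GET a list view, check the reported item count, and assert that at
        # least one result matches every key/value pair of expected_data
        response = requests.get(url, auth=AUTH)
        test.assertEqual(200, response.status_code)
        json_body = response.json()
        test.assertEqual(expected_count, json_body['count'])
        test.assertTrue(any(all(result.get(key) == value for key, value in expected_data.items())
                            for result in json_body['results']))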
@@ -139,7 +157,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
         self.assertTrue("Dataproduct Specifications Template List" in r.content.decode('utf8'))

     def test_dataproduct_specifications_template_template_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/dataproduct_specifications_template/1234321/', 404)

     def test_dataproduct_specifications_template_POST_and_GET(self):
         dst_test_data = test_data_creator.DataproductSpecificationsTemplate()
@@ -147,7 +165,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dst_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dst_test_data)

     def test_dataproduct_specifications_template_PUT_invalid_raises_error(self):
         dst_test_data = test_data_creator.DataproductSpecificationsTemplate()
@@ -161,11 +179,11 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dst_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dst_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, dst_test_data2, 200, dst_test_data2)
-        GET_and_assert_expected_response(self, url, 200, dst_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, dst_test_data2)

     def test_dataproduct_specifications_template_PATCH(self):
         dst_test_data = test_data_creator.DataproductSpecificationsTemplate()
@@ -173,7 +191,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dst_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dst_test_data)

         test_patch = {"version": 'v6.28318530718',
                       "schema": {"mykey": "my better value"},
@@ -183,7 +201,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(dst_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_dataproduct_specifications_template_DELETE(self):
         dst_test_data = test_data_creator.DataproductSpecificationsTemplate()
@@ -191,11 +209,29 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dst_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dst_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)

+    def test_GET_DataproductSpecificationsTemplate_list_view_shows_entry(self):
+
+        test_data_1 = DataproductSpecificationsTemplate_test_data()
+        models.DataproductSpecificationsTemplate.objects.create(**test_data_1)
+        nbr_results = models.DataproductSpecificationsTemplate.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/dataproduct_specifications_template/', test_data_1, nbr_results)
+
+    def test_GET_DataproductSpecificationsTemplate_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = DataproductSpecificationsTemplate_test_data()
+        test_data_2 = DataproductSpecificationsTemplate_test_data()
+        id1 = models.DataproductSpecificationsTemplate.objects.create(**test_data_1).id
+        id2 = models.DataproductSpecificationsTemplate.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_specifications_template/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_specifications_template/%s/' % id2, test_data_2)


 class DataproductFeedbackTemplateTestCase(unittest.TestCase):
     # This currently adds nothing on top of the template base class, so nothing new to test here.
@@ -231,7 +267,7 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase):
         response = requests.delete(template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, template_url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, template_url, st_test_data)

     def test_default_dataproduct_specifications_template_PROTECT_behavior_on_template_deleted(self):
         dpst_test_data = test_data_creator.DataproductSpecificationsTemplate()
@@ -246,7 +282,7 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase):
         response = requests.delete(template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, template_url, 200, dpst_test_data)
+        GET_OK_and_assert_equal_expected_response(self, template_url, dpst_test_data)


 class SubtaskTestCase(unittest.TestCase):
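The PROTECT, SET_NULL and CASCADE tests throughout this file all exercise the on_delete policy of a model foreign key. For reference, a compact sketch of the three policies as Django fields (model and field names here are illustrative, not the actual TMSS models):

    from django.db import models

    class Template(models.Model):
        pass

    class Blueprint(models.Model):
        pass

    class Item(models.Model):
        # PROTECT: deleting the referenced template raises ProtectedError, which
        # Django renders as the HTTP 500 debug page the tests check for
        template = models.ForeignKey(Template, on_delete=models.PROTECT)
        # SET_NULL: deleting the blueprint nulls the reference, matching the
        # expected_data[...] = None assertions
        blueprint = models.ForeignKey(Blueprint, null=True, on_delete=models.SET_NULL)

    class DerivedItem(models.Model):
        # CASCADE: deleting the template deletes this row too, hence the
        # GET-returns-404 assertions after DELETE in the CASCADE tests
        template = models.ForeignKey(Template, on_delete=models.CASCADE)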
@@ -263,7 +299,7 @@ class SubtaskTestCase(unittest.TestCase):
         self.assertTrue("Subtask List" in r.content.decode('utf8'))

     def test_subtask_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/subtask/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask/1234321/', 404)

     def test_subtask_POST_and_GET(self):
         st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
@@ -271,7 +307,7 @@ class SubtaskTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)
         minimium_subtaskid = 2000000
         subtask_id = url.split("subtask/")[1].replace("/","")
         self.assertGreaterEqual(int(subtask_id), minimium_subtaskid)
@@ -288,11 +324,11 @@ class SubtaskTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, st_test_data2, 200, st_test_data2)
-        GET_and_assert_expected_response(self, url, 200, st_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data2)

     def test_subtask_PATCH(self):
         st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
@@ -300,7 +336,7 @@ class SubtaskTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         test_patch = {"specifications_doc": {"somespec": "somevalue"}}

@@ -308,7 +344,7 @@ class SubtaskTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(st_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_DELETE(self):
         st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
@@ -316,7 +352,7 @@ class SubtaskTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -333,14 +369,14 @@ class SubtaskTestCase(unittest.TestCase):
         test_data = dict(st_test_data)
         test_data['state'] = state_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(state_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, state_url, 200, state_data)
+        GET_OK_and_assert_equal_expected_response(self, state_url, state_data)

     def test_subtask_SET_NULL_behavior_on_task_blueprint_deleted(self):
         # make new task_blueprint_url instance, but reuse related data for speed
@@ -352,7 +388,7 @@ class SubtaskTestCase(unittest.TestCase):

         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, task_blueprint_url)
@@ -360,7 +396,7 @@ class SubtaskTestCase(unittest.TestCase):
         # assert item reference is set null
         expected_data = dict(st_test_data)
         expected_data['task_blueprint'] = None
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_PROTECT_behavior_on_template_deleted(self):
         stt_test_data = test_data_creator.SubtaskTemplate()
@@ -369,14 +405,57 @@ class SubtaskTestCase(unittest.TestCase):

         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, st_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, st_test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(specifications_template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, specifications_template_url, 200, stt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, specifications_template_url, stt_test_data)
+
+    def test_GET_Subtask_list_view_shows_entry(self):
+
+        test_data_1 = Subtask_test_data()
+        models.Subtask.objects.create(**test_data_1)
+        nbr_results = models.Subtask.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask/', test_data_1, nbr_results)
+
+    def test_GET_Subtask_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = Subtask_test_data()
+        test_data_2 = Subtask_test_data()
+        id1 = models.Subtask.objects.create(**test_data_1).id
+        id2 = models.Subtask.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask/%s/' % id2, test_data_2)
+
+    def test_nested_Subtask_are_filtered_according_to_TaskBlueprint(self):
+
+        # setup
+        test_data_1 = Subtask_test_data()
+        test_data_2 = Subtask_test_data()
+        tbt_test_data_1 = TaskBlueprint_test_data("task blue print one")
+        tbt_test_data_2 = TaskBlueprint_test_data("task blue print two")
+        task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt_test_data_1)
+        task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt_test_data_2)
+        test_data_1 = dict(test_data_1)
+        test_data_1['task_blueprint'] = task_blueprint_1
+        subtask_1 = models.Subtask.objects.create(**test_data_1)
+        test_data_2 = dict(test_data_2)
+        test_data_2['task_blueprint'] = task_blueprint_2
+        subtask_2 = models.Subtask.objects.create(**test_data_2)
+
+        # assert the returned list contains related items, a list of length 1 is retrieved
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/subtask/' % task_blueprint_2.id, test_data_2, 1)
+        # assert an existing related item is returned
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/subtask/%s/' %
+                                                  (task_blueprint_2.id, subtask_2.id), test_data_2)
+        # assert an existing unrelated item is not returned
+        GET_and_assert_equal_expected_code(self,
+                                           BASE_URL + '/task_blueprint/%s/subtask/%s/' % (task_blueprint_2.id, subtask_1.id), 404)
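The nested /task_blueprint/<id>/subtask/ route asserted above implies that the subtask list is filtered by the parent blueprint on the server side. The view code is not part of this diff; a typical Django REST Framework sketch of such filtering (viewset, serializer and kwarg names are assumptions):

    from rest_framework import viewsets

    class SubtaskViewSet(viewsets.ModelViewSet):
        queryset = models.Subtask.objects.all()
        serializer_class = SubtaskSerializer  # assumed to exist

        def get_queryset(self):
            # on /task_blueprint/<task_blueprint_id>/subtask/ only return subtasks
            # of that blueprint, so an unrelated subtask id yields the 404 asserted above
            queryset = super().get_queryset()
            blueprint_id = self.kwargs.get('task_blueprint_id')
            if blueprint_id is not None:
                queryset = queryset.filter(task_blueprint_id=blueprint_id)
            return queryset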
{"count": 1}) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, {"count": 1}) # PATCH item with state update and verify log record is created test_patch = {"state": BASE_URL + "/subtask_state/finishing/"} PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) - GET_and_assert_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, 200, {"count": 2}) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, {"count": 2}) class DataproductTestCase(unittest.TestCase): @@ -417,7 +496,7 @@ class DataproductTestCase(unittest.TestCase): self.assertTrue("Dataproduct List" in r.content.decode('utf8')) def test_dataproduct_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/dataproduct/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/dataproduct/1234321/', 404) def test_dataproduct_POST_and_GET(self): dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url) @@ -425,7 +504,7 @@ class DataproductTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dp_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dp_test_data) def test_dataproduct_PUT_invalid_raises_error(self): dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url) @@ -439,11 +518,11 @@ class DataproductTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dp_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dp_test_data) # PUT new values, verify PUT_and_assert_expected_response(self, url, dp_test_data2, 200, dp_test_data2) - GET_and_assert_expected_response(self, url, 200, dp_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, dp_test_data2) def test_dataproduct_PATCH(self): dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url) @@ -451,7 +530,7 @@ class DataproductTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dp_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dp_test_data) test_patch = {"filename": 'my_better.filename', "deleted_since": datetime.utcnow().isoformat()} @@ -460,7 +539,7 @@ class DataproductTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(dp_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def 


 class DataproductTestCase(unittest.TestCase):
@@ -417,7 +496,7 @@ class DataproductTestCase(unittest.TestCase):
         self.assertTrue("Dataproduct List" in r.content.decode('utf8'))

     def test_dataproduct_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/dataproduct/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/dataproduct/1234321/', 404)

     def test_dataproduct_POST_and_GET(self):
         dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
@@ -425,7 +504,7 @@ class DataproductTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dp_test_data)

     def test_dataproduct_PUT_invalid_raises_error(self):
         dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
@@ -439,11 +518,11 @@ class DataproductTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dp_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, dp_test_data2, 200, dp_test_data2)
-        GET_and_assert_expected_response(self, url, 200, dp_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, dp_test_data2)

     def test_dataproduct_PATCH(self):
         dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
@@ -451,7 +530,7 @@ class DataproductTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dp_test_data)

         test_patch = {"filename": 'my_better.filename',
                       "deleted_since": datetime.utcnow().isoformat()}

@@ -460,7 +539,7 @@ class DataproductTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(dp_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_dataproduct_DELETE(self):
         dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
@@ -468,7 +547,7 @@ class DataproductTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dp_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -485,14 +564,14 @@ class DataproductTestCase(unittest.TestCase):
         test_data = dict(dp_test_data)
         test_data['dataformat'] = dataformat_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(dataformat_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, dataformat_url, 200, dataformat_data)
+        GET_OK_and_assert_equal_expected_response(self, dataformat_url, dataformat_data)

     def test_dataproduct_CASCADE_behavior_on_specifications_template_deleted(self):
         specifications_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/dataproduct_specifications_template/')
@@ -500,13 +579,31 @@ class DataproductTestCase(unittest.TestCase):
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dp_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, specifications_template_url)

         # assert item gone
-        GET_and_assert_expected_response(self, url, 404, {})
+        GET_and_assert_equal_expected_code(self, url, 404)
+
+    def test_GET_Dataproduct_list_view_shows_entry(self):
+
+        test_data_1 = Dataproduct_test_data()
+        models.Dataproduct.objects.create(**test_data_1)
+        nbr_results = models.Dataproduct.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/dataproduct/', test_data_1, nbr_results)
+
+    def test_GET_Dataproduct_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = Dataproduct_test_data()
+        test_data_2 = Dataproduct_test_data()
+        id1 = models.Dataproduct.objects.create(**test_data_1).id
+        id2 = models.Dataproduct.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct/%s/' % id2, test_data_2)


 class SubtaskConnectorTestCase(unittest.TestCase):
@@ -516,7 +613,7 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         self.assertTrue("Subtask Connector List" in r.content.decode('utf8'))

     def test_subtask_connector_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/subtask_connector/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_connector/1234321/', 404)

     def test_subtask_connector_POST_and_GET(self):
         stc_test_data = test_data_creator.SubtaskConnector()
@@ -524,7 +621,7 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, stc_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, stc_test_data)

     def test_subtask_connector_PUT_invalid_raises_error(self):
         stc_test_data = test_data_creator.SubtaskConnector()
@@ -538,11 +635,11 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, stc_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, stc_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, stc_test_data2, 200, stc_test_data2)
-        GET_and_assert_expected_response(self, url, 200, stc_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, stc_test_data2)

     def test_subtask_connector_PATCH(self):
         stc_test_data = test_data_creator.SubtaskConnector()
@@ -550,7 +647,7 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, stc_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, stc_test_data)

         test_patch = {"role": BASE_URL + '/role/calibrator/',
                       "datatype": BASE_URL + '/datatype/quality/',
                       }

@@ -559,7 +656,7 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(stc_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_connector_DELETE(self):
         stc_test_data = test_data_creator.SubtaskConnector()
@@ -567,7 +664,7 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, stc_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, stc_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -585,15 +682,14 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         test_data = dict(stc_test_data)
         test_data['role'] = role_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(role_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, role_url, 200, role_data)
-
+        GET_OK_and_assert_equal_expected_response(self, role_url, role_data)

     def test_subtask_connector_PROTECT_behavior_on_datatype_deleted(self):
         stc_test_data = test_data_creator.SubtaskConnector()
@@ -607,14 +703,46 @@ class SubtaskConnectorTestCase(unittest.TestCase):
         test_data = dict(stc_test_data)
         test_data['datatype'] = datatype_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(datatype_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, datatype_url, 200, datatype_data)
+        GET_OK_and_assert_equal_expected_response(self, datatype_url, datatype_data)
+
+    def test_GET_SubtaskConnector_list_view_shows_entry(self):
+
+        test_data_1 = SubtaskConnector_test_data()
+        models.SubtaskConnector.objects.create(**test_data_1)
+        nbr_results = models.SubtaskConnector.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask_connector/', test_data_1, nbr_results)
+
+    def test_GET_SubtaskConnector_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = SubtaskConnector_test_data()
+        test_data_2 = SubtaskConnector_test_data()
+        id1 = models.SubtaskConnector.objects.create(**test_data_1).id
+        id2 = models.SubtaskConnector.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s/' % id2, test_data_2)
+
+    def test_SubtaskConnector_allows_setting_dataformats(self):
+        """
+        Other than through the API view, we cannot assign ManyToMany on creation, but have to set it later
+        """
+        test_data_1 = dict(SubtaskConnector_test_data())
+        test_data_1['inputs'] = None
+        test_data_2 = SubtaskConnector_test_data()
+        tior = models.SubtaskConnector.objects.create(**test_data_2)
+        tior.dataformats.set([models.Dataformat.objects.get(value='Beamformed'),
+                              models.Dataformat.objects.get(value='MeasurementSet')])
+        tior.save()
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s' % tior.id, test_data_2)
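test_SubtaskConnector_allows_setting_dataformats follows the standard Django idiom for many-to-many fields: objects.create() cannot take the m2m value, so the relation is attached afterwards with .set(). Condensed (note that .set() persists immediately, so the extra save() in the test is not strictly required):

    connector = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data())
    connector.dataformats.set(models.Dataformat.objects.filter(
        value__in=['Beamformed', 'MeasurementSet']))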


 class SubtaskInputTestCase(unittest.TestCase):
@@ -636,7 +764,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         self.assertTrue("Subtask Input List" in r.content.decode('utf8'))

     def test_subtask_input_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/subtask_input/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_input/1234321/', 404)

     def test_subtask_input_POST_and_GET(self):
         sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
@@ -644,7 +772,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

     def test_subtask_input_PUT_invalid_raises_error(self):
         sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
@@ -657,12 +785,12 @@ class SubtaskInputTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # PUT new values, verify
         sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
         PUT_and_assert_expected_response(self, url, sti_test_data2, 200, sti_test_data2)
-        GET_and_assert_expected_response(self, url, 200, sti_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data2)

     def test_subtask_input_PATCH(self):
         sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
@@ -670,7 +798,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # make new subtask_url instance, but reuse related data for speed
         subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
@@ -685,7 +813,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(sti_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_input_DELETE(self):
         sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
@@ -693,7 +821,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -708,13 +836,13 @@ class SubtaskInputTestCase(unittest.TestCase):

         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, subtask_url)

         # assert item gone
-        GET_and_assert_expected_response(self, url, 404, {})
+        GET_and_assert_equal_expected_code(self, url, 404)

     def test_subtask_input_SET_NULL_behavior_on_connector_deleted(self):
         subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/')
@@ -722,7 +850,7 @@ class SubtaskInputTestCase(unittest.TestCase):

         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, subtask_connector_url)
@@ -730,7 +858,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         # assert item reference is set null
         expected_data = dict(sti_test_data)
         expected_data['connector'] = None
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self):
         # make new task_relation_blueprint instance, but reuse related data for speed
@@ -741,7 +869,7 @@ class SubtaskInputTestCase(unittest.TestCase):

         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, task_relation_blueprint_url)
@@ -749,7 +877,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         # assert item reference is set null
         expected_data = dict(sti_test_data)
         expected_data['task_relation_blueprint'] = None
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_input_PROTECT_behavior_on_producer_deleted(self):
         # make new subtask_output_url instance, but reuse related data for speed
@@ -758,14 +886,14 @@ class SubtaskInputTestCase(unittest.TestCase):

         # POST with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(subtask_output_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, subtask_output_url, 200, {})
+        GET_and_assert_equal_expected_code(self, subtask_output_url, 200)

     def test_subtask_input_PROTECT_behavior_on_selection_template_deleted(self):
         subtask_input_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/')
@@ -778,14 +906,45 @@ class SubtaskInputTestCase(unittest.TestCase):

         # POST with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, sti_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sti_test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(subtask_input_selection_template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, subtask_input_selection_template_url, 200, {})
+        GET_and_assert_equal_expected_code(self, subtask_input_selection_template_url, 200)
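Note the change of intent in the two PROTECT tests above: the old calls passed an empty expected dict, which (assuming the helper iterates over the expected key/value pairs, as sketched earlier) asserted nothing about the response body. The new call states the status-only check explicitly:

    # before: the body comparison against {} is vacuous
    GET_and_assert_expected_response(self, subtask_output_url, 200, {})
    # after: explicitly assert only the status code
    GET_and_assert_equal_expected_code(self, subtask_output_url, 200)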
+
+    def test_GET_SubtaskInput_list_view_shows_entry(self):
+
+        test_data_1 = SubtaskInput_test_data()
+        models.SubtaskInput.objects.create(**test_data_1)
+        nbr_results = models.SubtaskInput.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask_input/', test_data_1, nbr_results)
+
+    def test_GET_SubtaskInput_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = SubtaskInput_test_data()
+        test_data_2 = SubtaskInput_test_data()
+        id1 = models.SubtaskInput.objects.create(**test_data_1).id
+        id2 = models.SubtaskInput.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_input/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_input/%s/' % id2, test_data_2)
+
+    def test_SubtaskInput_allows_setting_dataproducts(self):
+
+        test_data_1 = SubtaskInput_test_data()
+        dpt_test_data_1 = Dataproduct_test_data()
+        dpt_test_data_2 = Dataproduct_test_data()
+        # Other than through the API view, we cannot assign ManyToMany on creation, but have to set it later
+        si = models.SubtaskInput.objects.create(**test_data_1)
+        si.dataproducts.set([models.Dataproduct.objects.create(**dpt_test_data_1),
+                             models.Dataproduct.objects.create(**dpt_test_data_2)])
+        si.save()
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_input/%s/' % si.id, test_data_1)


 class SubtaskOutputTestCase(unittest.TestCase):
@@ -802,7 +961,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         self.assertTrue("Subtask Output List" in r.content.decode('utf8'))

     def test_subtask_output_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/subtask_output/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_output/1234321/', 404)

     def test_subtask_output_POST_and_GET(self):
         sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
@@ -811,7 +970,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sto_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data)

     def test_subtask_output_PUT_invalid_raises_error(self):
         sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
@@ -824,11 +983,11 @@ class SubtaskOutputTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sto_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, sto_test_data2, 200, sto_test_data2)
-        GET_and_assert_expected_response(self, url, 200, sto_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data2)

     def test_subtask_output_PATCH(self):
         sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
@@ -838,7 +997,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sto_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data)

         test_patch = {"subtask": sto_test_data2["subtask"],
                       "tags": ['FANCYTAG'],
                       }

@@ -847,7 +1006,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(sto_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_subtask_output_DELETE(self):
         sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
@@ -856,7 +1015,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, sto_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -868,13 +1027,13 @@ class SubtaskOutputTestCase(unittest.TestCase):
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, sto_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, subtask_url)

         # assert item gone
-        GET_and_assert_expected_response(self, url, 404, {})
+        GET_and_assert_equal_expected_code(self, url, 404)

     def test_subtask_output_SET_NULL_behavior_on_connector_deleted(self):
         sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
@@ -883,7 +1042,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         url = \
             POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)[
                 'url']
-        GET_and_assert_expected_response(self, url, 200, sto_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, sto_test_data)

         # DELETE dependency and check it's gone
         DELETE_and_assert_gone(self, sto_test_data['connector'])
@@ -891,8 +1050,25 @@ class SubtaskOutputTestCase(unittest.TestCase):
         # assert item reference is set null
         expected_data = dict(sto_test_data)
         expected_data['connector'] = None
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)
+
+    def test_GET_SubtaskOutput_list_view_shows_entry(self):
+
+        test_data_1 = SubtaskOutput_test_data()
+        models.SubtaskOutput.objects.create(**test_data_1)
+        nbr_results = models.SubtaskOutput.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask_output/', test_data_1, nbr_results)
+    def test_GET_SubtaskOutput_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = SubtaskOutput_test_data()
+        test_data_2 = SubtaskOutput_test_data()
+        id1 = models.SubtaskOutput.objects.create(**test_data_1).id
+        id2 = models.SubtaskOutput.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_output/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_output/%s/' % id2, test_data_2)


 class AntennaSetTestCase(unittest.TestCase):
@@ -902,7 +1078,7 @@ class AntennaSetTestCase(unittest.TestCase):
         self.assertTrue("Antenna Set List" in r.content.decode('utf8'))

     def test_antenna_set_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/antenna_set/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/antenna_set/1234321/', 404)

     def test_antenna_set_POST_and_GET(self):
         antennaset_test_data = test_data_creator.AntennaSet()
@@ -910,7 +1086,7 @@ class AntennaSetTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, antennaset_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, antennaset_test_data)

     def test_antenna_set_PUT_invalid_raises_error(self):
         antennaset_test_data = test_data_creator.AntennaSet()
@@ -924,11 +1100,11 @@ class AntennaSetTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, antennaset_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, antennaset_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, antennaset_test_data2, 200, antennaset_test_data2)
-        GET_and_assert_expected_response(self, url, 200, antennaset_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, antennaset_test_data2)

     def test_antenna_set_PATCH(self):
         antennaset_test_data = test_data_creator.AntennaSet()
@@ -936,7 +1112,7 @@ class AntennaSetTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, antennaset_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, antennaset_test_data)

         test_patch = {"rcus": [11, 12, 13, 14, 15],
                       "station_type": BASE_URL + '/station_type/remote/'}
@@ -945,7 +1121,7 @@ class AntennaSetTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(antennaset_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_antenna_set_DELETE(self):
         antennaset_test_data = test_data_creator.AntennaSet()
@@ -953,7 +1129,7 @@ class AntennaSetTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, antennaset_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, antennaset_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -970,14 +1146,32 @@ class AntennaSetTestCase(unittest.TestCase):
         test_data = dict(antennaset_test_data)
         test_data['station_type'] = dataformat_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', test_data, 201, test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(dataformat_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, dataformat_url, 200, dataformat_data)
+        GET_OK_and_assert_equal_expected_response(self, dataformat_url, dataformat_data)
+
+    def test_GET_AntennaSet_list_view_shows_entry(self):
+
+        test_data_1 = AntennaSet_test_data()
+        models.AntennaSet.objects.create(**test_data_1)
+        nbr_results = models.AntennaSet.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/antenna_set/', test_data_1, nbr_results)
+
+    def test_GET_AntennaSet_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = AntennaSet_test_data()
+        test_data_2 = AntennaSet_test_data()
+        id1 = models.AntennaSet.objects.create(**test_data_1).id
+        id2 = models.AntennaSet.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/antenna_set/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/antenna_set/%s/' % id2, test_data_2)


 class DataproductTransformTestCase(unittest.TestCase):
@@ -994,7 +1188,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         self.assertTrue("Dataproduct Transform List" in r.content.decode('utf8'))

     def test_dataproduct_transform_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/dataproduct_transform/1234321/', 404)

     def test_dataproduct_transform_POST_and_GET(self):
         dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
@@ -1002,7 +1196,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data)

     def test_dataproduct_transform_PUT_invalid_raises_error(self):
         dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
@@ -1016,11 +1210,11 @@ class DataproductTransformTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, dpt_test_data2, 200, dpt_test_data2)
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data2)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data2)

     def test_dataproduct_transform_PATCH(self):
         dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
@@ -1028,7 +1222,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data)

         # make new output_dataproduct_url instance, but reuse related data for speed
         output_dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.output_dataproduct_data['specifications_template'],
@@ -1043,7 +1237,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch)
         expected_data = dict(dpt_test_data)
         expected_data.update(test_patch)
-        GET_and_assert_expected_response(self, url, 200, expected_data)
+        GET_OK_and_assert_equal_expected_response(self, url, expected_data)

     def test_dataproduct_transform_DELETE(self):
         dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
@@ -1051,7 +1245,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data)

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -1066,14 +1260,14 @@ class DataproductTransformTestCase(unittest.TestCase):

         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(input_dataproduct_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, input_dataproduct_url, 200, input_dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, input_dataproduct_url, input_dp_test_data)

     def test_dataproduct_transform_PROTECT_behavior_on_output_deleted(self):
         # make new output_dataproduct_url instance, but reuse related data for speed
@@ -1085,14 +1279,32 @@ class DataproductTransformTestCase(unittest.TestCase):

         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)['url']
-        GET_and_assert_expected_response(self, url, 200, dpt_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, dpt_test_data)

         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
         response = requests.delete(output_dataproduct_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, output_dataproduct_url, 200, output_dp_test_data)
+        GET_OK_and_assert_equal_expected_response(self, output_dataproduct_url, output_dp_test_data)
+
+    def test_GET_DataproductTransform_list_view_shows_entry(self):
+
+        test_data_1 = DataproductTransform_test_data()
+        models.DataproductTransform.objects.create(**test_data_1)
+        nbr_results = models.DataproductTransform.objects.count()
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/dataproduct_transform/', test_data_1, nbr_results)
+
+    def test_GET_DataproductTransform_view_returns_correct_entry(self):
+
+        # setup
+        test_data_1 = DataproductTransform_test_data()
+        test_data_2 = DataproductTransform_test_data()
+        id1 = models.DataproductTransform.objects.create(**test_data_1).id
+        id2 = models.DataproductTransform.objects.create(**test_data_2).id
+        # assert
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_transform/%s/' % id1, test_data_1)
+        GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_transform/%s/' % id2, test_data_2)


 class FilesystemTestCase(unittest.TestCase):
@@ -1102,7 +1314,7 @@ class FilesystemTestCase(unittest.TestCase):
         self.assertTrue("Filesystem List" in r.content.decode('utf8'))

     def test_filesystem_GET_nonexistant_raises_error(self):
-        GET_and_assert_expected_response(self, BASE_URL + '/filesystem/1234321/', 404, {})
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/filesystem/1234321/', 404)

     def test_filesystem_POST_and_GET(self):
         fs_test_data = test_data_creator.Filesystem()
@@ -1110,7 +1322,7 @@ class FilesystemTestCase(unittest.TestCase):
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, fs_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, fs_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, fs_test_data)

     def test_filesystem_PUT_invalid_raises_error(self):
         fs_test_data = test_data_creator.Filesystem()
@@ -1125,13 +1337,13 @@ class FilesystemTestCase(unittest.TestCase):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, fs_test_data)
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, fs_test_data)
+        GET_OK_and_assert_equal_expected_response(self, url, fs_test_data)
fs_test_data) fs_test_data2 = test_data_creator.Filesystem() # PUT new values, verify PUT_and_assert_expected_response(self, url, fs_test_data2, 200, fs_test_data2) - GET_and_assert_expected_response(self, url, 200, fs_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, fs_test_data2) def test_filesystem_PATCH(self): cluster_url = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/') @@ -1141,7 +1353,7 @@ class FilesystemTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, fs_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, fs_test_data) + GET_OK_and_assert_equal_expected_response(self, url, fs_test_data) cluster_url2 = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/') test_patch = {"cluster": cluster_url2, @@ -1151,7 +1363,7 @@ class FilesystemTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(fs_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_filesystem_DELETE(self): fs_test_data = test_data_creator.Filesystem() @@ -1160,7 +1372,7 @@ class FilesystemTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, fs_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, fs_test_data) + GET_OK_and_assert_equal_expected_response(self, url, fs_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1171,14 +1383,32 @@ class FilesystemTestCase(unittest.TestCase): # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, fs_test_data)['url'] - GET_and_assert_expected_response(self, url, 200, fs_test_data) + GET_OK_and_assert_equal_expected_response(self, url, fs_test_data) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
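
[Editor's note] The 500/ProtectedError pattern asserted in these PROTECT tests comes from Django's foreign-key deletion protection. The TMSS models themselves are not part of this diff, so the following is only a minimal sketch of the kind of declaration that produces this behaviour; the model and field names are illustrative:

```python
# Minimal sketch, not the actual TMSS models (those are not in this diff).
# A ForeignKey declared with on_delete=models.PROTECT makes deleting the
# referenced row raise django.db.models.ProtectedError, which, unhandled,
# surfaces as the HTTP 500 Django debug page asserted in the tests above.
from django.db import models

class Cluster(models.Model):
    name = models.CharField(max_length=128)

class Filesystem(models.Model):
    capacity = models.BigIntegerField()
    # Deleting a Cluster that still has Filesystems raises ProtectedError.
    cluster = models.ForeignKey(Cluster, on_delete=models.PROTECT)
```
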
response = requests.delete(fs_test_data['cluster'], auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, fs_test_data['cluster'], 200, {}) + GET_and_assert_equal_expected_code(self, fs_test_data['cluster'], 200) + + def test_GET_Filesystem_list_view_shows_entry(self): + + test_data_1 = Filesystem_test_data() + models.Filesystem.objects.create(**test_data_1) + nbr_results = models.Filesystem.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/filesystem/', test_data_1, nbr_results) + + def test_GET_Filesystem_view_returns_correct_entry(self): + + # setup + test_data_1 = Filesystem_test_data() + test_data_2 = Filesystem_test_data() + id1 = models.Filesystem.objects.create(**test_data_1).id + id2 = models.Filesystem.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/filesystem/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/filesystem/%s/' % id2, test_data_2) class ClusterTestCase(unittest.TestCase): @@ -1188,7 +1418,7 @@ class ClusterTestCase(unittest.TestCase): self.assertTrue("Cluster List" in r.content.decode('utf8')) def test_cluster_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/cluster/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/cluster/1234321/', 404) def test_cluster_POST_and_GET(self): c_test_data = test_data_creator.Cluster() @@ -1196,7 +1426,7 @@ class ClusterTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, c_test_data) + GET_OK_and_assert_equal_expected_response(self, url, c_test_data) def test_cluster_PUT_invalid_raises_error(self): c_test_data = test_data_creator.Cluster() @@ -1208,13 +1438,13 @@ class ClusterTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, c_test_data) + GET_OK_and_assert_equal_expected_response(self, url, c_test_data) c_test_data2 = test_data_creator.Cluster() # PUT new values, verify PUT_and_assert_expected_response(self, url, c_test_data2, 200, c_test_data2) - GET_and_assert_expected_response(self, url, 200, c_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, c_test_data2) def test_cluster_PATCH(self): c_test_data = test_data_creator.Cluster() @@ -1222,7 +1452,7 @@ class ClusterTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, c_test_data) + GET_OK_and_assert_equal_expected_response(self, url, c_test_data) test_patch = {"location": 'at the other end of the universe'} @@ -1230,7 +1460,7 @@ class ClusterTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(c_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_cluster_DELETE(self): c_test_data = test_data_creator.Cluster() @@ -1238,11 
+1468,29 @@ class ClusterTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, c_test_data) + GET_OK_and_assert_equal_expected_response(self, url, c_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) + def test_GET_Cluster_list_view_shows_entry(self): + + test_data_1 = Cluster_test_data("Cluster one") + models.Cluster.objects.create(**test_data_1) + nbr_results = models.Cluster.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cluster/', test_data_1, nbr_results) + + def test_GET_Cluster_view_returns_correct_entry(self): + + # setup + test_data_1 = Cluster_test_data("Cluster 1") + test_data_2 = Cluster_test_data("Cluster 2") + id1 = models.Cluster.objects.create(**test_data_1).id + id2 = models.Cluster.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cluster/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cluster/%s/' % id2, test_data_2) + class DataproductHashTestCase(unittest.TestCase): @classmethod @@ -1255,7 +1503,7 @@ class DataproductHashTestCase(unittest.TestCase): self.assertTrue("Dataproduct Hash List" in r.content.decode('utf8')) def test_dataproduct_hash_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/dataproduct_hash/1234321/', 404) def test_dataproduct_hash_POST_and_GET(self): dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url) @@ -1264,7 +1512,7 @@ class DataproductHashTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, dph_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dph_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) def test_dataproduct_hash_PUT_invalid_raises_error(self): dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url) @@ -1280,11 +1528,11 @@ class DataproductHashTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, dph_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dph_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) # PUT new values, verify PUT_and_assert_expected_response(self, url, dph_test_data2, 200, dph_test_data2) - GET_and_assert_expected_response(self, url, 200, dph_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data2) def test_dataproduct_hash_PATCH(self): dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url) @@ -1293,7 +1541,7 @@ class DataproductHashTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, dph_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dph_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) test_patch = {"algorithm": BASE_URL + '/algorithm/aes256/', "hash": 'bender-was-here'} @@ -1302,7 +1550,7 @@ class DataproductHashTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) 
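
[Editor's note] This diff systematically replaces the old GET_and_assert_expected_response helper with the more specific GET_OK_and_assert_equal_expected_response, GET_and_assert_equal_expected_code and GET_and_assert_in_expected_response_result_list. The helpers live in the shared test utilities and are not shown in this diff, so the bodies below are only a plausible reconstruction: the names and call signatures come from the tests, everything else is an assumption.

```python
# Plausible reconstruction of the split GET helpers used throughout this file.
# Only the names/signatures come from the diff; the bodies are assumptions.
import requests

def GET_and_assert_equal_expected_code(test, url, expected_code):
    """GET the url and check only the returned http status code."""
    response = requests.get(url, auth=AUTH)  # AUTH as used elsewhere in this file
    test.assertEqual(expected_code, response.status_code)
    return response

def GET_OK_and_assert_equal_expected_response(test, url, expected_data):
    """GET the url, expect 200 OK, and compare the json body to expected_data."""
    response = GET_and_assert_equal_expected_code(test, url, 200)
    json_body = response.json()
    for key, value in expected_data.items():
        test.assertEqual(value, json_body.get(key))
    return json_body
```
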
expected_data = dict(dph_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_dataproduct_hash_DELETE(self): dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url) @@ -1311,7 +1559,7 @@ class DataproductHashTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, dph_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dph_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1322,14 +1570,14 @@ class DataproductHashTestCase(unittest.TestCase): # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, dph_test_data)['url'] - GET_and_assert_expected_response(self, url, 200, dph_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(dph_test_data['dataproduct'], auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, dph_test_data['dataproduct'], 200, {}) + GET_and_assert_equal_expected_code(self, dph_test_data['dataproduct'], 200) def test_dataproduct_hash_PROTECT_behavior_on_algorithm_deleted(self): dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url) @@ -1337,14 +1585,32 @@ class DataproductHashTestCase(unittest.TestCase): # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, dph_test_data)['url'] - GET_and_assert_expected_response(self, url, 200, dph_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
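
[Editor's note] As the recurring comment above notes, a protected delete currently yields a raw 500 with a Django debug page rather than a JSON error, and the tests accept that as-is. Purely as an illustration of one possible later cleanup, which this change does not implement, a custom DRF exception handler could map ProtectedError to a clean 4xx:

```python
# Illustration only: the current code (and these tests) keep the 500 behaviour.
# A custom DRF exception handler could turn ProtectedError into a clean 409.
# Wiring would go in settings, e.g.
#   REST_FRAMEWORK = {'EXCEPTION_HANDLER': 'myapp.handlers.protected_error_exception_handler'}
from django.db.models import ProtectedError
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler

def protected_error_exception_handler(exc, context):
    response = exception_handler(exc, context)  # let DRF handle its own exceptions first
    if response is None and isinstance(exc, ProtectedError):
        return Response({'detail': str(exc)}, status=status.HTTP_409_CONFLICT)
    return response
```
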
response = requests.delete(dph_test_data['algorithm'], auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, dph_test_data['algorithm'], 200, {}) + GET_and_assert_equal_expected_code(self, dph_test_data['algorithm'], 200) + + def test_GET_DataproductHash_list_view_shows_entry(self): + + test_data_1 = DataproductHash_test_data() + models.DataproductHash.objects.create(**test_data_1) + nbr_results = models.DataproductHash.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/dataproduct_hash/', test_data_1, nbr_results) + + def test_GET_DataproductHash_view_returns_correct_entry(self): + + # setup + test_data_1 = DataproductHash_test_data() + test_data_2 = DataproductHash_test_data() + id1 = models.DataproductHash.objects.create(**test_data_1).id + id2 = models.DataproductHash.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_hash/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_hash/%s/' % id2, test_data_2) class DataproductArchiveInfoTestCase(unittest.TestCase): @@ -1358,7 +1624,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): self.assertTrue("Dataproduct Archive Info List" in r.content.decode('utf8')) def test_dataproduct_archive_info_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/dataproduct_archive_info/1234321/', 404) def test_dataproduct_archive_info_POST_and_GET(self): dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url) @@ -1367,7 +1633,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201, dpai_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dpai_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dpai_test_data) def test_dataproduct_archive_info_PUT_invalid_raises_error(self): dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url) @@ -1383,11 +1649,11 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201, dpai_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dpai_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dpai_test_data) # PUT new values, verify PUT_and_assert_expected_response(self, url, dpai_test_data2, 200, dpai_test_data2) - GET_and_assert_expected_response(self, url, 200, dpai_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, dpai_test_data2) def test_dataproduct_archive_info_PATCH(self): dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url) @@ -1396,7 +1662,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201, dpai_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dpai_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dpai_test_data) test_patch = {"storage_ticket": "mygoldenticket"} @@ -1404,7 +1670,7 @@ class 
DataproductArchiveInfoTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(dpai_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_dataproduct_archive_info_DELETE(self): dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url) @@ -1413,7 +1679,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201, dpai_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, dpai_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dpai_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1424,14 +1690,32 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201, dpai_test_data)['url'] - GET_and_assert_expected_response(self, url, 200, dpai_test_data) + GET_OK_and_assert_equal_expected_response(self, url, dpai_test_data) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(dpai_test_data['dataproduct'], auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, dpai_test_data['dataproduct'], 200, {}) + GET_and_assert_equal_expected_code(self, dpai_test_data['dataproduct'], 200) + + def test_GET_DataproductArchiveInfo_list_view_shows_entry(self): + + test_data_1 = DataproductArchiveInfo_test_data() + models.DataproductArchiveInfo.objects.create(**test_data_1) + nbr_results = models.DataproductArchiveInfo.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/dataproduct_archive_info/', test_data_1, nbr_results) + + def test_GET_DataproductArchiveInfo_view_returns_correct_entry(self): + + # setup + test_data_1 = DataproductArchiveInfo_test_data() + test_data_2 = DataproductArchiveInfo_test_data() + id1 = models.DataproductArchiveInfo.objects.create(**test_data_1).id + id2 = models.DataproductArchiveInfo.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_archive_info/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_archive_info/%s/' % id2, test_data_2) class SubtaskQuery(unittest.TestCase): @@ -1464,7 +1748,7 @@ class SubtaskQuery(unittest.TestCase): return json_response.get('count') @staticmethod - def create_cluster_object(cluster_name): + def create_cluster(cluster_name): cluster_data = Cluster_test_data(name=cluster_name) return models.Cluster.objects.create(**cluster_data) @@ -1474,13 +1758,13 @@ class SubtaskQuery(unittest.TestCase): Create multiple subtasks for a given number of days with start_time 2 hours from now and stop_time 4 hours from now """ - cluster_object = SubtaskQuery.create_cluster_object(cluster_name) + cluster = SubtaskQuery.create_cluster(cluster_name) for day_idx in range(0, total_number): start_time = datetime.now() + timedelta(hours=2, days=day_idx) stop_time = datetime.now() + timedelta(hours=4, days=day_idx) subtask_data = 
Subtask_test_data(start_time=formatDatetime(start_time), stop_time=formatDatetime(stop_time), - cluster_object=cluster_object) + cluster=cluster) models.Subtask.objects.create(**subtask_data) subtasks_test_data_with_start_stop_time = {'clusterB': 50, 'clusterC': 30 } @@ -1493,8 +1777,8 @@ class SubtaskQuery(unittest.TestCase): clusterB 50 subtasks with start 2hr and stop time 4hr from now, recurring 'every day' clusterC 30 subtasks with start 2hr and stop time 4hr from now, recurring 'every day' """ - cluster_object = SubtaskQuery.create_cluster_object("clusterA") - subtask_data = Subtask_test_data(cluster_object=cluster_object) + cluster = SubtaskQuery.create_cluster("clusterA") + subtask_data = Subtask_test_data(cluster=cluster) models.Subtask.objects.create(**subtask_data) for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): SubtaskQuery.create_multiple_subtask_object(period_length_in_days, cluster_name) @@ -1647,6 +1931,46 @@ class SubtaskQuery(unittest.TestCase): (start_time, stop_time), auth=AUTH) self.check_response_OK_and_result_count(response, 0) + def test_query_state_only(self): + """ + Check the query on state value. Check status code and response length + All states are scheduling, None are defined + """ + logger.info("Check query on state scheduling") + response = requests.get(BASE_URL + '/subtask/?state__value=scheduling', auth=AUTH) + self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + + response = requests.get(BASE_URL + '/subtask/?state__value=defined', auth=AUTH) + self.check_response_OK_and_result_count(response, 0) + + def test_query_ordering_start_time(self): + """ + Check the query on ordering of start_time in ascending (default) and descending order + Check status code and response length + Check if next start_time in response is 'younger' in ascending order + Check if next start_time in response is 'older' in descending order + + """ + logger.info("Check query on ordering ascending start time") + response = requests.get(BASE_URL + '/subtask/?ordering=start_time', auth=AUTH) + self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + previous_start_time = "2000-01-01T00:00:00" + for item in response.json().get('results'): + start_time = item['start_time'] + self.assertGreater(start_time, previous_start_time, "The start time should be greater than the previous one") + previous_start_time = start_time + + + logger.info("Check query on ordering descending start time") + response = requests.get(BASE_URL + '/subtask/?ordering=-start_time', auth=AUTH) + self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + previous_start_time = "2100-01-01T00:00:00" + for item in response.json().get('results'): + start_time = item['start_time'] + self.assertLess(start_time, previous_start_time, "The start time should be smaller than the previous one") + previous_start_time = start_time + + if __name__ == "__main__": unittest.main() diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.run b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.run new file mode 100755 index 0000000000000000000000000000000000000000..b5b37a50d7b60ae4230352548d53b38a96cfa7ae --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_tmssapp_scheduling_REST_API.py + diff --git 
a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.sh b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.sh new file mode 100755 index 0000000000000000000000000000000000000000..c184de64f2b4cbe6fa4834d96c62573e2cb8196d --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +# Run Test +./runctest.sh t_tmssapp_scheduling_REST_API + diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django.py b/SAS/TMSS/test/t_tmssapp_scheduling_django.py deleted file mode 100755 index 28bdfefadbe5b2c11567f73c26fc8aa8bd167306..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django.py +++ /dev/null @@ -1,823 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. - -# $Id: $ - -import os -import unittest -from datetime import datetime -# use this to create timezone-aware datetime objects: from django.utils import timezone - -import logging -logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) - - -# todo: Tags? -> Decide how to deal with them first. -# todo: Immutability of Blueprints on db level? - -# Do Mandatory setup: -# use setup/teardown magic for tmss test database -# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_database_unittest_setup module) -from lofar.sas.tmss.test.tmss_database_unittest_setup import * - -from lofar.sas.tmss.test.tmss_test_data_django_models import * - -from django.db.utils import IntegrityError - -# TODO: rest API testing should be moved out of this test module. -# import rest_framework.test -# client = rest_framework.test.APIClient() -# from lofar.sas.tmss.test.test_utils import assertDataWithUrls, assertUrlList - -class SubtaskTemplateTest(unittest.TestCase): - def test_SubtaskTemplate_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SubtaskTemplate_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. 
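
[Editor's note] The new SubtaskQuery tests added above exercise ?state__value=... filtering and ?ordering=start_time / ?ordering=-start_time query parameters. The view-side configuration that answers those queries is not part of this diff; a hedged sketch of the usual DRF setup for such parameters, with field names inferred from the query strings and the serializer assumed to exist elsewhere, looks roughly like:

```python
# Hedged sketch of the DRF view-side setup the subtask query tests exercise.
# The real TMSS viewset is not in this diff; names are inferred from the tests.
from django_filters import rest_framework as filters
from rest_framework import viewsets
from rest_framework.filters import OrderingFilter

class SubtaskFilter(filters.FilterSet):
    class Meta:
        model = models.Subtask
        fields = {
            'state__value': ['exact'],    # /subtask/?state__value=scheduling
            'start_time': ['lt', 'gt'],   # /subtask/?start_time__gt=...
            'stop_time': ['lt', 'gt'],
            'cluster__name': ['exact'],
        }

class SubtaskViewSet(viewsets.ModelViewSet):
    queryset = models.Subtask.objects.all()
    serializer_class = SubtaskSerializer  # assumed to exist elsewhere
    filter_backends = (filters.DjangoFilterBackend, OrderingFilter)
    filter_class = SubtaskFilter          # 'filterset_class' in newer django-filter
    ordering_fields = ('start_time',)     # enables ?ordering=start_time / -start_time
```

Note that the ordering test compares ISO-8601 timestamp strings directly, which works because lexicographic and chronological order coincide for that format.
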
- # def test_GET_SubtaskTemplate_list_view_shows_entry(self): - # - # # setup - # entry = models.SubtaskTemplate.objects.create(**self.get_test_data()) - # - # # assert - # response = client.get('/subtask_template/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], self.get_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SubtaskTemplate_view_returns_correct_entry(self): - # - # # setup - # id1 = models.SubtaskTemplate.objects.create(**self.get_test_data()).id - # id2 = models.SubtaskTemplate.objects.create(**self.get_test_data()).id - # - # # assert - # response1 = client.get('/subtask_template/%s/' % id1, format='json', follow=True) - # response2 = client.get('/subtask_template/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.get_test_data()) - # assertDataWithUrls(self, response2.data, self.get_test_data()) - - -class DataproductSpecificationsTemplateTest(unittest.TestCase): - - def test_DataproductSpecificationsTemplate_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_DataproductSpecificationsTemplate_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_DataproductSpecificationsTemplate_list_view_shows_entry(self): - # - # # setup - # entry = models.DataproductSpecificationsTemplate.objects.create(**self.get_test_data()) - # - # # assert - # response = client.get('/dataproduct_specifications_template/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.get_test_data().items(): - # self.assertIn(item, response.data['results'][0].items()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_DataproductSpecificationsTemplate_view_returns_correct_entry(self): - # - # # setup - # id1 = models.DataproductSpecificationsTemplate.objects.create(**self.get_test_data()).id - # id2 = models.DataproductSpecificationsTemplate.objects.create(**self.get_test_data()).id - # - # # assert - # response1 = client.get('/dataproduct_specifications_template/%s/' % id1, format='json', follow=True) - # response2 = client.get('/dataproduct_specifications_template/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.get_test_data().items(): - # self.assertIn(item, response1.data.items()) - # for item in self.get_test_data().items(): - # self.assertIn(item, response2.data.items()) - - -class SubtaskInputSelectionTemplateTest(unittest.TestCase): - # This currently adds nothing on top of the template base class, so nothing new to test here. 
- pass - -class DataproductFeedbackTemplateTest(unittest.TestCase): - # This currently adds nothing on top of the template base class, so nothing new to test here. - pass - - -class SubtaskOutputTest(unittest.TestCase): - def test_SubtaskOutput_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SubtaskOutput_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SubtaskOutput_list_view_shows_entry(self): - # # setup - # models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) - # - # # assert - # response = client.get('/subtask_output/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], SubtaskOutput_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SubtaskOutput_view_returns_correct_entry(self): - # # setup - # id1 = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()).id - # id2 = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()).id - # - # # assert - # response1 = client.get('/subtask_output/%s/' % id1, format='json', follow=True) - # response2 = client.get('/subtask_output/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, SubtaskOutput_test_data()) - # assertDataWithUrls(self, response2.data, SubtaskOutput_test_data()) - - - def test_SubtaskOutput_prevents_missing_subtask(self): - - # setup - test_data = dict(SubtaskOutput_test_data()) - test_data['subtask'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SubtaskOutput.objects.create(**test_data) - - -class SubtaskInputTest(unittest.TestCase): - def test_SubtaskInput_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SubtaskInput.objects.create(**SubtaskInput_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SubtaskInput_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SubtaskInput.objects.create(**SubtaskInput_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SubtaskInput_list_view_shows_entry(self): - # # setup - # models.SubtaskInput.objects.create(**SubtaskInput_test_data()) - # - # # assert - # response = client.get('/subtask_input/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], SubtaskInput_test_data()) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_SubtaskInput_view_returns_correct_entry(self): - # # setup - # id1 = models.SubtaskInput.objects.create(**SubtaskInput_test_data()).id - # id2 = models.SubtaskInput.objects.create(**SubtaskInput_test_data()).id - # - # # assert - # response1 = client.get('/subtask_input/%s/' % id1, format='json', follow=True) - # response2 = client.get('/subtask_input/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, SubtaskInput_test_data()) - # assertDataWithUrls(self, response2.data, SubtaskInput_test_data()) - - - def test_SubtaskInput_prevents_missing_subtask(self): - - # setup - test_data = dict(SubtaskInput_test_data()) - test_data['subtask'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SubtaskInput.objects.create(**test_data) - - - # TODO: rest API testing should be moved out of this test module. - # def test_SubtaskInput_allows_setting_dataproducts(self): - # # Other then through the API view, we cannot assign ManyToMany on creation, but have to set it later - # si = models.SubtaskInput.objects.create(**SubtaskInput_test_data()) - # si.dataproducts.set([models.Dataproduct.objects.create(**Dataproduct_test_data()), - # models.Dataproduct.objects.create(**Dataproduct_test_data())]) - # si.save() - # - # # assert - # response = client.get('/subtask_input/%s' % si.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, SubtaskInput_test_data()) - - -class SubtaskTest(unittest.TestCase): - def test_Subtask_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.Subtask.objects.create(**Subtask_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_Subtask_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.Subtask.objects.create(**Subtask_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Subtask_list_view_shows_entry(self): - # - # # setup - # entry = models.Subtask.objects.create(**Subtask_test_data()) - # - # # assert - # response = client.get('/subtask/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], Subtask_test_data()) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_Subtask_view_returns_correct_entry(self): - # - # # setup - # id1 = models.Subtask.objects.create(**Subtask_test_data()).id - # id2 = models.Subtask.objects.create(**Subtask_test_data()).id - # - # # assert - # response1 = client.get('/subtask/%s/' % id1, format='json', follow=True) - # response2 = client.get('/subtask/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, Subtask_test_data()) - # assertDataWithUrls(self, response2.data, Subtask_test_data()) - - def test_Subtask_prevents_missing_template(self): - - # setup - test_data = dict(Subtask_test_data()) - test_data['specifications_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.Subtask.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test module. - # def test_nested_Subtask_are_filtered_according_to_TaskBlueprint(self): - # tbt = TaskBlueprintTest() - # tbt.setUp(populate=False) - # - # # setup - # task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt.get_test_data()) - # task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt.get_test_data()) - # test_data_1 = dict(Subtask_test_data()) - # test_data_1['task_blueprint'] = task_blueprint_1 - # subtask_1 = models.Subtask.objects.create(**test_data_1) - # test_data_2 = dict(Subtask_test_data()) - # test_data_2['task_blueprint'] = task_blueprint_2 - # subtask_2 = models.Subtask.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/task_blueprint/%s/subtask/' % task_blueprint_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get( - # '/task_blueprint/%s/subtask/%s/' % (task_blueprint_2.id, subtask_2.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get( '/task_blueprint/%s/subtask/%s/' % (task_blueprint_2.id, subtask_1.id), format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - -class DataproductTest(unittest.TestCase): - def test_Dataproduct_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.Dataproduct.objects.create(**Dataproduct_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_Dataproduct_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.Dataproduct.objects.create(**Dataproduct_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. 
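
[Editor's note] The prevents_missing_* tests in this module (carried over into the new django_API module further below) rely on plain database-level NOT NULL enforcement. A minimal illustration, with made-up models rather than the TMSS ones, of why creating an object with a None foreign key raises IntegrityError:

```python
# Minimal illustration (models are made up, not the TMSS ones): a ForeignKey
# is NOT NULL unless declared with null=True, so an INSERT with None violates
# the database constraint and Django raises django.db.utils.IntegrityError.
from django.db import models

class SubtaskTemplate(models.Model):
    name = models.CharField(max_length=128)

class Subtask(models.Model):
    # null=False (the default) is what makes
    # Subtask.objects.create(specifications_template=None) raise IntegrityError.
    specifications_template = models.ForeignKey(SubtaskTemplate, on_delete=models.PROTECT)
```
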
- # def test_GET_Dataproduct_list_view_shows_entry(self): - # - # # setup - # entry = models.Dataproduct.objects.create(**Dataproduct_test_data()) - # - # # assert - # response = client.get('/dataproduct/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], Dataproduct_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Dataproduct_view_returns_correct_entry(self): - # - # # setup - # id1 = models.Dataproduct.objects.create(**Dataproduct_test_data()).id - # id2 = models.Dataproduct.objects.create(**Dataproduct_test_data()).id - # - # # assert - # response1 = client.get('/dataproduct/%s/' % id1, format='json', follow=True) - # response2 = client.get('/dataproduct/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, Dataproduct_test_data()) - # assertDataWithUrls(self, response2.data, Dataproduct_test_data()) - - def test_Dataproduct_prevents_missing_specifications_template(self): - - # setup - test_data = dict(Dataproduct_test_data()) - test_data['specifications_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.Dataproduct.objects.create(**test_data) - - -class SubtaskConnectorTest(unittest.TestCase): - def test_SubtaskConnector_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SubtaskConnector_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SubtaskConnector_list_view_shows_entry(self): - # # setup - # models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - # - # # assert - # response = client.get('/subtask_connector/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], SubtaskConnector_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SubtaskConnector_view_returns_correct_entry(self): - # # setup - # id1 = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()).id - # id2 = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()).id - # - # # assert - # response1 = client.get('/subtask_connector/%s/' % id1, format='json', follow=True) - # response2 = client.get('/subtask_connector/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, SubtaskConnector_test_data()) - # assertDataWithUrls(self, response2.data, SubtaskConnector_test_data()) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_SubtaskConnector_allows_setting_dataformats(self): - # # Other then through the API view, we cannot assign ManyToMany on creation, but have to set it later - # - # test_data_1 = dict(SubtaskConnector_test_data()) - # test_data_1['inputs'] = None - # tior = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - # tior.dataformats.set([models.Dataformat.objects.get(value='Beamformed'), - # models.Dataformat.objects.get(value='MeasurementSet')]) - # tior.save() - # - # # assert - # response = client.get('/subtask_connector/%s' % tior.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, SubtaskConnector_test_data()) - -class AntennaSetTest(unittest.TestCase): - def test_AntennaSet_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.AntennaSet.objects.create(**AntennaSet_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_AntennaSet_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.AntennaSet.objects.create(**AntennaSet_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_AntennaSet_list_view_shows_entry(self): - # # setup - # models.AntennaSet.objects.create(**AntennaSet_test_data()) - # - # # assert - # response = client.get('/antenna_set/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], AntennaSet_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_AntennaSet_view_returns_correct_entry(self): - # # setup - # id1 = models.AntennaSet.objects.create(**AntennaSet_test_data()).id - # id2 = models.AntennaSet.objects.create(**AntennaSet_test_data()).id - # - # # assert - # response1 = client.get('/antenna_set/%s/' % id1, format='json', follow=True) - # response2 = client.get('/antenna_set/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, AntennaSet_test_data()) - # assertDataWithUrls(self, response2.data, AntennaSet_test_data()) - - -class DataproductTransformTest(unittest.TestCase): - def test_DataproductTransform_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.DataproductTransform.objects.create(**DataproductTransform_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_DataproductTransform_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.DataproductTransform.objects.create(**DataproductTransform_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_DataproductTransform_list_view_shows_entry(self): - # # setup - # models.DataproductTransform.objects.create(**DataproductTransform_test_data()) - # - # # assert - # response = client.get('/dataproduct_transform/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], DataproductTransform_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_DataproductTransform_view_returns_correct_entry(self): - # # setup - # id1 = models.DataproductTransform.objects.create(**DataproductTransform_test_data()).id - # id2 = models.DataproductTransform.objects.create(**DataproductTransform_test_data()).id - # - # # assert - # response1 = client.get('/dataproduct_transform/%s/' % id1, format='json', follow=True) - # response2 = client.get('/dataproduct_transform/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, DataproductTransform_test_data()) - # assertDataWithUrls(self, response2.data, DataproductTransform_test_data()) - - -class FilesystemTest(unittest.TestCase): - def test_Filesystem_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.Filesystem.objects.create(**Filesystem_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_Filesystem_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.Filesystem.objects.create(**Filesystem_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Filesystem_list_view_shows_entry(self): - # # setup - # models.Filesystem.objects.create(**Filesystem_test_data()) - # - # # assert - # response = client.get('/filesystem/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], Filesystem_test_data()) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_Filesystem_view_returns_correct_entry(self): - # # setup - # id1 = models.Filesystem.objects.create(**Filesystem_test_data()).id - # id2 = models.Filesystem.objects.create(**Filesystem_test_data()).id - # - # # assert - # response1 = client.get('/filesystem/%s/' % id1, format='json', follow=True) - # response2 = client.get('/filesystem/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, Filesystem_test_data()) - # assertDataWithUrls(self, response2.data, Filesystem_test_data()) - - -class ClusterTest(unittest.TestCase): - def test_Cluster_gets_created_with_correct_creation_timestamp(self): - # setup - before = datetime.utcnow() - entry = models.Cluster.objects.create(**Cluster_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_Cluster_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.Cluster.objects.create(**Cluster_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Cluster_list_view_shows_entry(self): - # # setup - # models.Cluster.objects.create(**Cluster_test_data()) - # - # # assert - # response = client.get('/cluster/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], Cluster_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Cluster_view_returns_correct_entry(self): - # # setup - # id1 = models.Cluster.objects.create(**Cluster_test_data()).id - # id2 = models.Cluster.objects.create(**Cluster_test_data()).id - # - # # assert - # response1 = client.get('/cluster/%s/' % id1, format='json', follow=True) - # response2 = client.get('/cluster/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, Cluster_test_data()) - # assertDataWithUrls(self, response2.data, Cluster_test_data()) - - -class DataproductArchiveInfoTest(unittest.TestCase): - def test_DataproductArchiveInfo_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_DataproductArchiveInfo_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_DataproductArchiveInfo_list_view_shows_entry(self): - # # setup - # models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()) - # - # # assert - # response = client.get('/dataproduct_archive_info/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], DataproductArchiveInfo_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_DataproductArchiveInfo_view_returns_correct_entry(self): - # # setup - # id1 = models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()).id - # id2 = models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()).id - # - # # assert - # response1 = client.get('/dataproduct_archive_info/%s/' % id1, format='json', follow=True) - # response2 = client.get('/dataproduct_archive_info/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, DataproductArchiveInfo_test_data()) - # assertDataWithUrls(self, response2.data, DataproductArchiveInfo_test_data()) - - -class DataproductHashTest(unittest.TestCase): - def test_DataproductHash_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.DataproductHash.objects.create(**DataproductHash_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_DataproductHash_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.DataproductHash.objects.create(**DataproductHash_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_DataproductHash_list_view_shows_entry(self): - # # setup - # models.DataproductHash.objects.create(**DataproductHash_test_data()) - # - # # assert - # response = client.get('/dataproduct_hash/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], DataproductHash_test_data()) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_DataproductHash_view_returns_correct_entry(self): - # # setup - # id1 = models.DataproductHash.objects.create(**DataproductHash_test_data()).id - # id2 = models.DataproductHash.objects.create(**DataproductHash_test_data()).id - # - # # assert - # response1 = client.get('/dataproduct_hash/%s/' % id1, format='json', follow=True) - # response2 = client.get('/dataproduct_hash/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, DataproductHash_test_data()) - # assertDataWithUrls(self, response2.data, DataproductHash_test_data()) - - -if __name__ == "__main__": - os.environ['TZ'] = 'UTC' - unittest.main() diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django.run b/SAS/TMSS/test/t_tmssapp_scheduling_django.run deleted file mode 100755 index d37f323e3811877fbf308a71d840def2d994c3b4..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django.run +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -# Run the unit test -source python-coverage.sh -python_coverage_test "*tmss*" t_tmssapp_scheduling_django.py - diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django.sh b/SAS/TMSS/test/t_tmssapp_scheduling_django.sh deleted file mode 100755 index 70dcbde44239cd230090b901a2cf356318c616c3..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -./runctest.sh t_tmssapp_scheduling_django \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py new file mode 100755 index 0000000000000000000000000000000000000000..9fa9a987f1e380d231159f80a43897d0c6435be9 --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py @@ -0,0 +1,435 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id: $ + +import os +import unittest +from datetime import datetime +# use this to create timezone-aware datetime objects: from django.utils import timezone + +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) + + +# todo: Tags? -> Decide how to deal with them first. +# todo: Immutability of Blueprints on db level? 
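
[Editor's note] The created_at/updated_at assertions repeated throughout this new module presumably test Django's auto-managed datetime fields on a shared base model. That base model is not part of this diff; a hypothetical, illustrative version:

```python
# Hypothetical base model illustrating the timestamp behaviour the tests below
# assert; the actual TMSS base class is not part of this diff.
from django.db import models

class TimestampedModel(models.Model):
    created_at = models.DateTimeField(auto_now_add=True)  # set once, on INSERT
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on every save()

    class Meta:
        abstract = True
```
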
+ +# Do Mandatory setup: +# use setup/teardown magic for tmss test database +# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_database_unittest_setup module) +from lofar.sas.tmss.test.tmss_database_unittest_setup import * + +from lofar.sas.tmss.test.tmss_test_data_django_models import * + +from django.db.utils import IntegrityError + + +class SubtaskTemplateTest(unittest.TestCase): + def test_SubtaskTemplate_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_SubtaskTemplate_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class DataproductSpecificationsTemplateTest(unittest.TestCase): + + def test_DataproductSpecificationsTemplate_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_DataproductSpecificationsTemplate_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class SubtaskInputSelectionTemplateTest(unittest.TestCase): + # This currently adds nothing on top of the template base class, so nothing new to test here. + pass + +class DataproductFeedbackTemplateTest(unittest.TestCase): + # This currently adds nothing on top of the template base class, so nothing new to test here. 
+ pass + + +class SubtaskOutputTest(unittest.TestCase): + def test_SubtaskOutput_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_SubtaskOutput_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + def test_SubtaskOutput_prevents_missing_subtask(self): + + # setup + test_data = dict(SubtaskOutput_test_data()) + test_data['subtask'] = None + + # assert + with self.assertRaises(IntegrityError): + models.SubtaskOutput.objects.create(**test_data) + + +class SubtaskInputTest(unittest.TestCase): + def test_SubtaskInput_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.SubtaskInput.objects.create(**SubtaskInput_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_SubtaskInput_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.SubtaskInput.objects.create(**SubtaskInput_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + def test_SubtaskInput_prevents_missing_subtask(self): + + # setup + test_data = dict(SubtaskInput_test_data()) + test_data['subtask'] = None + + # assert + with self.assertRaises(IntegrityError): + models.SubtaskInput.objects.create(**test_data) + + +class SubtaskTest(unittest.TestCase): + def test_Subtask_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.Subtask.objects.create(**Subtask_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_Subtask_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.Subtask.objects.create(**Subtask_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + def test_Subtask_prevents_missing_template(self): + + # setup + test_data = dict(Subtask_test_data()) + test_data['specifications_template'] = None + + # assert + with self.assertRaises(IntegrityError): + models.Subtask.objects.create(**test_data) + + +class DataproductTest(unittest.TestCase): + def test_Dataproduct_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.Dataproduct.objects.create(**Dataproduct_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_Dataproduct_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.Dataproduct.objects.create(**Dataproduct_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + + def 
test_Dataproduct_prevents_missing_specifications_template(self): + + # setup + test_data = dict(Dataproduct_test_data()) + test_data['specifications_template'] = None + + # assert + with self.assertRaises(IntegrityError): + models.Dataproduct.objects.create(**test_data) + + +class SubtaskConnectorTest(unittest.TestCase): + def test_SubtaskConnector_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_SubtaskConnector_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class AntennaSetTest(unittest.TestCase): + def test_AntennaSet_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.AntennaSet.objects.create(**AntennaSet_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_AntennaSet_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.AntennaSet.objects.create(**AntennaSet_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class DataproductTransformTest(unittest.TestCase): + def test_DataproductTransform_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.DataproductTransform.objects.create(**DataproductTransform_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_DataproductTransform_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.DataproductTransform.objects.create(**DataproductTransform_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class FilesystemTest(unittest.TestCase): + def test_Filesystem_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.Filesystem.objects.create(**Filesystem_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_Filesystem_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.Filesystem.objects.create(**Filesystem_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class ClusterTest(unittest.TestCase): + def test_Cluster_gets_created_with_correct_creation_timestamp(self): + # setup + before = datetime.utcnow() + entry = models.Cluster.objects.create(**Cluster_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def 
test_Cluster_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.Cluster.objects.create(**Cluster_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class DataproductArchiveInfoTest(unittest.TestCase): + def test_DataproductArchiveInfo_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_DataproductArchiveInfo_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.DataproductArchiveInfo.objects.create(**DataproductArchiveInfo_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +class DataproductHashTest(unittest.TestCase): + def test_DataproductHash_gets_created_with_correct_creation_timestamp(self): + + # setup + before = datetime.utcnow() + entry = models.DataproductHash.objects.create(**DataproductHash_test_data()) + + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.created_at) + self.assertGreater(after, entry.created_at) + + def test_DataproductHash_update_timestamp_gets_changed_correctly(self): + + # setup + entry = models.DataproductHash.objects.create(**DataproductHash_test_data()) + before = datetime.utcnow() + entry.save() + after = datetime.utcnow() + + # assert + self.assertLess(before, entry.updated_at) + self.assertGreater(after, entry.updated_at) + + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.run b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.run new file mode 100755 index 0000000000000000000000000000000000000000..beefa83c68579fae9f7ab2c32ed7f642f2cea87c --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_tmssapp_scheduling_django_API.py + diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.sh b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.sh new file mode 100755 index 0000000000000000000000000000000000000000..0122423a50f654fbba225b696aae3c7af5641ac0 --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_tmssapp_scheduling_django_API \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_functional.run b/SAS/TMSS/test/t_tmssapp_scheduling_functional.run deleted file mode 100755 index 1596420177818737b185dd401f1c57d0eabdeb65..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_tmssapp_scheduling_functional.run +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -# Run the unit test -source python-coverage.sh -python_coverage_test "*tmss*" t_tmssapp_scheduling_functional.py - diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_functional.sh b/SAS/TMSS/test/t_tmssapp_scheduling_functional.sh deleted file mode 100755 index e0d4072f682067df7a8e4d3713f9d6d626754505..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_tmssapp_scheduling_functional.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh - -# Run Test -./runctest.sh t_tmssapp_scheduling_functional 
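# The rename diff below mechanically replaces the old
# GET_and_assert_expected_response(self, url, expected_code, expected_data) helper
# with two narrower helpers. Their real implementations live in the (not shown)
# test utility module; judging purely from the call sites, a plausible sketch
# (signatures and bodies are assumptions; BASE_URL and AUTH come from the test
# environment setup these tests already import):
import requests

def GET_and_assert_equal_expected_code(test, url, expected_code):
    # GET the url and check only the status code; return the parsed body for reuse
    response = requests.get(url, auth=AUTH)
    test.assertEqual(expected_code, response.status_code)
    return response.json() if response.content else {}

def GET_OK_and_assert_equal_expected_response(test, url, expected_data):
    # GET the url, expect HTTP 200, and compare each expected key/value; the real
    # helper presumably also normalises nested URLs before comparing.
    r_dict = GET_and_assert_equal_expected_code(test, url, 200)
    for key, value in expected_data.items():
        test.assertEqual(value, r_dict[key])
    return r_dict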
- diff --git a/SAS/TMSS/test/t_tmssapp_specification_functional.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py similarity index 60% rename from SAS/TMSS/test/t_tmssapp_specification_functional.py rename to SAS/TMSS/test/t_tmssapp_specification_REST_API.py index fb3deef98e6e7f768d10ec60207e6453c257c5e4..8c51bdb7dd7f74966b898f36b02ab94f3eee33b1 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_functional.py +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py @@ -37,31 +37,37 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin # use setup/teardown magic for tmss test database, ldap server and django server # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * +from lofar.sas.tmss.test.tmss_test_data_django_models import * +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.test.test_utils import assertUrlList + # import and setup test data creator from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) + class BasicFunctionTestCase(unittest.TestCase): # todo: test_welcome_page (once we have one :)) pass class GeneratorTemplateTestCase(unittest.TestCase): + def test_generator_template_list_apiformat(self): r = requests.get(BASE_URL + '/generator_template/?format=api', auth=AUTH) self.assertEqual(r.status_code, 200) self.assertTrue("Generator Template List" in r.content.decode('utf8')) def test_generator_template_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/generator_template/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/generator_template/1234321/', 404) def test_generator_template_POST_and_GET(self): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) def test_generator_template_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', test_data_creator.GeneratorTemplate(), 404, {}) @@ -71,18 +77,18 @@ class GeneratorTemplateTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) # PUT new values, verify PUT_and_assert_expected_response(self, url, test_data_creator.GeneratorTemplate("generatortemplate2"), 200, test_data_creator.GeneratorTemplate("generatortemplate2")) - GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate("generatortemplate2")) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate("generatortemplate2")) def test_generator_template_PATCH(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', 
test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} @@ -91,18 +97,27 @@ class GeneratorTemplateTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(test_data_creator.GeneratorTemplate()) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_generator_template_DELETE(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) # DELETE and check it's gone DELETE_and_assert_gone(self, url) + def test_GET_generator_template_view_returns_correct_entry(self): + + test_data_1 = GeneratorTemplate_test_data("test_generator_template_1") + test_data_2 = GeneratorTemplate_test_data("test_generator_template_2") + id1 = models.GeneratorTemplate.objects.create(**test_data_1).id + id2 = models.GeneratorTemplate.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/generator_template/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/generator_template/' + str(id2), test_data_2) + class SchedulingUnitTemplateTestCase(unittest.TestCase): def test_scheduling_unit_template_list_apiformat(self): @@ -111,14 +126,14 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): self.assertTrue("Scheduling Unit Template List" in r.content.decode('utf8')) def test_scheduling_unit_template_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_template/1234321/', 404) def test_scheduling_unit_template_POST_and_GET(self): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url+'?format=json', 200, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data_creator.SchedulingUnitTemplate()) def test_scheduling_unit_template_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', test_data_creator.SchedulingUnitTemplate(), 404, {}) @@ -128,18 +143,18 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, 
test_data_creator.SchedulingUnitTemplate()) # PUT new values, verify PUT_and_assert_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2"), 200, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) - GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) def test_scheduling_unit_template_PATCH(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} @@ -148,18 +163,28 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(test_data_creator.SchedulingUnitTemplate()) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_scheduling_unit_template_DELETE(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) # DELETE and check it's gone DELETE_and_assert_gone(self, url) + def test_GET_scheduling_unit_template_view_returns_correct_entry(self): + + test_data_1 = SchedulingUnitTemplate_test_data("scheduling_unit_template_1") + test_data_2 = SchedulingUnitTemplate_test_data("scheduling_unit_template_2") + id1 = models.SchedulingUnitTemplate.objects.create(**test_data_1).id + id2 = models.SchedulingUnitTemplate.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_template/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_template/' + str(id2), test_data_2) + + class TaskTemplateTestCase(unittest.TestCase): def test_task_template_list_apiformat(self): @@ -168,14 +193,14 @@ class TaskTemplateTestCase(unittest.TestCase): self.assertTrue("Task Template List" in r.content.decode('utf8')) def test_task_template_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/task_template/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_template/1234321/', 404) def test_task_template_POST_and_GET(self): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url + '?format=json', 200, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url + '?format=json', test_data_creator.TaskTemplate()) def 
test_task_template_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', test_data_creator.TaskTemplate(), 404, {}) @@ -185,18 +210,18 @@ class TaskTemplateTestCase(unittest.TestCase): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) # PUT new values, verify PUT_and_assert_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2"), 200, test_data_creator.TaskTemplate("tasktemplate2")) - GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate("tasktemplate2")) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2")) def test_task_template_PATCH(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}, @@ -205,18 +230,27 @@ class TaskTemplateTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(test_data_creator.TaskTemplate()) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_template_DELETE(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) # DELETE and check it's gone DELETE_and_assert_gone(self, url) + def test_GET_task_template_view_returns_correct_entry(self): + + test_data_1 = TaskTemplate_test_data("task_template_1") + test_data_2 = TaskTemplate_test_data("task_template_2") + id1 = models.TaskTemplate.objects.create(**test_data_1).id + id2 = models.TaskTemplate.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_template/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_template/' + str(id2), test_data_2) + class WorkRelationSelectionTemplateTestCase(unittest.TestCase): def test_work_relation_selection_template_list_apiformat(self): @@ -225,14 +259,14 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): self.assertTrue("Work Relation Selection Template List" in r.content.decode('utf8')) def test_work_relation_selection_template_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/work_relation_selection_template/1234321/', 404) def test_work_relation_selection_template_POST_and_GET(self): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, 
BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url+'?format=json', 200, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data_creator.WorkRelationSelectionTemplate()) def test_work_relation_selection_template_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', test_data_creator.WorkRelationSelectionTemplate(), 404, {}) @@ -242,18 +276,18 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) # PUT new values, verify PUT_and_assert_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2"), 200, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) - GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) def test_work_relation_selection_template_PATCH(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}, @@ -263,18 +297,27 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(test_data_creator.WorkRelationSelectionTemplate()) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_work_relation_selection_template_DELETE(self): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) # DELETE and check it's gone DELETE_and_assert_gone(self, url) + def test_GET_work_relation_selection_template_view_returns_correct_entry(self): + + test_data_1 = WorkRelationSelectionTemplate_test_data("work_relation_selection_template_1") + test_data_2 = WorkRelationSelectionTemplate_test_data("work_relation_selection_template_2") + id1 = 
models.WorkRelationSelectionTemplate.objects.create(**test_data_1).id + id2 = models.WorkRelationSelectionTemplate.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/work_relation_selection_template/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/work_relation_selection_template/' + str(id2), test_data_2) + class TaskConnectorsTestCase(unittest.TestCase): @classmethod @@ -288,14 +331,14 @@ class TaskConnectorsTestCase(unittest.TestCase): self.assertTrue("Task Connectors List" in r.content.decode('utf8')) def test_task_connectors_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/task_connectors/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_connectors/1234321/', 404) def test_task_connectors_POST_and_GET(self): tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tc_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) def test_task_connectors_POST_invalid_role_raises_error(self): @@ -358,11 +401,11 @@ class TaskConnectorsTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data1, 201, tc_test_data1) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tc_test_data1) + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data1) # PUT new values, verify PUT_and_assert_expected_response(self, url, tc_test_data2, 200, tc_test_data2) - GET_and_assert_expected_response(self, url, 200, tc_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data2) def test_task_connectors_PATCH(self): tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url) @@ -370,7 +413,7 @@ class TaskConnectorsTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tc_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) test_patch = {"role": BASE_URL + '/role/calibrator/', "dataformats": [BASE_URL + '/dataformat/Beamformed/', @@ -380,7 +423,7 @@ class TaskConnectorsTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(tc_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_connectors_DELETE(self): tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url) @@ -388,44 +431,43 @@ class TaskConnectorsTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tc_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) - def 
test_task_relation_blueprint_CASCADE_behavior_on_inputs_template_deleted(self): input_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') tc_test_data = test_data_creator.TaskConnectors(input_of_url=input_of_url, output_of_url=self.output_of_url) - # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, tc_test_data) - + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE dependency DELETE_and_assert_gone(self, input_of_url) - # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_outputs_template_deleted(self): output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=output_of_url) - # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, tc_test_data) - + GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE dependency DELETE_and_assert_gone(self, output_of_url) - # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) + + def test_GET_task_connectors_view_returns_correct_entry(self): + + test_data_1 = TaskConnectors_test_data() + test_data_2 = TaskConnectors_test_data() + id1 = models.TaskConnectors.objects.create(**test_data_1).id + id2 = models.TaskConnectors.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connectors/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connectors/' + str(id2), test_data_2) class DefaultTemplates(unittest.TestCase): @@ -459,7 +501,6 @@ class DefaultTemplates(unittest.TestCase): test_data_1['template'] = url POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', test_data_1, 201, test_data_1) - def test_default_work_relation_selection_template_POST(self): r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, @@ -486,7 +527,7 @@ class DefaultTemplates(unittest.TestCase): response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, template_url, 200, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.GeneratorTemplate()) def test_default_scheduling_unit_template_PROTECT_behavior_on_template_deleted(self): @@ -504,8 +545,7 @@ class DefaultTemplates(unittest.TestCase): response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, template_url, 200, test_data_creator.SchedulingUnitTemplate()) - + GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.SchedulingUnitTemplate()) def test_default_task_template_PROTECT_behavior_on_template_deleted(self): @@ -523,7 +563,7 @@ class 
DefaultTemplates(unittest.TestCase): response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, template_url, 200, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.TaskTemplate()) def test_default_work_relation_selection_template_PROTECT_behavior_on_template_deleted(self): @@ -541,7 +581,7 @@ class DefaultTemplates(unittest.TestCase): response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, template_url, 200, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.WorkRelationSelectionTemplate()) class CycleTestCase(unittest.TestCase): @@ -551,7 +591,7 @@ class CycleTestCase(unittest.TestCase): self.assertTrue("Cycle List" in r.content.decode('utf8')) def test_cycle_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/cycle/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/cycle/1234321/', 404) def test_cycle_POST_and_GET(self): @@ -559,7 +599,7 @@ class CycleTestCase(unittest.TestCase): cycle_test_data = test_data_creator.Cycle() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, cycle_test_data) + GET_OK_and_assert_equal_expected_response(self, url, cycle_test_data) def test_cycle_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/cycle/9876789876/', test_data_creator.Cycle(), 404, {}) @@ -570,22 +610,20 @@ class CycleTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, cycle_test_data) + GET_OK_and_assert_equal_expected_response(self, url, cycle_test_data) # PUT new values, verify test_data = dict(test_data_creator.Cycle("other description")) test_data['name'] = cycle_test_data['name'] # since name is PK, need to keep that unchanged PUT_and_assert_expected_response(self, url, test_data, 200, test_data) - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) def test_cycle_PATCH(self): cycle_test_data = test_data_creator.Cycle() - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, cycle_test_data) + GET_OK_and_assert_equal_expected_response(self, url, cycle_test_data) test_patch = {"start": datetime(year=2015, month=10, day=21).isoformat()} @@ -593,19 +631,49 @@ class CycleTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(cycle_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_cycle_DELETE(self): cycle_test_data = test_data_creator.Cycle() - # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', 
cycle_test_data, 201, cycle_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, cycle_test_data) - + GET_OK_and_assert_equal_expected_response(self, url, cycle_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) + def test_GET_cycle_list_shows_entry(self): + + test_data_1 = Cycle_test_data() # uuid makes name unique + models.Cycle.objects.create(**test_data_1) + nbr_results = models.Cycle.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/', test_data_1, nbr_results) + + def test_GET_cycle_view_returns_correct_entry(self): + + test_data_1 = Cycle_test_data() # uuid makes name unique + test_data_2 = Cycle_test_data() + id1 = models.Cycle.objects.create(**test_data_1).name # name is pk + id2 = models.Cycle.objects.create(**test_data_2).name # name is pk + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + str(id2), test_data_2) + + def test_cycle_contains_list_of_related_projects(self): + + cycle_test_data_1 = Cycle_test_data() + project_test_data_1 = Project_test_data() # uuid makes name unique + project_test_data_2 = Project_test_data() # uuid makes name unique + + cycle = models.Cycle.objects.create(**cycle_test_data_1) + project1 = models.Project.objects.create(**project_test_data_1) + project1.cycle = cycle + project1.save() + project2 = models.Project.objects.create(**project_test_data_2) + project2.cycle = cycle + project2.save() + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + cycle.name, cycle_test_data_1) + assertUrlList(self, response_data['projects'], [project1, project2]) + class ProjectTestCase(unittest.TestCase): def test_project_list_apiformat(self): @@ -614,7 +682,7 @@ class ProjectTestCase(unittest.TestCase): self.assertTrue("Project List" in r.content.decode('utf8')) def test_project_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/project/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/project/1234321/', 404) def test_project_POST_and_GET(self): project_test_data = test_data_creator.Project() @@ -622,7 +690,7 @@ class ProjectTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, project_test_data) def test_project_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/project/9876789876/', test_data_creator.Project(), 404, {}) @@ -633,13 +701,13 @@ class ProjectTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, project_test_data) # PUT new values, verify test_data = dict(test_data_creator.Project("other description")) test_data['name'] = project_test_data['name'] # since name is PK, need to keep that unchanged PUT_and_assert_expected_response(self, url, test_data, 200, test_data) - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, 
test_data) def test_project_PATCH(self): project_test_data = test_data_creator.Project() @@ -647,7 +715,7 @@ class ProjectTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, project_test_data) test_patch = {"priority": 500, "tags": ["SUPERIMPORTANT"]} @@ -656,7 +724,7 @@ class ProjectTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(project_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_project_DELETE(self): project_test_data = test_data_creator.Project() @@ -664,7 +732,7 @@ class ProjectTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, project_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -679,7 +747,7 @@ class ProjectTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/project/', test_data, 201, test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # add project reference to cycle test data (we make Django add that to the cycle in serializer) cycle_test_data['projects'] = [url] # add the @@ -689,7 +757,146 @@ class ProjectTestCase(unittest.TestCase): response = requests.delete(cycle_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, cycle_url, 200, cycle_test_data) + GET_OK_and_assert_equal_expected_response(self, cycle_url, cycle_test_data) + + def test_GET_project_list_shows_entry(self): + + test_data_1 = Project_test_data() # uuid makes name unique + models.Project.objects.create(**test_data_1) + nbr_results = models.Project.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/project/', test_data_1, nbr_results) + + def test_GET_project_view_returns_correct_entry(self): + + test_data_1 = Project_test_data() # uuid makes name unique + test_data_2 = Project_test_data() + id1 = models.Project.objects.create(**test_data_1).name # name is pk + id2 = models.Project.objects.create(**test_data_2).name + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/project/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/project/' + str(id2), test_data_2) + + def test_nested_projects_are_filtered_according_to_cycle(self): + + cycle_1 = models.Cycle.objects.create(**Cycle_test_data()) + cycle_2 = models.Cycle.objects.create(**Cycle_test_data()) + test_data_1 = dict(Project_test_data()) # uuid makes project unique + test_data_1['cycle'] = cycle_1 + project_1 = models.Project.objects.create(**test_data_1) + test_data_2 = dict(Project_test_data()) # uuid makes project unique + test_data_2['cycle'] = cycle_2 + project_2 = models.Project.objects.create(**test_data_2) + # assert the returned list contains related items, A list of 
length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/%s/project/' % cycle_2.name, test_data_2, 1) + # assert an existing related item is returned, name is pk + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/%s/project/%s' % (cycle_2.name, project_2.name), test_data_2) + # assert an existing unrelated item is not returned, name is pk + GET_and_assert_equal_expected_code(self, BASE_URL + '/cycle/%s/project/%s' % (cycle_2.name, project_1.name), 404) + +class ResourceTypeTestCase(unittest.TestCase): + def test_resource_type_list_apiformat(self): + r = requests.get(BASE_URL + '/resource_type/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Resource Type List" in r.content.decode('utf8')) + + def test_resource_type_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/resource_type/1234321/', 404) + + def test_resource_type_POST_and_GET(self): + resource_type_test_data = test_data_creator.ResourceType() + # POST and GET a new item and assert correctness + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/resource_type/', resource_type_test_data, 201, resource_type_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, resource_type_test_data) + +class ProjectQuotaTestCase(unittest.TestCase): + def test_project_quota_list_apiformat(self): + r = requests.get(BASE_URL + '/project_quota/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Project Quota List" in r.content.decode('utf8')) + + def test_project_quota_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/project_quota/1234321/', 404) + + def test_project_quota_POST_and_GET(self): + project_quota_test_data = test_data_creator.ProjectQuota() + + # POST and GET a new item and assert correctness + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project_quota/', project_quota_test_data, 201, project_quota_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, project_quota_test_data) + + def test_project_quota_PUT_invalid_raises_error(self): + PUT_and_assert_expected_response(self, BASE_URL + '/project_quota/9876789876/', test_data_creator.ProjectQuota(), 404, {}) + + def test_project_quota_PUT(self): + project_quota_test_data = test_data_creator.ProjectQuota() + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project_quota/', project_quota_test_data, 201, project_quota_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, project_quota_test_data) + + # PUT new values, verify + test_data = dict(test_data_creator.ProjectQuota("other description")) + PUT_and_assert_expected_response(self, url, test_data, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) + + def test_project_quota_PATCH(self): + project_quota_test_data = test_data_creator.ProjectQuota() + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project_quota/', 
project_quota_test_data, 201, project_quota_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, project_quota_test_data) + + test_patch = {"value": 500} + + # PATCH item and verify + PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) + expected_data = dict(project_quota_test_data) + expected_data.update(test_patch) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + def test_project_quota_DELETE(self): + project_quota_test_data = test_data_creator.ProjectQuota() + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project_quota/', project_quota_test_data, 201, project_quota_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, project_quota_test_data) + + # DELETE and check it's gone + DELETE_and_assert_gone(self, url) + + def test_project_quota_PROTECT_behavior_on_project_deleted(self): + + # POST new item with dependencies + project_test_data = test_data_creator.Project() + project_url = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data)['url'] + + project_quota_test_data = dict(test_data_creator.ProjectQuota(project_url=project_url)) + project_quota_url = POST_and_assert_expected_response(self, BASE_URL + '/project_quota/', project_quota_test_data, 201, project_quota_test_data)['url'] + + # verify + GET_OK_and_assert_equal_expected_response(self, project_quota_url, project_quota_test_data) + + project_test_data['project'] = [project_quota_url] # add the + + # Try to DELETE dependency, verify that was not successful + # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... + response = requests.delete(project_url, auth=AUTH) + self.assertEqual(500, response.status_code) + self.assertTrue("ProtectedError" in str(response.content)) + GET_OK_and_assert_equal_expected_response(self, project_quota_url, project_quota_test_data) + class SchedulingSetTestCase(unittest.TestCase): @@ -699,7 +906,7 @@ class SchedulingSetTestCase(unittest.TestCase): self.assertTrue("Scheduling Set List" in r.content.decode('utf8')) def test_scheduling_set_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/scheduling_set/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_set/1234321/', 404) def test_scheduling_set_POST_and_GET(self): schedulingset_test_data = test_data_creator.SchedulingSet() @@ -707,7 +914,7 @@ class SchedulingSetTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingset_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data) def test_scheduling_set_PUT_invalid_raises_error(self): schedulingset_test_data = test_data_creator.SchedulingSet() @@ -720,12 +927,12 @@ class SchedulingSetTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingset_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data) schedulingset_test_data2 = test_data_creator.SchedulingSet("schedulingset2", 
project_url=project_url) # PUT new values, verify PUT_and_assert_expected_response(self, url, schedulingset_test_data2, 200, schedulingset_test_data2) - GET_and_assert_expected_response(self, url, 200, schedulingset_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data2) def test_scheduling_set_PATCH(self): schedulingset_test_data = test_data_creator.SchedulingSet() @@ -733,16 +940,16 @@ class SchedulingSetTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingset_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data) test_patch = {"description": "This is a new and improved description", - "generator_doc": "{'para': 'meter'}"} + "generator_doc": '{"para": "meter"}'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(schedulingset_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_scheduling_set_DELETE(self): schedulingset_test_data = test_data_creator.SchedulingSet() @@ -750,46 +957,68 @@ class SchedulingSetTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingset_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) def test_scheduling_set_PROTECT_behavior_on_project_deleted(self): project_url = test_data_creator.post_data_and_get_url(test_data_creator.Project(), '/project/') - project_test_data = GET_and_assert_expected_response(self, project_url, 200, {}) + project_test_data = GET_and_assert_equal_expected_code(self, project_url, 200) schedulingset_test_data = test_data_creator.SchedulingSet(project_url=project_url) - # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, schedulingset_test_data) - + GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
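# (the 500 is presumably Django rendering the ProtectedError raised by an
# on_delete=models.PROTECT foreign key when the referenced project is deleted)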
response = requests.delete(project_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_and_assert_expected_response(self, project_url, 200, project_test_data) + GET_OK_and_assert_equal_expected_response(self, project_url, project_test_data) def test_scheduling_set_SET_NULL_behavior_on_generator_template_deleted(self): generator_template_url = test_data_creator.post_data_and_get_url(test_data_creator.GeneratorTemplate(), '/generator_template/') schedulingset_test_data = test_data_creator.SchedulingSet(generator_template_url=generator_template_url) - # POST new item test_data = dict(schedulingset_test_data) url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', test_data, 201, test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, test_data) - + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE dependency DELETE_and_assert_gone(self, generator_template_url) - # assert test_data['generator_template'] = None - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) + + def test_GET_SchedulingSet_list_shows_entry(self): + + test_data_1 = SchedulingSet_test_data() + models.SchedulingSet.objects.create(**test_data_1) + nbr_results = models.SchedulingSet.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_set/', test_data_1, nbr_results) + + def test_GET_SchedulingSet_view_returns_correct_entry(self): + + test_data_1 = SchedulingSet_test_data() # uuid makes name unique + test_data_2 = SchedulingSet_test_data() + id1 = models.SchedulingSet.objects.create(**test_data_1).id + id2 = models.SchedulingSet.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/' + str(id2), test_data_2) + + def test_SchedulingSet_contains_list_of_related_SchedulingUnitDraft(self): + + test_data_1 = SchedulingSet_test_data() + scheduling_set = models.SchedulingSet.objects.create(**test_data_1) + scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data("scheduler draft one")) + scheduling_unit_draft_1.scheduling_set = scheduling_set + scheduling_unit_draft_1.save() + scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data("scheduler draft two")) + scheduling_unit_draft_2.scheduling_set = scheduling_set + scheduling_unit_draft_2.save() + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/%d' % scheduling_set.id, test_data_1) + assertUrlList(self, response_data['scheduling_unit_drafts'], [scheduling_unit_draft_1, scheduling_unit_draft_2]) class SchedulingUnitDraftTestCase(unittest.TestCase): @@ -804,7 +1033,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): self.assertTrue("Scheduling Unit Draft List" in r.content.decode('utf8')) def test_scheduling_unit_draft_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/1234321/', 404) def test_scheduling_unit_draft_POST_and_GET(self): schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url) @@ 
-812,7 +1041,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data) def test_scheduling_unit_draft_PUT_invalid_raises_error(self): schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url) @@ -824,13 +1053,13 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data) schedulingunitdraft_test_data2 = test_data_creator.SchedulingUnitDraft("my_scheduling_unit_draft2", scheduling_set_url=self.scheduling_set_url, template_url=self.template_url) # PUT new values, verify PUT_and_assert_expected_response(self, url, schedulingunitdraft_test_data2, 200, schedulingunitdraft_test_data2) - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data2) def test_scheduling_unit_draft_PATCH(self): schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url) @@ -838,16 +1067,16 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data) test_patch = {"description": "This is a new and improved description", - "requirements_doc": "{'para': 'meter'}"} + "requirements_doc": '{"para": "meter"}'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(schedulingunitdraft_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_scheduling_unit_draft_DELETE(self): schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url) @@ -855,7 +1084,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -863,34 +1092,26 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): def 
test_scheduling_unit_draft_CASCADE_behavior_on_scheduling_unit_template_deleted(self): template_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitTemplate(), '/scheduling_unit_template/') schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(template_url=template_url, scheduling_set_url=self.scheduling_set_url) - # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data) - + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data) # DELETE dependency DELETE_and_assert_gone(self, template_url) - # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_scheduling_unit_draft_CASCADE_behavior_on_scheduling_set_deleted(self): scheduling_set_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingSet(), '/scheduling_set/') schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=scheduling_set_url, template_url=self.template_url) - # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data) - + GET_OK_and_assert_equal_expected_response(self, url, schedulingunitdraft_test_data) # DELETE dependency DELETE_and_assert_gone(self, scheduling_set_url) - # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_scheduling_unit_draft_SET_NULL_behavior_on_copies_deleted(self): schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url) @@ -900,16 +1121,89 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): test_data = dict(schedulingunitdraft_test_data) test_data['copies'] = copy_url url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', test_data, 201, test_data)['url'] - # verify - GET_and_assert_expected_response(self, url, 200, test_data) - + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE dependency DELETE_and_assert_gone(self, copy_url) - # assert test_data['copies'] = None - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) + + def test_GET_SchedulingUnitDraft_list_view_shows_entry(self): + + test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft one") + models.SchedulingUnitDraft.objects.create(**test_data_1) + nbr_results = models.SchedulingUnitDraft.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/', test_data_1, nbr_results) + + def test_GET_SchedulingUnitDraft_view_returns_correct_entry(self): + + test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft one one") + test_data_2 = SchedulingUnitDraft_test_data("scheduler unit draft one two") + id1 = models.SchedulingUnitDraft.objects.create(**test_data_1).id + id2 = models.SchedulingUnitDraft.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + 
'/scheduling_unit_draft/%s/' % id2, test_data_2) + + def test_nested_SchedulingUnitDraft_are_filtered_according_to_SchedulingSet(self): + # setup + test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft two one") + test_data_2 = SchedulingUnitDraft_test_data("scheduler unit draft two two") + sst_test_data_1 = SchedulingSet_test_data("scheduler set one") + sst_test_data_2 = SchedulingSet_test_data("scheduler set two") + scheduling_set_1 = models.SchedulingSet.objects.create(**sst_test_data_1) + scheduling_set_2 = models.SchedulingSet.objects.create(**sst_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['scheduling_set'] = scheduling_set_1 + scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['scheduling_set'] = scheduling_set_2 + scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**test_data_2) + + # assert the returned list contains related items; a list of length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/' + % scheduling_set_2.id, test_data_2, 1) + # assert an existing related item is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/%s/' % + (scheduling_set_2.id, scheduling_unit_draft_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/%s/' % + (scheduling_set_2.id, scheduling_unit_draft_1.id), 404) + + def test_SchedulingUnitDraft_contains_list_of_related_SchedulingUnitBlueprint(self): + + # setup + test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft one") + subt_test_data_1 = SchedulingUnitBlueprint_test_data("scheduler unit blue print one") + subt_test_data_2 = SchedulingUnitBlueprint_test_data("scheduler unit blue print two") + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**test_data_1) + scheduling_unit_blueprint_1 = models.SchedulingUnitBlueprint.objects.create(**subt_test_data_1) + scheduling_unit_blueprint_1.draft = scheduling_unit_draft + scheduling_unit_blueprint_1.save() + scheduling_unit_blueprint_2 = models.SchedulingUnitBlueprint.objects.create(**subt_test_data_2) + scheduling_unit_blueprint_2.draft = scheduling_unit_draft + scheduling_unit_blueprint_2.save() + # assert + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/' % scheduling_unit_draft.id, test_data_1) + assertUrlList(self, response_data['related_scheduling_unit_blueprint'], [scheduling_unit_blueprint_1, scheduling_unit_blueprint_2]) + + def test_SchedulingUnitDraft_contains_list_of_related_TaskDraft(self): + + # setup + test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft one") + tdt_test_data_1 = TaskDraft_test_data("task draft one") + tdt_test_data_2 = TaskDraft_test_data("task draft two") + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**test_data_1) + task_draft_1 = models.TaskDraft.objects.create(**tdt_test_data_1) + task_draft_1.scheduling_unit_draft = scheduling_unit_draft + task_draft_1.save() + task_draft_2 = models.TaskDraft.objects.create(**tdt_test_data_2) + task_draft_2.scheduling_unit_draft = scheduling_unit_draft + task_draft_2.save() + # assert + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/' % + scheduling_unit_draft.id, test_data_1) +
assertUrlList(self, response_data['task_drafts'], [task_draft_1, task_draft_2]) class TaskDraftTestCase(unittest.TestCase): @@ -924,7 +1218,7 @@ class TaskDraftTestCase(unittest.TestCase): self.assertTrue("Task Draft List" in r.content.decode('utf8')) def test_task_draft_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/task_draft/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/1234321/', 404) def test_task_draft_POST_and_GET(self): taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -932,7 +1226,7 @@ class TaskDraftTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data) def test_task_draft_PUT_invalid_raises_error(self): taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -945,11 +1239,11 @@ class TaskDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data1, 201, taskdraft_test_data1) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data1) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data1) # PUT new values, verify PUT_and_assert_expected_response(self, url, taskdraft_test_data2, 200, taskdraft_test_data2) - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data2) def test_task_draft_PATCH(self): taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -957,16 +1251,16 @@ class TaskDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data) test_patch = {"description": "This is a new and improved description", - "specifications_doc": "{'para': 'meter'}"} + "specifications_doc": '{"para": "meter"}'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(taskdraft_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_draft_DELETE(self): taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -974,7 +1268,7 @@ class TaskDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ 
-987,13 +1281,13 @@ class TaskDraftTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data) # DELETE dependency DELETE_and_assert_gone(self, template_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_task_draft_CASCADE_behavior_on_scheduling_unit_draft_deleted(self): scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/') @@ -1003,13 +1297,13 @@ class TaskDraftTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, taskdraft_test_data) + GET_OK_and_assert_equal_expected_response(self, url, taskdraft_test_data) # DELETE dependency DELETE_and_assert_gone(self, scheduling_unit_draft_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_task_draft_SET_NULL_behavior_on_copies_deleted(self): taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -1022,14 +1316,89 @@ class TaskDraftTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', test_data, 201, test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE dependency DELETE_and_assert_gone(self, copy_url) # assert test_data['copies'] = None - GET_and_assert_expected_response(self, url, 200, test_data) + GET_OK_and_assert_equal_expected_response(self, url, test_data) + + def test_GET_TaskDraft_list_view_shows_entry(self): + + test_data_1 = TaskDraft_test_data("task draft") + models.TaskDraft.objects.create(**test_data_1) + nbr_results = models.TaskDraft.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/', test_data_1, nbr_results) + + def test_GET_TaskDraft_view_returns_correct_entry(self): + + # setup + test_data_1 = TaskDraft_test_data("task draft one") + test_data_2 = TaskDraft_test_data("task draft two") + id1 = models.TaskDraft.objects.create(**test_data_1).id + id2 = models.TaskDraft.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/' % id2, test_data_2) + + def test_nested_TaskDraft_are_filtered_according_to_SchedulingUnitDraft(self): + + # setup + test_data_1 = TaskDraft_test_data("task draft one") + test_data_2 = TaskDraft_test_data("task draft two") + sudt_test_data_1 = SchedulingUnitDraft_test_data("scheduling unit draft one") + sudt_test_data_2 = SchedulingUnitDraft_test_data("scheduling unit draft two") + scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_1) + scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['scheduling_unit_draft'] = scheduling_unit_draft_1 + task_draft_1 = 
models.TaskDraft.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['scheduling_unit_draft'] = scheduling_unit_draft_2 + task_draft_2 = models.TaskDraft.objects.create(**test_data_2) + # assert the returned list contains related items; a list of length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/' % scheduling_unit_draft_2.id, test_data_2, 1) + # assert an existing related item is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/%s/' % (scheduling_unit_draft_2.id, task_draft_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/%s/' % (scheduling_unit_draft_2.id, task_draft_1.id), 404) + + def test_TaskDraft_contains_list_of_related_TaskBlueprint(self): + + # setup + test_data_1 = TaskDraft_test_data("task draft one") + tbt_test_data_1 = TaskBlueprint_test_data() + tbt_test_data_2 = TaskBlueprint_test_data() + task_draft = models.TaskDraft.objects.create(**test_data_1) + task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt_test_data_1) + task_blueprint_1.draft = task_draft + task_blueprint_1.save() + task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt_test_data_2) + task_blueprint_2.draft = task_draft + task_blueprint_2.save() + # assert + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/' % task_draft.id, test_data_1) + assertUrlList(self, response_data['related_task_blueprint'], [task_blueprint_1, task_blueprint_2]) + + def test_TaskDraft_contains_lists_of_related_TaskRelationDraft(self): + + # setup + test_data_1 = TaskDraft_test_data("task draft one") + trdt_test_data_1 = TaskRelationDraft_test_data() + trdt_test_data_2 = TaskRelationDraft_test_data() + task_draft = models.TaskDraft.objects.create(**test_data_1) + task_relation_draft_1 = models.TaskRelationDraft.objects.create(**trdt_test_data_1) + task_relation_draft_1.producer = task_draft + task_relation_draft_1.save() + task_relation_draft_2 = models.TaskRelationDraft.objects.create(**trdt_test_data_2) + task_relation_draft_2.consumer = task_draft + task_relation_draft_2.save() + # assert + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/' % task_draft.id, test_data_1) + assertUrlList(self, response_data['produced_by'], [task_relation_draft_1]) + assertUrlList(self, response_data['consumed_by'], [task_relation_draft_2]) class TaskRelationDraftTestCase(unittest.TestCase): @@ -1047,7 +1416,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): self.assertTrue("Task Relation Draft List" in r.content.decode('utf8')) def test_task_relation_draft_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_relation_draft/1234321/', 404) def test_task_relation_draft_POST_and_GET(self): trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) @@ -1055,7 +1424,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data) url =
r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) def test_task_relation_draft_PUT_invalid_raises_error(self): trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) @@ -1068,11 +1437,11 @@ class TaskRelationDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data1, 201, trd_test_data1) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trd_test_data1) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data1) # PUT new values, verify PUT_and_assert_expected_response(self, url, trd_test_data2, 200, trd_test_data2) - GET_and_assert_expected_response(self, url, 200, trd_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data2) def test_task_relation_draft_PATCH(self): trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) @@ -1080,15 +1449,15 @@ class TaskRelationDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) - test_patch = {"selection_doc": "{'para': 'meter'}"} + test_patch = {"selection_doc": '{"para": "meter"}'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(trd_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_relation_draft_DELETE(self): trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) @@ -1096,7 +1465,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1109,13 +1478,13 @@ class TaskRelationDraftTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) # DELETE dependency DELETE_and_assert_gone(self, template_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_producer_deleted(self): producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') @@ -1126,14 +1495,13 @@ class TaskRelationDraftTestCase(unittest.TestCase): trd_test_data, 201, 
trd_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) # DELETE dependency DELETE_and_assert_gone(self, producer_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_consumer_deleted(self): consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') @@ -1144,14 +1512,13 @@ class TaskRelationDraftTestCase(unittest.TestCase): trd_test_data, 201, trd_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) # DELETE dependency DELETE_and_assert_gone(self, consumer_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_input_deleted(self): input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') @@ -1162,14 +1529,13 @@ class TaskRelationDraftTestCase(unittest.TestCase): trd_test_data, 201, trd_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) # DELETE dependency DELETE_and_assert_gone(self, input_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_output_deleted(self): output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') @@ -1180,13 +1546,55 @@ class TaskRelationDraftTestCase(unittest.TestCase): trd_test_data, 201, trd_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trd_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) # DELETE dependency DELETE_and_assert_gone(self, output_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) + + def test_GET_TaskRelationDraft_list_view_shows_entry(self): + + test_data_1 = TaskRelationDraft_test_data() + models.TaskRelationDraft.objects.create(**test_data_1) + nbr_results = models.TaskRelationDraft.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_relation_draft/', test_data_1, nbr_results) + + def test_GET_TaskRelationDraft_view_returns_correct_entry(self): + + # setup + test_data_1 = TaskRelationDraft_test_data() + test_data_2 = TaskRelationDraft_test_data() + id1 = models.TaskRelationDraft.objects.create(**test_data_1).id + id2 = models.TaskRelationDraft.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_draft/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_draft/%s/' % id2, test_data_2) + + def test_nested_TaskRelationDraft_are_filtered_according_to_TaskDraft(self): + + # setup + test_data_1 = TaskRelationDraft_test_data() + test_data_2 = TaskRelationDraft_test_data() + tdt_test_data_1 = TaskDraft_test_data() + tdt_test_data_2 = TaskDraft_test_data() + task_draft_1 = models.TaskDraft.objects.create(**tdt_test_data_1) + task_draft_2 = models.TaskDraft.objects.create(**tdt_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['producer'] = 
task_draft_1 + task_relation_draft_1 = models.TaskRelationDraft.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['consumer'] = task_draft_2 + task_relation_draft_2 = models.TaskRelationDraft.objects.create(**test_data_2) + + # assert the returned list contains related items; a list of length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_relation_draft/' % task_draft_2.id, test_data_2, 1) + # assert an existing related producer is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/task_relation_draft/%s/' % (task_draft_1.id, task_relation_draft_1.id), test_data_1) + # assert an existing related consumer is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/task_relation_draft/%s/' % (task_draft_2.id, task_relation_draft_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/%s/task_relation_draft/%s/' % (task_draft_2.id, task_relation_draft_1.id), 404) class SchedulingUnitBlueprintTestCase(unittest.TestCase): @@ -1201,7 +1609,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): self.assertTrue("Scheduling Unit Blueprint List" in r.content.decode('utf8')) def test_scheduling_unit_blueprint_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/1234321/', 404) def test_scheduling_unit_blueprint_POST_and_GET(self): sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -1209,7 +1617,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, sub_test_data) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data) def test_scheduling_unit_blueprint_PUT_invalid_raises_error(self): sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -1222,11 +1630,11 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data1, 201, sub_test_data1) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, sub_test_data1) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data1) # PUT new values, verify PUT_and_assert_expected_response(self, url, sub_test_data2, 200, sub_test_data2) - GET_and_assert_expected_response(self, url, 200, sub_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data2) def test_scheduling_unit_blueprint_PATCH(self): sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -1234,7 +1642,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200,
sub_test_data) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data) test_patch = {"description": "This is an updated description", "do_cancel": True} @@ -1243,7 +1651,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(sub_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_scheduling_unit_blueprint_DELETE(self): sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url) @@ -1251,7 +1659,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, sub_test_data) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1264,13 +1672,13 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, sub_test_data) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data) # DELETE dependency DELETE_and_assert_gone(self, template_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_scheduling_unit_blueprint_CASCADE_behavior_on_scheduling_unit_draft_deleted(self): scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/') @@ -1280,13 +1688,55 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, sub_test_data) + GET_OK_and_assert_equal_expected_response(self, url, sub_test_data) # DELETE dependency DELETE_and_assert_gone(self, scheduling_unit_draft_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) + + def test_GET_SchedulingUnitBlueprint_list_view_shows_entry(self): + + test_data_1 = SchedulingUnitBlueprint_test_data("scheduler unit blue print one") + models.SchedulingUnitBlueprint.objects.create(**test_data_1) + nbr_results = models.SchedulingUnitBlueprint.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_blueprint/', test_data_1, nbr_results) + + def test_GET_SchedulingUnitBlueprint_view_returns_correct_entry(self): + + # setup + test_data_1 = SchedulingUnitBlueprint_test_data("scheduler unit blue print two one ") + test_data_2 = SchedulingUnitBlueprint_test_data("scheduler unit blue print two two ") + id1 = models.SchedulingUnitBlueprint.objects.create(**test_data_1).id + id2 = models.SchedulingUnitBlueprint.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/%s/' % id2, test_data_2) + + def 
test_nested_SchedulingUnitBlueprint_are_filtered_according_to_SchedulingUnitDraft(self): + + # setup + test_data_1 = SchedulingUnitBlueprint_test_data("scheduler unit blue print three one") + test_data_2 = SchedulingUnitBlueprint_test_data("scheduler unit blue print three two") + sudt_test_data_1 = SchedulingUnitDraft_test_data() + sudt_test_data_2 = SchedulingUnitDraft_test_data() + scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_1) + scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['draft'] = scheduling_unit_draft_1 + scheduling_unit_blueprint_1 = models.SchedulingUnitBlueprint.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['draft'] = scheduling_unit_draft_2 + scheduling_unit_blueprint_2 = models.SchedulingUnitBlueprint.objects.create(**test_data_2) + + # assert the returned list contains related items; a list of length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/' % scheduling_unit_draft_2.id, test_data_2, 1) + # assert an existing related item is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/%s/' % (scheduling_unit_draft_2.id, scheduling_unit_blueprint_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/%s/' % (scheduling_unit_draft_2.id, scheduling_unit_blueprint_1.id), 404) + class TaskBlueprintTestCase(unittest.TestCase): @classmethod @@ -1301,7 +1751,7 @@ class TaskBlueprintTestCase(unittest.TestCase): self.assertTrue("Task Blueprint List" in r.content.decode('utf8')) def test_task_blueprint_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/task_blueprint/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/1234321/', 404) def test_task_blueprint_POST_and_GET(self): tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url) @@ -1309,7 +1759,7 @@ class TaskBlueprintTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data) def test_task_blueprint_PUT_invalid_raises_error(self): tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url) @@ -1322,11 +1772,11 @@ class TaskBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data1, 201, tb_test_data1) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tb_test_data1) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data1) # PUT new values, verify PUT_and_assert_expected_response(self, url, tb_test_data2, 200, tb_test_data2) - GET_and_assert_expected_response(self, url, 200, tb_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data2) def
test_task_blueprint_PATCH(self): tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url) @@ -1334,7 +1784,7 @@ class TaskBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data) test_patch = {"description": "This is an updated description", "do_cancel": True} @@ -1343,7 +1793,7 @@ class TaskBlueprintTestCase(unittest.TestCase): PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(tb_test_data) expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_blueprint_DELETE(self): tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url) @@ -1351,7 +1801,7 @@ class TaskBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, tb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1397,13 +1847,13 @@ class TaskBlueprintTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, tb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data) # DELETE dependency DELETE_and_assert_gone(self, template_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_task_blueprint_CASCADE_behavior_on_task_draft_deleted(self): draft_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') @@ -1413,13 +1863,13 @@ class TaskBlueprintTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, tb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data) # DELETE dependency DELETE_and_assert_gone(self, draft_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_task_blueprint_CASCADE_behavior_on_scheduling_unit_blueprint_deleted(self): scheduling_unit_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitBlueprint(), '/scheduling_unit_blueprint/') @@ -1429,13 +1879,91 @@ class TaskBlueprintTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, tb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, tb_test_data) # DELETE dependency DELETE_and_assert_gone(self, scheduling_unit_blueprint_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + 
GET_and_assert_equal_expected_code(self, url, 404) + + def test_GET_TaskBlueprint_list_view_shows_entry(self): + + test_data_1 = TaskBlueprint_test_data() + models.TaskBlueprint.objects.create(**test_data_1) + nbr_results = models.TaskBlueprint.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/', test_data_1, nbr_results) + + def test_GET_TaskBlueprint_view_returns_correct_entry(self): + + # setup + test_data_1 = TaskBlueprint_test_data("task blue print two one") + test_data_2 = TaskBlueprint_test_data("task blue print two two") + id1 = models.TaskBlueprint.objects.create(**test_data_1).id + id2 = models.TaskBlueprint.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/' % id2, test_data_2) + + def test_nested_TaskBlueprint_are_filtered_according_to_TaskDraft(self): + + # setup + test_data_1 = TaskBlueprint_test_data("task blue print three one") + test_data_2 = TaskBlueprint_test_data("task blue print three two") + tdt_test_data_1 = TaskDraft_test_data("task draft two one") + tdt_test_data_2 = TaskDraft_test_data("task draft two two") + task_draft_1 = models.TaskDraft.objects.create(**tdt_test_data_1) + task_draft_2 = models.TaskDraft.objects.create(**tdt_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['draft'] = task_draft_1 + task_blueprint_1 = models.TaskBlueprint.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['draft'] = task_draft_2 + task_blueprint_2 = models.TaskBlueprint.objects.create(**test_data_2) + + # assert the returned list contains related items; a list of length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_blueprint/' % task_draft_2.id, test_data_2, 1) + # assert an existing related item is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/task_blueprint/%s/' % (task_draft_2.id, task_blueprint_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/%s/task_blueprint/%s/' % (task_draft_2.id, task_blueprint_1.id), 404) + + def test_TaskBlueprint_contains_list_of_related_Subtask(self): + + # setup + test_data_1 = TaskBlueprint_test_data() + test_data_2 = TaskBlueprint_test_data() + st_test_data_1 = Subtask_test_data() + st_test_data_2 = Subtask_test_data() + task_blueprint = models.TaskBlueprint.objects.create(**test_data_1) + subtask_1 = models.Subtask.objects.create(**st_test_data_1) + subtask_1.task_blueprint = task_blueprint + subtask_1.save() + subtask_2 = models.Subtask.objects.create(**st_test_data_2) + subtask_2.task_blueprint = task_blueprint + subtask_2.save() + # assert + response_data = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/%s/' % task_blueprint.id, 200) + assertUrlList(self, response_data['subtasks'], [subtask_1, subtask_2]) + + def test_TaskBlueprint_contains_lists_of_related_TaskRelationBlueprint(self): + + # setup + test_data_1 = TaskBlueprint_test_data() + test_data_2 = TaskBlueprint_test_data() + trbt_test_data_1 = TaskRelationBlueprint_test_data() + trbt_test_data_2 = TaskRelationBlueprint_test_data() + task_blueprint = models.TaskBlueprint.objects.create(**test_data_1) + task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**trbt_test_data_1) +
task_relation_blueprint_1.producer = task_blueprint + task_relation_blueprint_1.save() + task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**trbt_test_data_2) + task_relation_blueprint_2.consumer = task_blueprint + task_relation_blueprint_2.save() + # assert + response_data = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/%s/' % task_blueprint.id, 200) + assertUrlList(self, response_data['produced_by'], [task_relation_blueprint_1]) + assertUrlList(self, response_data['consumed_by'], [task_relation_blueprint_2]) class TaskRelationBlueprintTestCase(unittest.TestCase): @@ -1455,7 +1983,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue("Task Relation Blueprint List" in r.content.decode('utf8')) def test_task_relation_blueprint_GET_nonexistant_raises_error(self): - GET_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/1234321/', 404, {}) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_relation_blueprint/1234321/', 404) def test_task_relation_blueprint_POST_and_GET(self): trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) @@ -1463,7 +1991,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) def test_task_relation_blueprint_PUT_invalid_raises_error(self): trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) @@ -1476,11 +2004,11 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data1, 201, trb_test_data1) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trb_test_data1) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data1) # PUT new values, verify PUT_and_assert_expected_response(self, url, trb_test_data2, 200, trb_test_data2) - GET_and_assert_expected_response(self, url, 200, trb_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data2) def test_task_relation_blueprint_PATCH(self): trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) @@ -1488,15 +2016,15 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) - test_patch = {"selection_doc": "{'new': 'doc'}"} + test_patch = {"selection_doc": '{"new": "doc"}'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) expected_data = dict(trb_test_data) 
expected_data.update(test_patch) - GET_and_assert_expected_response(self, url, 200, expected_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_relation_blueprint_DELETE(self): trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) @@ -1504,7 +2032,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -1583,13 +2111,13 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) # DELETE dependency DELETE_and_assert_gone(self, template_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_producer_deleted(self): producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') @@ -1600,14 +2128,13 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): trb_test_data, 201, trb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) # DELETE dependency DELETE_and_assert_gone(self, producer_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_consumer_deleted(self): consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') @@ -1618,14 +2145,13 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): trb_test_data, 201, trb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) # DELETE dependency DELETE_and_assert_gone(self, consumer_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_input_deleted(self): input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') @@ -1636,14 +2162,13 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): trb_test_data, 201, trb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) # DELETE dependency DELETE_and_assert_gone(self, input_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) - + GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_output_deleted(self): output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') @@ -1654,13 +2179,78 @@ class 
TaskRelationBlueprintTestCase(unittest.TestCase): trb_test_data, 201, trb_test_data)['url'] # verify - GET_and_assert_expected_response(self, url, 200, trb_test_data) + GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) # DELETE dependency DELETE_and_assert_gone(self, output_url) # assert - GET_and_assert_expected_response(self, url, 404, {}) + GET_and_assert_equal_expected_code(self, url, 404) + + def test_GET_TaskRelationBlueprint_list_view_shows_entry(self): + + test_data_1 = TaskRelationBlueprint_test_data() + models.TaskRelationBlueprint.objects.create(**test_data_1) + nbr_results = models.TaskRelationBlueprint.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_relation_blueprint/', test_data_1, nbr_results) + + def test_GET_TaskRelationBlueprint_view_returns_correct_entry(self): + + # setup + test_data_1 = TaskRelationBlueprint_test_data() + test_data_2 = TaskRelationBlueprint_test_data() + id1 = models.TaskRelationBlueprint.objects.create(**test_data_1).id + id2 = models.TaskRelationBlueprint.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_blueprint/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_blueprint/%s/' % id2, test_data_2) + + def test_nested_TaskRelationBlueprint_are_filtered_according_to_TaskRelationDraft(self): + + # setup + test_data_1 = TaskRelationBlueprint_test_data() + test_data_2 = TaskRelationBlueprint_test_data() + trdt_test_data_1 = TaskRelationDraft_test_data() + trdt_test_data_2 = TaskRelationDraft_test_data() + task_relation_draft_1 = models.TaskRelationDraft.objects.create(**trdt_test_data_1) + task_relation_draft_2 = models.TaskRelationDraft.objects.create(**trdt_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['draft'] = task_relation_draft_1 + task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['draft'] = task_relation_draft_2 + task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**test_data_2) + + # assert the returned list contains related items; a list of length 1 is retrieved + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/' % task_relation_draft_2.id, test_data_2, 1) + # assert an existing related item is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/%s/' % (task_relation_draft_2.id, task_relation_blueprint_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/%s/' % (task_relation_draft_2.id, task_relation_blueprint_1.id), 404) + + def test_nested_TaskRelationBlueprint_are_filtered_according_to_TaskBlueprint(self): + + # setup + test_data_1 = TaskRelationBlueprint_test_data() + test_data_2 = TaskRelationBlueprint_test_data() + tbt_test_data_1 = TaskBlueprint_test_data() + tbt_test_data_2 = TaskBlueprint_test_data() + task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt_test_data_1) + task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt_test_data_2) + test_data_1 = dict(test_data_1) + test_data_1['producer'] = task_blueprint_1 + task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**test_data_1) + test_data_2 = dict(test_data_2) + test_data_2['consumer'] =
task_blueprint_2 + task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**test_data_2) + # assert the returned list contains related producer + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_1.id, test_data_1, 1) + # assert the returned list contains related consumer + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_2.id, test_data_2, 1) + # assert an existing related item is returned + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/%s/' % (task_blueprint_2.id, task_relation_blueprint_2.id), test_data_2) + # assert an existing unrelated item is not returned + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/%s/' % (task_blueprint_2.id, task_relation_blueprint_1.id), 404) if __name__ == "__main__": diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.run b/SAS/TMSS/test/t_tmssapp_specification_REST_API.run new file mode 100755 index 0000000000000000000000000000000000000000..54223874549b2398838dd8c17d87db4db1adb03d --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_tmssapp_specification_REST_API.py + diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.sh b/SAS/TMSS/test/t_tmssapp_specification_REST_API.sh new file mode 100755 index 0000000000000000000000000000000000000000..e6d6e7f7e59a94f14d1b9d9b45d1d759411b5ec6 --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +# Run Test +./runctest.sh t_tmssapp_specification_REST_API + diff --git a/SAS/TMSS/test/t_tmssapp_specification_django.py b/SAS/TMSS/test/t_tmssapp_specification_django.py deleted file mode 100755 index 2682fd8abb405f0540149e5a568de48f14becaab..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_tmssapp_specification_django.py +++ /dev/null @@ -1,1423 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. - -# $Id: $ - -import os -import unittest -from datetime import datetime -import uuid - -import logging -logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) - -# todo: Tags? -> Decide how to deal with them first. -# todo: Immutability of Blueprints on db level? 
- -# Do Mandatory setup: -# use setup/teardown magic for tmss test database -# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_database_unittest_setup module) -from lofar.sas.tmss.test.tmss_database_unittest_setup import * - -from lofar.sas.tmss.test.tmss_test_data_django_models import * - -from django.db.utils import IntegrityError - -# TODO: rest API testing should be moved out of this test module. -# import rest_framework.test -# client = rest_framework.test.APIClient() -# from lofar.sas.tmss.test.test_utils import assertDataWithUrls, assertUrlList - -class GeneratorTemplateTest(unittest.TestCase): - def test_GeneratorTemplate_gets_created_with_correct_creation_timestamp(self): - # setup - before = datetime.utcnow() - entry = models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_GeneratorTemplate_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_GeneratorTemplate_list_view_shows_entry(self): - # - # # setup - # entry = models.GeneratorTemplate.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/generator_template/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response.data['results'][0].items()) - - # TODO: rest API testing should be moved out of this test module. 
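The many `created_at`/`updated_at` tests in both the old and new modules only pass if the models auto-manage their timestamps on INSERT and on every `save()`. A minimal sketch of that convention, assuming the TMSS models use Django's `auto_now_add`/`auto_now` fields:

```python
from django.db import models as django_models

class TimestampedModelSketch(django_models.Model):
    created_at = django_models.DateTimeField(auto_now_add=True)  # set once, on INSERT
    updated_at = django_models.DateTimeField(auto_now=True)      # refreshed on every save()

    class Meta:
        abstract = True  # concrete TMSS models would inherit this behaviour
```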
- # def test_GET_GeneratorTemplate_view_returns_correct_entry(self): - # - # # setup - # id1 = models.GeneratorTemplate.objects.create(**self.test_data_1).id - # id2 = models.GeneratorTemplate.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/generator_template/%s/' % id1, format='json', follow=True) - # response2 = client.get('/generator_template/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response1.data.items()) - # for item in self.test_data_2.items(): - # self.assertIn(item, response2.data.items()) - - -class DefaultGeneratorTemplateTest(unittest.TestCase): - def test_DefaultGeneratorTemplate_prevents_same_name(self): - common_forbidden_name = "my_name" - template = models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()) - - test_data_1 = DefaultGeneratorTemplate_test_data(common_forbidden_name, template) - models.DefaultGeneratorTemplate.objects.create(**test_data_1) - - test_data_2 = DefaultGeneratorTemplate_test_data(common_forbidden_name, template) - with self.assertRaises(IntegrityError): - models.DefaultGeneratorTemplate.objects.create(**test_data_2) - - -class SchedulingUnitTemplateTest(unittest.TestCase): - def test_SchedulingUnitTemplate_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SchedulingUnitTemplate_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SchedulingUnitTemplate_list_view_shows_entry(self): - # - # # setup - # entry = models.SchedulingUnitTemplate.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/scheduling_unit_template/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response.data['results'][0].items()) - - # TODO: rest API testing should be moved out of this test module. 
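`test_DefaultGeneratorTemplate_prevents_same_name` expects the second `create()` with a duplicate name to fail at the database level rather than in Python. That behaviour follows from a uniqueness constraint on the column; an illustrative model (not the actual TMSS definition):

```python
from django.db import models as django_models

class DefaultTemplateSketch(django_models.Model):
    # unique=True puts a UNIQUE constraint on the column, so inserting a second
    # row with the same name raises django.db.utils.IntegrityError
    name = django_models.CharField(max_length=128, unique=True)
    template = django_models.ForeignKey('GeneratorTemplate',  # assumed target
                                        on_delete=django_models.PROTECT)
```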
- # def test_GET_SchedulingUnitTemplate_view_returns_correct_entry(self): - # - # # setup - # id1 = models.SchedulingUnitTemplate.objects.create(**self.test_data_1).id - # id2 = models.SchedulingUnitTemplate.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/scheduling_unit_template/%s/' % id1, format='json', follow=True) - # response2 = client.get('/scheduling_unit_template/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response1.data.items()) - # for item in self.test_data_2.items(): - # self.assertIn(item, response2.data.items()) - - -class TaskTemplateTest(unittest.TestCase): - def test_TaskTemplate_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.TaskTemplate.objects.create(**TaskTemplate_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_TaskTemplate_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.TaskTemplate.objects.create(**TaskTemplate_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskTemplate_list_view_shows_entry(self): - # - # # setup - # entry = models.TaskTemplate.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/task_template/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response.data['results'][0].items()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskTemplate_view_returns_correct_entry(self): - # - # # setup - # id1 = models.TaskTemplate.objects.create(**self.test_data_1).id - # id2 = models.TaskTemplate.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/task_template/%s/' % id1, format='json', follow=True) - # response2 = client.get('/task_template/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response1.data.items()) - # for item in self.test_data_2.items(): - # self.assertIn(item, response2.data.items()) - - -class WorkRelationSelectionTemplateTest(unittest.TestCase): - def test_WorkRelationSelectionTemplate_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_WorkRelationSelectionTemplate_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_WorkRelationSelectionTemplate_list_view_shows_entry(self): - # - # # setup - # entry = models.WorkRelationSelectionTemplate.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/work_relation_selection_template/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response.data['results'][0].items()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_WorkRelationSelectionTemplate_view_returns_correct_entry(self): - # - # # setup - # id1 = models.WorkRelationSelectionTemplate.objects.create(**self.test_data_1).id - # id2 = models.WorkRelationSelectionTemplate.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/work_relation_selection_template/%s/' % id1, format='json', follow=True) - # response2 = client.get('/work_relation_selection_template/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response1.data.items()) - # for item in self.test_data_2.items(): - # self.assertIn(item, response2.data.items()) - - -class TaskConnectorsTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskConnectors_list_view_shows_entry(self): - # - # # setup - # models.TaskConnectors.objects.create(**self.get_test_data()) - # - # # assert - # response = client.get('/task_connectors/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], self.get_test_data()) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskConnectors_view_returns_correct_entry(self): - # - # # setup - # id1 = models.TaskConnectors.objects.create(**self.test_data_1).id - # id2 = models.TaskConnectors.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/task_connectors/%s/' % id1, format='json', follow=True) - # response2 = client.get('/task_connectors/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_POST_TaskConnectors_prevents_missing_input_of(self): - - # setup - test_data_1 = dict(TaskConnectors_test_data()) - test_data_1['input_of'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskConnectors.objects.create(**test_data_1) - - def test_POST_TaskConnectors_prevents_missing_output_of(self): - - # setup - test_data_1 = dict(TaskConnectors_test_data()) - test_data_1['output_of'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskConnectors.objects.create(**test_data_1) - - # TODO: rest API testing should be moved out of this test module. 
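The various `prevents_missing_*` tests throughout this file all exercise the same mechanism: a NOT NULL foreign key, so `objects.create(field=None)` violates the database constraint and raises `IntegrityError`. An illustrative sketch (the FK targets and related names are assumptions):

```python
from django.db import models as django_models

class TaskConnectorsSketch(django_models.Model):
    # null=False (Django's default for ForeignKey) maps to NOT NULL in the DB,
    # so objects.create(input_of=None) fails with IntegrityError on INSERT
    input_of = django_models.ForeignKey('TaskTemplate', null=False, related_name='inputs',
                                        on_delete=django_models.CASCADE)
    output_of = django_models.ForeignKey('TaskTemplate', null=False, related_name='outputs',
                                         on_delete=django_models.CASCADE)
```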
- # def test_TaskConnectors_allows_setting_dataformats(self): - # # Other then through the API view, we cannot assign ManyToMany on creation, but have to set it later - # - # test_data_1 = dict(self.test_data_1) - # test_data_1['input_of'] = None - # wior = models.TaskConnectors.objects.create(**self.test_data_2) - # wior.dataformats.set([models.Dataformat.objects.get(value='Beamformed'), - # models.Dataformat.objects.get(value='MeasurementSet')]) - # wior.save() - # - # # assert - # response = client.get('/task_connectors/%s/' % wior.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, self.test_data_2) - - -class CycleTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Cycle_list_view_shows_entry(self): - # - # # setup - # models.Cycle.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/cycle/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response.data['results'][0].items()) - - def test_Cycle_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.Cycle.objects.create(**Cycle_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_Cycle_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.Cycle.objects.create(**Cycle_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Cycle_view_returns_correct_entry(self): - # - # # setup - # id1 = models.Cycle.objects.create(**self.test_data_1).name # name is pk - # id2 = models.Cycle.objects.create(**self.test_data_2).name - # - # # assert - # response1 = client.get('/cycle/%s/' % id1, format='json', follow=True) - # response2 = client.get('/cycle/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response1.data.items()) - # for item in self.test_data_2.items(): - # self.assertIn(item, response2.data.items()) - - # TODO: rest API testing should be moved out of this test module. - # def test_Cycle_constains_list_of_related_projects(self): - # - # self.pt = ProjectTest() - # - # # setup - # cycle = models.Cycle.objects.create(**self.test_data_1) - # project1 = models.Project.objects.create(**self.pt.test_data_1) - # project1.cycle = cycle - # project1.save() - # project2 = models.Project.objects.create(**self.pt.test_data_2) - # project2.cycle = cycle - # project2.save() - # - # # assert - # response = client.get('/cycle/%s/' % cycle.name, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['projects'], [project1, project2]) - - -class ProjectTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_Project_list_view_shows_entry(self): - # - # # setup - # models.Project.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/project/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response.data['results'][0].items()) - - def test_Project_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.Project.objects.create(**Project_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_Project_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.Project.objects.create(**Project_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_Project_view_returns_correct_entry(self): - # - # # setup - # id1 = models.Project.objects.create(**self.test_data_1).name # name is pk - # id2 = models.Project.objects.create(**self.test_data_2).name - # - # # assert - # response1 = client.get('/project/%s/' % id1, format='json', follow=True) - # response2 = client.get('/project/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # for item in self.test_data_1.items(): - # self.assertIn(item, response1.data.items()) - # for item in self.test_data_2.items(): - # self.assertIn(item, response2.data.items()) - - # TODO: rest API testing should be moved out of this test module. - # def test_nested_projects_are_filtered_according_to_cycle(self): - # - # self.ct = CycleTest() - # self.ct.setUp() - # - # # setup - # cycle_1 = models.Cycle.objects.create(**self.ct.test_data_1) - # cycle_2 = models.Cycle.objects.create(**self.ct.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['cycle'] = cycle_1 - # project_1 = models.Project.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['cycle'] = cycle_2 - # project_2 = models.Project.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/cycle/%s/project/' % cycle_2.name, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned, name is pk - # response = client.get('/cycle/%s/project/%s/' % (cycle_2.name, project_2.name) , format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned, name is pk - # response = client.get('/cycle/%s/project/%s/' % (cycle_2.name, project_1.name) , format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - -class SchedulingSetTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_SchedulingSet_list_view_shows_entry(self): - # - # # setup - # models.SchedulingSet.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/scheduling_set/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data['results'][0], self.test_data_1) - - def test_SchedulingSet_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SchedulingSet.objects.create(**SchedulingSet_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SchedulingSet_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SchedulingSet.objects.create(**SchedulingSet_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SchedulingSet_view_returns_correct_entry(self): - # - # # setup - # id1 = models.SchedulingSet.objects.create(**self.test_data_1).id - # id2 = models.SchedulingSet.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/scheduling_set/%s/' % id1, format='json', follow=True) - # response2 = client.get('/scheduling_set/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_SchedulingSet_prevents_missing_project(self): - - # setup - test_data = dict(SchedulingSet_test_data()) - test_data['project'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SchedulingSet.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test module. - # def test_SchedulingSet_contains_list_of_related_SchedulingUnitDraft(self): - # - # sudt = SchedulingUnitDraftTest() - # sudt.setUp(populate=True) - # - # # setup - # scheduling_set = models.SchedulingSet.objects.create(**self.test_data_1) - # scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt.test_data_1) - # scheduling_unit_draft_1.scheduling_set = scheduling_set - # scheduling_unit_draft_1.save() - # scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt.test_data_2) - # scheduling_unit_draft_2.scheduling_set = scheduling_set - # scheduling_unit_draft_2.save() - # - # # assert - # response = client.get('/scheduling_set/%s/' % scheduling_set.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['scheduling_unit_drafts'], [scheduling_unit_draft_1, scheduling_unit_draft_2]) - - -class SchedulingUnitDraftTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. 
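Throughout these tests, each `*_test_data()` helper returns a fresh dict of valid constructor kwargs, and callers copy it with `dict(...)` before mutating a field. A sketch of that factory pattern; the field names here are hypothetical, not the actual TMSS model fields:

```python
import uuid

def SchedulingSet_test_data_sketch():
    # return a *fresh* dict on each call, so tests never share mutable state
    return {"name": "my_scheduling_set_%s" % uuid.uuid4(),
            "description": "a test scheduling set",
            "project": None}  # the real factory would create a valid Project here

test_data = dict(SchedulingSet_test_data_sketch())  # defensive shallow copy
test_data['project'] = None  # then break one field to provoke the NOT NULL constraint
```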
- # def test_GET_SchedulingUnitDraft_list_view_shows_entry(self): - # - # # setup - # models.SchedulingUnitDraft.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/scheduling_unit_draft/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # - # assertDataWithUrls(self, response.data['results'][0], self.test_data_1) - - def test_SchedulingUnitDraft_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SchedulingUnitDraft_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SchedulingUnitDraft_view_returns_correct_entry(self): - # - # # setup - # id1 = models.SchedulingUnitDraft.objects.create(**self.test_data_1).id - # id2 = models.SchedulingUnitDraft.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/scheduling_unit_draft/%s/' % id1, format='json', follow=True) - # response2 = client.get('/scheduling_unit_draft/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_SchedulingUnitDraft_prevents_missing_template(self): - - # setup - test_data = dict(SchedulingUnitDraft_test_data()) - test_data['requirements_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SchedulingUnitDraft.objects.create(**test_data) - - def test_SchedulingUnitDraft_prevents_missing_scheduling_set(self): - - # setup - test_data = dict(SchedulingUnitDraft_test_data()) - test_data['scheduling_set'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SchedulingUnitDraft.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_nested_SchedulingUnitDraft_are_filtered_according_to_SchedulingSet(self): - # - # sst = SchedulingSetTest() - # sst.setUp() - # - # # setup - # scheduling_set_1 = models.SchedulingSet.objects.create(**sst.test_data_1) - # scheduling_set_2 = models.SchedulingSet.objects.create(**sst.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['scheduling_set'] = scheduling_set_1 - # scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['scheduling_set'] = scheduling_set_2 - # scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/scheduling_set/%s/scheduling_unit_draft/' % scheduling_set_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get('/scheduling_set/%s/scheduling_unit_draft/%s/' % (scheduling_set_2.id, scheduling_unit_draft_2.id) , format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get('/scheduling_set/%s/scheduling_unit_draft/%s/' % (scheduling_set_2.id, scheduling_unit_draft_1.id) , format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - # TODO: rest API testing should be moved out of this test module. - # def test_SchedulingUnitDraft_contains_list_of_related_SchedulingUnitBlueprint(self): - # subt = SchedulingUnitBlueprintTest() - # subt.setUp(populate=False) - # - # # setup - # scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**self.test_data_1) - # scheduling_unit_blueprint_1 = models.SchedulingUnitBlueprint.objects.create(**subt.test_data_1) - # scheduling_unit_blueprint_1.draft = scheduling_unit_draft - # scheduling_unit_blueprint_1.save() - # scheduling_unit_blueprint_2 = models.SchedulingUnitBlueprint.objects.create(**subt.test_data_2) - # scheduling_unit_blueprint_2.draft = scheduling_unit_draft - # scheduling_unit_blueprint_2.save() - # - # # assert - # response = client.get('/scheduling_unit_draft/%s/' % scheduling_unit_draft.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['related_scheduling_unit_blueprint'], [scheduling_unit_blueprint_1, scheduling_unit_blueprint_2]) - - # TODO: rest API testing should be moved out of this test module. 
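Several of the commented-out tests above use `assertUrlList` to check that a reverse relation in a REST response lists exactly the related objects as hyperlinks. A hypothetical minimal implementation, matching only on the pk embedded in each URL:

```python
def assertUrlList(test, url_list, model_instances):
    # the response field should reference each related instance by a hyperlink
    test.assertEqual(len(model_instances), len(url_list))
    for instance in model_instances:
        test.assertTrue(any('/%s/' % instance.pk in url for url in url_list),
                        "no url found for instance with pk=%s" % instance.pk)
```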
- # def test_SchedulingUnitDraft_contains_list_of_related_TaskDraft(self): - # tdt = TaskDraftTest() - # tdt.setUp(populate=False) - # - # # setup - # scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**self.test_data_1) - # task_draft_1 = models.TaskDraft.objects.create(**tdt.test_data_1) - # task_draft_1.scheduling_unit_draft = scheduling_unit_draft - # task_draft_1.save() - # task_draft_2 = models.TaskDraft.objects.create(**tdt.test_data_2) - # task_draft_2.scheduling_unit_draft = scheduling_unit_draft - # task_draft_2.save() - # - # # assert - # response = client.get('/scheduling_unit_draft/%s/' % scheduling_unit_draft.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['task_drafts'], [task_draft_1, task_draft_2]) - - -class TaskDraftTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskDraft_list_view_shows_entry(self): - # - # # setup - # models.TaskDraft.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/task_draft/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # - # assertDataWithUrls(self, response.data['results'][0], self.test_data_1) - - def test_TaskDraft_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.TaskDraft.objects.create(**TaskDraft_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_TaskDraft_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.TaskDraft.objects.create(**TaskDraft_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskDraft_view_returns_correct_entry(self): - # - # # setup - # id1 = models.TaskDraft.objects.create(**self.test_data_1).id - # id2 = models.TaskDraft.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/task_draft/%s/' % id1, format='json', follow=True) - # response2 = client.get('/task_draft/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_TaskDraft_prevents_missing_template(self): - - # setup - test_data = dict(TaskDraft_test_data()) - test_data['specifications_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskDraft.objects.create(**test_data) - - def test_TaskDraft_prevents_missing_scheduling_unit_draft(self): - - # setup - test_data = dict(TaskDraft_test_data()) - test_data['scheduling_unit_draft'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskDraft.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_nested_TaskDraft_are_filtered_according_to_SchedulingUnitDraft(self): - # sudt = SchedulingUnitDraftTest() - # sudt.setUp(populate=False) - # - # # setup - # scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt.test_data_1) - # scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['scheduling_unit_draft'] = scheduling_unit_draft_1 - # task_draft_1 = models.TaskDraft.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['scheduling_unit_draft'] = scheduling_unit_draft_2 - # task_draft_2 = models.TaskDraft.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/scheduling_unit_draft/%s/task_draft/' % scheduling_unit_draft_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get('/scheduling_unit_draft/%s/task_draft/%s/' % (scheduling_unit_draft_2.id, task_draft_2.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get('/scheduling_unit_draft/%s/task_draft/%s/' % (scheduling_unit_draft_2.id, task_draft_1.id), format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - - # TODO: rest API testing should be moved out of this test module. - # def test_TaskDraft_contains_list_of_related_TaskBlueprint(self): - # - # tbt = TaskBlueprintTest() - # tbt.setUp(populate=False) - # - # # setup - # task_draft = models.TaskDraft.objects.create(**self.test_data_1) - # task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt.test_data_1) - # task_blueprint_1.draft = task_draft - # task_blueprint_1.save() - # task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt.test_data_2) - # task_blueprint_2.draft = task_draft - # task_blueprint_2.save() - # - # # assert - # response = client.get('/task_draft/%s/' % task_draft.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['related_task_blueprint'], [task_blueprint_1, task_blueprint_2]) - - - # TODO: rest API testing should be moved out of this test module. - # def test_TaskDraft_contains_lists_of_related_TaskRelationDraft(self): - # - # trdt = TaskRelationDraftTest() - # trdt.setUp(populate=False) - # - # # setup - # task_draft = models.TaskDraft.objects.create(**self.test_data_1) - # task_relation_draft_1 = models.TaskRelationDraft.objects.create(**trdt.test_data_1) - # task_relation_draft_1.producer = task_draft - # task_relation_draft_1.save() - # task_relation_draft_2 = models.TaskRelationDraft.objects.create(**trdt.test_data_2) - # task_relation_draft_2.consumer = task_draft - # task_relation_draft_2.save() - # - # # assert - # response = client.get('/task_draft/%s/' % task_draft.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['produced_by'], [task_relation_draft_1]) - # assertUrlList(self, response.data['consumed_by'], [task_relation_draft_2]) - - -class TaskRelationDraftTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. 
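The nested routes these (commented-out) tests hit, such as `/task_draft/<pk>/task_relation_draft/`, are typically implemented in Django REST Framework by filtering the child queryset on the parent pk captured from the URL. A sketch, not the actual TMSS viewset code; `TaskRelationDraftSerializer` and the `task_draft_pk` kwarg are assumptions:

```python
from django.db.models import Q
from rest_framework import viewsets

class TaskRelationDraftNestedViewSet(viewsets.ReadOnlyModelViewSet):
    serializer_class = TaskRelationDraftSerializer  # assumed to exist elsewhere

    def get_queryset(self):
        # a relation counts as "related" to a draft when the draft is its
        # producer or its consumer, matching the tests above
        queryset = models.TaskRelationDraft.objects.all()
        task_draft_pk = self.kwargs.get('task_draft_pk')
        if task_draft_pk is not None:
            queryset = queryset.filter(Q(producer_id=task_draft_pk) |
                                       Q(consumer_id=task_draft_pk))
        return queryset
```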
- # def test_GET_TaskRelationDraft_list_view_shows_entry(self): - # - # # setup - # models.TaskRelationDraft.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/task_relation_draft/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # - # assertDataWithUrls(self, response.data['results'][0], self.test_data_1) - - def test_TaskRelationDraft_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_TaskRelationDraft_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskRelationDraft_view_returns_correct_entry(self): - # - # # setup - # id1 = models.TaskRelationDraft.objects.create(**self.test_data_1).id - # id2 = models.TaskRelationDraft.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/task_relation_draft/%s/' % id1, format='json', follow=True) - # response2 = client.get('/task_relation_draft/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_TaskRelationDraft_prevents_missing_template(self): - - # setup - test_data = dict(TaskRelationDraft_test_data()) - test_data['selection_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationDraft.objects.create(**test_data) - - def test_TaskRelationDraft_prevents_missing_consumer(self): - - # setup - test_data = dict(TaskRelationDraft_test_data()) - test_data['consumer'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationDraft.objects.create(**test_data) - - def test_TaskRelationDraft_prevents_missing_producer(self): - - # setup - test_data = dict(TaskRelationDraft_test_data()) - test_data['producer'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationDraft.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_nested_TaskRelationDraft_are_filtered_according_to_TaskDraft(self): - # tdt = TaskDraftTest() - # tdt.setUp(populate=False) - # - # # setup - # task_draft_1 = models.TaskDraft.objects.create(**tdt.test_data_1) - # task_draft_2 = models.TaskDraft.objects.create(**tdt.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['producer'] = task_draft_1 - # task_relation_draft_1 = models.TaskRelationDraft.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['consumer'] = task_draft_2 - # task_relation_draft_2 = models.TaskRelationDraft.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/task_draft/%s/task_relation_draft/' % task_draft_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related producer is returned - # response = client.get('/task_draft/%s/task_relation_draft/%s/' % (task_draft_1.id, task_relation_draft_1.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_1) - # - # # assert an existing related consumer is returned - # response = client.get('/task_draft/%s/task_relation_draft/%s/' % (task_draft_2.id, task_relation_draft_2.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get('/task_draft/%s/task_relation_draft/%s/' % (task_draft_2.id, task_relation_draft_1.id), format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - -class SchedulingUnitBlueprintTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. - # def test_GET_SchedulingUnitBlueprint_list_view_shows_entry(self): - # - # # setup - # models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()) - # - # # assert - # response = client.get('/scheduling_unit_blueprint/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # - # assertDataWithUrls(self, response.data['results'][0], SchedulingUnitBlueprint_test_data()) - - def test_SchedulingUnitBlueprint_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SchedulingUnitBlueprint_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_SchedulingUnitBlueprint_view_returns_correct_entry(self): - # - # # setup - # id1 = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()).id - # id2 = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()).id - # - # # assert - # response1 = client.get('/scheduling_unit_blueprint/%s/' % id1, format='json', follow=True) - # response2 = client.get('/scheduling_unit_blueprint/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_SchedulingUnitBlueprint_prevents_missing_template(self): - - # setup - test_data = dict(SchedulingUnitBlueprint_test_data()) - test_data['requirements_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SchedulingUnitBlueprint.objects.create(**test_data) - - def test_SchedulingUnitBlueprint_prevents_missing_draft(self): - - # setup - test_data = dict(SchedulingUnitBlueprint_test_data()) - test_data['draft'] = None - - # assert - with self.assertRaises(IntegrityError): - models.SchedulingUnitBlueprint.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test module. - # def test_nested_SchedulingUnitBlueprint_are_filtered_according_to_SchedulingUnitDraft(self): - # - # sudt = SchedulingUnitDraftTest() - # sudt.setUp(populate=False) - # - # # setup - # scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt.test_data_1) - # scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['draft'] = scheduling_unit_draft_1 - # scheduling_unit_blueprint_1 = models.SchedulingUnitBlueprint.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['draft'] = scheduling_unit_draft_2 - # scheduling_unit_blueprint_2 = models.SchedulingUnitBlueprint.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/scheduling_unit_draft/%s/scheduling_unit_blueprint/' % scheduling_unit_draft_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get('/scheduling_unit_draft/%s/scheduling_unit_blueprint/%s/' % (scheduling_unit_draft_2.id, scheduling_unit_blueprint_2.id) , format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get('/scheduling_unit_draft/%s/scheduling_unit_blueprint/%s/' % (scheduling_unit_draft_2.id, scheduling_unit_blueprint_1.id) , format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - -class TaskBlueprintTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_TaskBlueprint_list_view_shows_entry(self): - # - # # setup - # models.TaskBlueprint.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/task_blueprint/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # - # assertDataWithUrls(self, response.data['results'][0], self.test_data_1) - - def test_TaskBlueprint_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_TaskBlueprint_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskBlueprint_view_returns_correct_entry(self): - # - # # setup - # id1 = models.TaskBlueprint.objects.create(**self.test_data_1).id - # id2 = models.TaskBlueprint.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/task_blueprint/%s/' % id1, format='json', follow=True) - # response2 = client.get('/task_blueprint/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_TaskBlueprint_prevents_missing_template(self): - - # setup - test_data = dict(TaskBlueprint_test_data()) - test_data['specifications_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskBlueprint.objects.create(**test_data) - - def test_TaskBlueprint_prevents_missing_draft(self): - - # setup - test_data = dict(TaskBlueprint_test_data()) - test_data['draft'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskBlueprint.objects.create(**test_data) - - def test_TaskBlueprint_prevents_missing_scheduling_unit_blueprint(self): - - # setup - test_data = dict(TaskBlueprint_test_data()) - test_data['scheduling_unit_blueprint'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskBlueprint.objects.create(**test_data) - - - # TODO: rest API testing should be moved out of this test module. 
- # def test_nested_TaskBlueprint_are_filtered_according_to_TaskDraft(self): - # tdt = TaskDraftTest() - # tdt.setUp(populate=False) - # - # # setup - # task_draft_1 = models.TaskDraft.objects.create(**tdt.test_data_1) - # task_draft_2 = models.TaskDraft.objects.create(**tdt.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['draft'] = task_draft_1 - # task_blueprint_1 = models.TaskBlueprint.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['draft'] = task_draft_2 - # task_blueprint_2 = models.TaskBlueprint.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/task_draft/%s/task_blueprint/' % task_draft_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get('/task_draft/%s/task_blueprint/%s/' % (task_draft_2.id, task_blueprint_2.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get('/task_draft/%s/task_blueprint/%s/' % (task_draft_2.id, task_blueprint_1.id), format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - # TODO: rest API testing should be moved out of this test module. - # def test_TaskBlueprint_contains_list_of_related_Subtask(self): - # - # from t_tmssapp_scheduling_django import SubtaskTest # Note: cannot do this on module level due to circular import - # st = SubtaskTest() - # st.setUp(populate=False) - # - # # setup - # task_blueprint = models.TaskBlueprint.objects.create(**self.test_data_1) - # subtask_1 = models.Subtask.objects.create(**st.test_data_1) - # subtask_1.task_blueprint = task_blueprint - # subtask_1.save() - # subtask_2 = models.Subtask.objects.create(**st.test_data_2) - # subtask_2.task_blueprint = task_blueprint - # subtask_2.save() - # - # # assert - # response = client.get('/task_blueprint/%s/' % task_blueprint.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['subtasks'], [subtask_1, subtask_2]) - - # TODO: rest API testing should be moved out of this test module. - # def test_TaskBlueprint_contains_lists_of_related_TaskRelationBlueprint(self): - # - # trbt = TaskRelationBlueprintTest() - # trbt.setUp(populate=False) - # - # # setup - # task_blueprint = models.TaskBlueprint.objects.create(**self.test_data_1) - # task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**trbt.test_data_1) - # task_relation_blueprint_1.producer = task_blueprint - # task_relation_blueprint_1.save() - # task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**trbt.test_data_2) - # task_relation_blueprint_2.consumer = task_blueprint - # task_relation_blueprint_2.save() - # - # # assert - # response = client.get('/task_blueprint/%s/' % task_blueprint.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertUrlList(self, response.data['produced_by'], [task_relation_blueprint_1]) - # assertUrlList(self, response.data['consumed_by'], [task_relation_blueprint_2]) - - -class TaskRelationBlueprintTest(unittest.TestCase): - # TODO: rest API testing should be moved out of this test module. 
- # def test_GET_TaskRelationBlueprint_list_view_shows_entry(self): - # # setup - # models.TaskRelationBlueprint.objects.create(**self.test_data_1) - # - # # assert - # response = client.get('/task_relation_blueprint/', format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # - # assertDataWithUrls(self, response.data['results'][0], self.test_data_1) - - def test_TaskRelationBlueprint_gets_created_with_correct_creation_timestamp(self): - # setup - before = datetime.utcnow() - entry = models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_TaskRelationBlueprint_update_timestamp_gets_changed_correctly(self): - # setup - entry = models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - # TODO: rest API testing should be moved out of this test module. - # def test_GET_TaskRelationBlueprint_view_returns_correct_entry(self): - # # setup - # id1 = models.TaskRelationBlueprint.objects.create(**self.test_data_1).id - # id2 = models.TaskRelationBlueprint.objects.create(**self.test_data_2).id - # - # # assert - # response1 = client.get('/task_relation_blueprint/%s/' % id1, format='json', follow=True) - # response2 = client.get('/task_relation_blueprint/%s/' % id2, format='json', follow=True) - # self.assertEqual(response1.status_code, 200) - # self.assertEqual(response2.status_code, 200) - # assertDataWithUrls(self, response1.data, self.test_data_1) - # assertDataWithUrls(self, response2.data, self.test_data_2) - - def test_TaskRelationBlueprint_prevents_missing_selection_template(self): - # setup - test_data = dict(TaskRelationBlueprint_test_data()) - test_data['selection_template'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationBlueprint.objects.create(**test_data) - - def test_TaskRelationBlueprint_prevents_missing_draft(self): - # setup - test_data = dict(TaskRelationBlueprint_test_data()) - test_data['draft'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationBlueprint.objects.create(**test_data) - - def test_TaskRelationBlueprint_prevents_missing_producer(self): - # setup - test_data = dict(TaskRelationBlueprint_test_data()) - test_data['producer'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationBlueprint.objects.create(**test_data) - - def test_TaskRelationBlueprint_prevents_missing_consumer(self): - # setup - test_data = dict(TaskRelationBlueprint_test_data()) - test_data['consumer'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationBlueprint.objects.create(**test_data) - - def test_TaskRelationBlueprint_prevents_missing_input(self): - # setup - test_data = dict(TaskRelationBlueprint_test_data()) - test_data['input'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationBlueprint.objects.create(**test_data) - - def test_TaskRelationBlueprint_prevents_missing_output(self): - # setup - test_data = dict(TaskRelationBlueprint_test_data()) - test_data['output'] = None - - # assert - with self.assertRaises(IntegrityError): - models.TaskRelationBlueprint.objects.create(**test_data) - - # TODO: rest API testing should be moved out of this test 
module. - # def test_nested_TaskRelationBlueprint_are_filtered_according_to_TaskRelationDraft(self): - # trdt = TaskRelationDraftTest() - # trdt.setUp(populate=False) - # - # # setup - # task_relation_draft_1 = models.TaskRelationDraft.objects.create(**trdt.test_data_1) - # task_relation_draft_2 = models.TaskRelationDraft.objects.create(**trdt.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['draft'] = task_relation_draft_1 - # task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['draft'] = task_relation_draft_2 - # task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**test_data_2) - # - # # assert the returned list contains related items - # response = client.get('/task_relation_draft/%s/task_relation_blueprint/' % task_relation_draft_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get('/task_relation_draft/%s/task_relation_blueprint/%s/' % (task_relation_draft_2.id, task_relation_blueprint_2.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = client.get('/task_relation_draft/%s/task_relation_blueprint/%s/' % (task_relation_draft_2.id, task_relation_blueprint_1.id), format='json', follow=True) - # self.assertEqual(response.status_code, 404) - - # TODO: rest API testing should be moved out of this test module. - # def test_nested_TaskRelationBlueprint_are_filtered_according_to_TaskBlueprint(self): - # tbt = TaskBlueprintTest() - # tbt.setUp(populate=False) - # - # # setup - # task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt.test_data_1) - # task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt.test_data_2) - # test_data_1 = dict(self.test_data_1) - # test_data_1['producer'] = task_blueprint_1 - # task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**test_data_1) - # test_data_2 = dict(self.test_data_2) - # test_data_2['consumer'] = task_blueprint_2 - # task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**test_data_2) - # - # # assert the returned list contains related producer - # response = client.get('/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_1.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_1) - # - # # assert the returned list contains related consumer - # response = client.get('/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_2.id, format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # self.assertEqual(len(response.data['results']), 1) - # assertDataWithUrls(self, response.data['results'][0], test_data_2) - # - # # assert an existing related item is returned - # response = client.get('/task_blueprint/%s/task_relation_blueprint/%s/' % (task_blueprint_2.id, task_relation_blueprint_2.id), format='json', follow=True) - # self.assertEqual(response.status_code, 200) - # assertDataWithUrls(self, response.data, test_data_2) - # - # # assert an existing unrelated item is not returned - # response = 
client.get('/task_blueprint/%s/task_relation_blueprint/%s/' % (task_blueprint_2.id, task_relation_blueprint_1.id), format='json', follow=True)
-    # self.assertEqual(response.status_code, 404)
-    #
-
-if __name__ == "__main__":
-    os.environ['TZ'] = 'UTC'
-    unittest.main()
diff --git a/SAS/TMSS/test/t_tmssapp_specification_django.run b/SAS/TMSS/test/t_tmssapp_specification_django.run
deleted file mode 100755
index ebb019daef4a56a1b336a787f3afd8ae55f756cc..0000000000000000000000000000000000000000
--- a/SAS/TMSS/test/t_tmssapp_specification_django.run
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-# Run the unit test
-source python-coverage.sh
-python_coverage_test "*tmss*" t_tmssapp_specification_django.py
-
diff --git a/SAS/TMSS/test/t_tmssapp_specification_django.sh b/SAS/TMSS/test/t_tmssapp_specification_django.sh
deleted file mode 100755
index 76ef35dfb40dcd49fe5304cb66ffb18f56f1d895..0000000000000000000000000000000000000000
--- a/SAS/TMSS/test/t_tmssapp_specification_django.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-./runctest.sh t_tmssapp_specification_django
diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/test/t_tmssapp_specification_django_API.py
new file mode 100755
index 0000000000000000000000000000000000000000..d4301723b0c597e5054c3f7de2dad64db244d8f8
--- /dev/null
+++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.py
@@ -0,0 +1,623 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id: $
+
+import os
+import unittest
+from datetime import datetime
+import uuid
+
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG)
+
+# todo: Tags? -> Decide how to deal with them first.
+# todo: Immutability of Blueprints on db level?
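Like its deleted predecessor, the new module gets its database fixture purely by importing `tmss_database_unittest_setup`: the "setup/teardown magic" mentioned in the comment just below. The sketch illustrates that import-for-side-effect pattern; the real module's contents are an assumption:

```python
# a sketch of an import-for-side-effect test fixture module
import atexit

def _start_test_database():
    # e.g. spin up an isolated postgres instance and apply django migrations
    print("starting tmss test database...")

def _stop_test_database():
    print("tearing down tmss test database...")

_start_test_database()                 # runs at import time, before any TestCase
atexit.register(_stop_test_database)   # guarantees teardown when python exits
```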
+
+# Do Mandatory setup:
+# use setup/teardown magic for tmss test database
+# (ignore the PyCharm unused-import warning; the unittest runner does use the tmss_database_unittest_setup module at runtime)
+from lofar.sas.tmss.test.tmss_database_unittest_setup import *
+
+from lofar.sas.tmss.test.tmss_test_data_django_models import *
+
+from django.db.utils import IntegrityError
+
+
+class GeneratorTemplateTest(unittest.TestCase):
+    def test_GeneratorTemplate_gets_created_with_correct_creation_timestamp(self):
+        # setup
+        before = datetime.utcnow()
+        entry = models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_GeneratorTemplate_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class DefaultGeneratorTemplateTest(unittest.TestCase):
+    def test_DefaultGeneratorTemplate_prevents_same_name(self):
+        common_forbidden_name = "my_name"
+        template = models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data())
+
+        test_data_1 = DefaultGeneratorTemplate_test_data(common_forbidden_name, template)
+        models.DefaultGeneratorTemplate.objects.create(**test_data_1)
+
+        test_data_2 = DefaultGeneratorTemplate_test_data(common_forbidden_name, template)
+        with self.assertRaises(IntegrityError):
+            models.DefaultGeneratorTemplate.objects.create(**test_data_2)
+
+
+class SchedulingUnitTemplateTest(unittest.TestCase):
+    def test_SchedulingUnitTemplate_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_SchedulingUnitTemplate_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class TaskTemplateTest(unittest.TestCase):
+    def test_TaskTemplate_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.TaskTemplate.objects.create(**TaskTemplate_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_TaskTemplate_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.TaskTemplate.objects.create(**TaskTemplate_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class WorkRelationSelectionTemplateTest(unittest.TestCase):
+    def test_WorkRelationSelectionTemplate_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_WorkRelationSelectionTemplate_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class TaskConnectorsTest(unittest.TestCase):
+
+    def test_POST_TaskConnectors_prevents_missing_input_of(self):
+
+        # setup
+        test_data_1 = dict(TaskConnectors_test_data())
+        test_data_1['input_of'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskConnectors.objects.create(**test_data_1)
+
+    def test_POST_TaskConnectors_prevents_missing_output_of(self):
+
+        # setup
+        test_data_1 = dict(TaskConnectors_test_data())
+        test_data_1['output_of'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskConnectors.objects.create(**test_data_1)
+
+
+class CycleTest(unittest.TestCase):
+
+    def test_Cycle_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.Cycle.objects.create(**Cycle_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_Cycle_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.Cycle.objects.create(**Cycle_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class ProjectTest(unittest.TestCase):
+
+    def test_Project_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.Project.objects.create(**Project_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_Project_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.Project.objects.create(**Project_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class ProjectQuotaTest(unittest.TestCase):
+    def test_ProjectQuota_prevents_missing_project(self):
+        # setup
+        test_data = dict(ProjectQuota_test_data())
+        test_data['project'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.ProjectQuota.objects.create(**test_data)
+
+
+class SchedulingSetTest(unittest.TestCase):
+
+    def test_SchedulingSet_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.SchedulingSet.objects.create(**SchedulingSet_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_SchedulingSet_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.SchedulingSet.objects.create(**SchedulingSet_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+    def test_SchedulingSet_prevents_missing_project(self):
+
+        # setup
+        test_data = dict(SchedulingSet_test_data())
+        test_data['project'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.SchedulingSet.objects.create(**test_data)
+
+
+class SchedulingUnitDraftTest(unittest.TestCase):
+
+    def test_SchedulingUnitDraft_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_SchedulingUnitDraft_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+    def test_SchedulingUnitDraft_prevents_missing_template(self):
+
+        # setup
+        test_data = dict(SchedulingUnitDraft_test_data())
+        test_data['requirements_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.SchedulingUnitDraft.objects.create(**test_data)
+
+    def test_SchedulingUnitDraft_prevents_missing_scheduling_set(self):
+
+        # setup
+        test_data = dict(SchedulingUnitDraft_test_data())
+        test_data['scheduling_set'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.SchedulingUnitDraft.objects.create(**test_data)
+
+
+class TaskDraftTest(unittest.TestCase):
+
+    def test_TaskDraft_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.TaskDraft.objects.create(**TaskDraft_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_TaskDraft_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.TaskDraft.objects.create(**TaskDraft_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+    def test_TaskDraft_prevents_missing_template(self):
+
+        # setup
+        test_data = dict(TaskDraft_test_data())
+        test_data['specifications_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskDraft.objects.create(**test_data)
+
+    def test_TaskDraft_prevents_missing_scheduling_unit_draft(self):
+
+        # setup
+        test_data = dict(TaskDraft_test_data())
+        test_data['scheduling_unit_draft'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskDraft.objects.create(**test_data)
+
+
+class TaskRelationDraftTest(unittest.TestCase):
+
+    def test_TaskRelationDraft_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_TaskRelationDraft_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+    def test_TaskRelationDraft_prevents_missing_template(self):
+
+        # setup
+        test_data = dict(TaskRelationDraft_test_data())
+        test_data['selection_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationDraft.objects.create(**test_data)
+
+    def test_TaskRelationDraft_prevents_missing_consumer(self):
+
+        # setup
+        test_data = dict(TaskRelationDraft_test_data())
+        test_data['consumer'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationDraft.objects.create(**test_data)
+
+    def test_TaskRelationDraft_prevents_missing_producer(self):
+
+        # setup
+        test_data = dict(TaskRelationDraft_test_data())
+        test_data['producer'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationDraft.objects.create(**test_data)
+
+
+class SchedulingUnitBlueprintTest(unittest.TestCase):
+
+    def test_SchedulingUnitBlueprint_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_SchedulingUnitBlueprint_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+    def test_SchedulingUnitBlueprint_prevents_missing_template(self):
+
+        # setup
+        test_data = dict(SchedulingUnitBlueprint_test_data())
+        test_data['requirements_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.SchedulingUnitBlueprint.objects.create(**test_data)
+
+    def test_SchedulingUnitBlueprint_prevents_missing_draft(self):
+
+        # setup
+        test_data = dict(SchedulingUnitBlueprint_test_data())
+        test_data['draft'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.SchedulingUnitBlueprint.objects.create(**test_data)
+
+
+class TaskBlueprintTest(unittest.TestCase):
+
+    def test_TaskBlueprint_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_TaskBlueprint_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+    def test_TaskBlueprint_prevents_missing_template(self):
+
+        # setup
+        test_data = dict(TaskBlueprint_test_data())
+        test_data['specifications_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskBlueprint.objects.create(**test_data)
+
+    def test_TaskBlueprint_prevents_missing_draft(self):
+
+        # setup
+        test_data = dict(TaskBlueprint_test_data())
+        test_data['draft'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskBlueprint.objects.create(**test_data)
+
+    def test_TaskBlueprint_prevents_missing_scheduling_unit_blueprint(self):
+
+        # setup
+        test_data = dict(TaskBlueprint_test_data())
+        test_data['scheduling_unit_blueprint'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskBlueprint.objects.create(**test_data)
+
+
+class TaskRelationBlueprintTest(unittest.TestCase):
+
+    def test_TaskRelationBlueprint_gets_created_with_correct_creation_timestamp(self):
+        # setup
+        before = datetime.utcnow()
+        entry = models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_TaskRelationBlueprint_update_timestamp_gets_changed_correctly(self):
+        # setup
+        entry = models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+    def test_TaskRelationBlueprint_prevents_missing_selection_template(self):
+        # setup
+        test_data = dict(TaskRelationBlueprint_test_data())
+        test_data['selection_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationBlueprint.objects.create(**test_data)
+
+    def test_TaskRelationBlueprint_prevents_missing_draft(self):
+        # setup
+        test_data = dict(TaskRelationBlueprint_test_data())
+        test_data['draft'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationBlueprint.objects.create(**test_data)
+
+    def test_TaskRelationBlueprint_prevents_missing_producer(self):
+        # setup
+        test_data = dict(TaskRelationBlueprint_test_data())
+        test_data['producer'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationBlueprint.objects.create(**test_data)
+
+    def test_TaskRelationBlueprint_prevents_missing_consumer(self):
+        # setup
+        test_data = dict(TaskRelationBlueprint_test_data())
+        test_data['consumer'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationBlueprint.objects.create(**test_data)
+
+    def test_TaskRelationBlueprint_prevents_missing_input(self):
+        # setup
+        test_data = dict(TaskRelationBlueprint_test_data())
+        test_data['input'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationBlueprint.objects.create(**test_data)
+
+    def test_TaskRelationBlueprint_prevents_missing_output(self):
+        # setup
+        test_data = dict(TaskRelationBlueprint_test_data())
+        test_data['output'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.TaskRelationBlueprint.objects.create(**test_data)
+
+
+if __name__ == "__main__":
+    os.environ['TZ'] = 'UTC'
+    unittest.main()
diff --git a/SAS/TMSS/test/t_tmssapp_specification_functional.run b/SAS/TMSS/test/t_tmssapp_specification_django_API.run
similarity index 50%
rename from SAS/TMSS/test/t_tmssapp_specification_functional.run
rename to SAS/TMSS/test/t_tmssapp_specification_django_API.run
index 9fcde0fc57f257b1b8ba49b95e6db03e4d848533..2d3b15933824c515ab6b7d331914b3da9935f714 100755
--- a/SAS/TMSS/test/t_tmssapp_specification_functional.run
+++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.run
@@ -2,5 +2,5 @@
 
 # Run the unit test
 source python-coverage.sh
-python_coverage_test "*tmss*" t_tmssapp_specification_functional.py
+python_coverage_test "*tmss*" t_tmssapp_specification_django_API.py
 
diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.sh b/SAS/TMSS/test/t_tmssapp_specification_django_API.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c737df3fc6bcea4167fca56cf893c06ba76a678a
--- /dev/null
+++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_tmssapp_specification_django_API
diff --git a/SAS/TMSS/test/t_tmssapp_specification_functional.sh b/SAS/TMSS/test/t_tmssapp_specification_functional.sh
deleted file mode 100755
index 6a17726700b1c0593ee3b9c38fa3f207a61c0ddf..0000000000000000000000000000000000000000
--- a/SAS/TMSS/test/t_tmssapp_specification_functional.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-# Run Test
-./runctest.sh t_tmssapp_specification_functional
-
diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.py b/SAS/TMSS/test/t_tmssapp_specification_permissions.py
old mode 100644
new mode 100755
diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py
index c1204e0eebd54258df3784e8c7375d8bc423b22f..34e44c6f384073421611136ed7c6d8e7c24b39c2 100644
--- a/SAS/TMSS/test/test_utils.py
+++ b/SAS/TMSS/test/test_utils.py
@@ -31,6 +31,7 @@ from lofar.common.testing.postgres import PostgresTestMixin, PostgresTestDatabas
 from lofar.common.dbcredentials import Credentials, DBCredentials
 from lofar.common.util import find_free_port, waitForInterrupt
 from lofar.sas.tmss.test.ldap_test_service import TestLDAPServer
+from lofar.sas.tmss.tmss.exceptions import TMSSException
 
 def assertDataWithUrls(self, data, expected):
     """
@@ -44,7 +45,9 @@ def assertDataWithUrls(self, data, expected):
         if isinstance(v, models.Model):
             v = str(v.pk)
         v = v.replace(' ', '%20')
-        self.assertTrue(str(v) in data[k])
+        err_msg = "The value '%s' (key %s) is not in the response data '%s'" % (str(v), k, str(data[k]))
+        self.assertTrue(str(v) in data[k], err_msg)
+
     else:
         self.assertEqual(v, data[k])
 
@@ -90,10 +93,12 @@ class TMSSTestDatabaseInstance(PostgresTestDatabaseInstance):
             django.setup()
             django.core.management.call_command('migrate')
 
-        self._server_process = Process(target=_migrate_helper, daemon=True)
-        self._server_process.start()
-        self._server_process.join()
+        migrate_process = Process(target=_migrate_helper, daemon=True)
+        migrate_process.start()
+        migrate_process.join()
+
+        if migrate_process.exitcode != 0:
+            raise TMSSException("Could not initialize TMSS database with django migrations")
 
 class TMSSPostgresTestMixin(PostgresTestMixin):
@@ -108,16 +113,17 @@ class TMSSPostgresTestMixin(PostgresTestMixin):
 
 class TMSSDjangoServerInstance():
     ''' Creates a running django TMSS server at the requested port with the requested database credentials.
     '''
-    def __init__(self, db_dbcreds_id: str="TMSS", ldap_dbcreds_id: str="TMSS_LDAP", port: int=8000):
+    def __init__(self, db_dbcreds_id: str="TMSS", ldap_dbcreds_id: str="TMSS_LDAP", host: str='127.0.0.1', port: int=8000):
         self._db_dbcreds_id = db_dbcreds_id
         self._ldap_dbcreds_id = ldap_dbcreds_id
+        self.host = host
         self.port = port
         self._server_process = None
 
     @property
     def address(self):
         ''':returns the address and port of the django server'''
-        return "127.0.0.1:%d" % self.port
+        return "%s:%d" % (self.host, self.port)
 
     @property
     def url(self):
@@ -235,11 +241,12 @@ class TMSSDjangoServerInstance():
 
 class TMSSTestEnvironment:
     '''Create and run a test django TMSS server against a newly created test database and a test ldap server (and cleanup automagically)'''
-    def __init__(self, preferred_django_port: int=8000):
+    def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000):
         self.ldap_server = TestLDAPServer(user='test', password='test')
         self.database = TMSSTestDatabaseInstance()
         self.django_server = TMSSDjangoServerInstance(db_dbcreds_id=self.database.dbcreds_id,
                                                       ldap_dbcreds_id=self.ldap_server.dbcreds_id,
+                                                      host=host,
                                                       port=find_free_port(preferred_django_port))
 
     def start(self):
@@ -305,13 +312,15 @@ def main_test_environment():
     parser = OptionParser('%prog [options]',
                           description='setup/run/teardown a full TMSS test environment including a fresh and isolated database, LDAP server and DJANGO REST server.')
+    parser.add_option("-H", "--host", dest="host", type="string", default='127.0.0.1',
+                      help="expose the TMSS Django REST API via this host. [default=%default]")
     parser.add_option("-p", "--port", dest="port", type="int", default=find_free_port(8000),
                       help="try to use this port for the DJANGO REST API. If not available, then a random free port is used and logged. [default=%default]")
     (options, args) = parser.parse_args()
 
     logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO)
 
-    with TMSSTestEnvironment(preferred_django_port=options.port) as instance:
+    with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port) as instance:
         # print some nice info for the user to use the test servers...
         # use print instead of log for clean lines.
         for h in logging.root.handlers:
diff --git a/SAS/TMSS/test/tmss_database_unittest_setup.py b/SAS/TMSS/test/tmss_database_unittest_setup.py
index 1819ee491a3a9119f1aa99a77736f960a54583bc..adc9193ff72ab130c86629968b9d605c5170555e 100644
--- a/SAS/TMSS/test/tmss_database_unittest_setup.py
+++ b/SAS/TMSS/test/tmss_database_unittest_setup.py
@@ -28,13 +28,16 @@ which is automatically destroyed at the end of the unittest session.
 
 # Setup step 1:
 from lofar.sas.tmss.test.test_utils import TMSSTestDatabaseInstance
 tmss_test_db_instance = TMSSTestDatabaseInstance()
-tmss_test_db_instance.create()
+try:
+    tmss_test_db_instance.create()
+except:
+    tmss_test_db_instance.destroy()
+    exit(1)
 
 # tell unittest to stop (and automagically cleanup) the test database once all testing is done.
 def tearDownModule():
     tmss_test_db_instance.destroy()
-
 # Setup step 2:
 # now it's safe to import django modules, and mandatory to set env DJANGO_SETTINGS_MODULE and do django.setup()
 import django
diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py
index adf787e0a9608cf60c2bf4981ef4203f23f61d74..0473b8c7b18a101acfb3c973adb66f55a9e263eb 100644
--- a/SAS/TMSS/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/test/tmss_test_data_django_models.py
@@ -32,6 +32,7 @@ from lofar.common.json_utils import get_default_json_object_for_schema
 
 from datetime import datetime
 import uuid
+import json
 
 def GeneratorTemplate_test_data(name="my_GeneratorTemplate") -> dict:
     return {"name": name,
@@ -46,23 +47,23 @@ def DefaultGeneratorTemplate_test_data(name=None, template=None) -> dict:
             'template': template,
             'tags':[]}
 
-def SchedulingUnitTemplate_test_data() -> dict:
-    return {"name": "My SchedulingUnitTemplate",
+def SchedulingUnitTemplate_test_data(name="my_SchedulingUnitTemplate") -> dict:
+    return {"name": name,
             "description": 'My SchedulingUnitTemplate description',
             "version": 'v0.314159265359',
             "schema": {"mykey": "my value"},
             "tags": ["TMSS", "TESTING"]}
 
-def TaskTemplate_test_data() -> dict:
+def TaskTemplate_test_data(name="my TaskTemplate") -> dict:
     return {"validation_code_js":"",
-            "name": "my TaskTemplate",
+            "name": name,
             "description": 'My TaskTemplate description',
             "version": 'v0.314159265359',
             "schema": {"mykey": "my value"},
             "tags": ["TMSS", "TESTING"]}
 
-def WorkRelationSelectionTemplate_test_data() -> dict:
-    return {"name": "my WorkRelationSelectionTemplate",
+def WorkRelationSelectionTemplate_test_data(name="my_WorkRelationSelectionTemplate") -> dict:
+    return {"name": name,
             "description": 'My WorkRelationSelectionTemplate description',
             "version": 'v0.314159265359',
             "schema": {"mykey": "my value"},
             "tags":
@@ -79,8 +80,8 @@ def Cycle_test_data() -> dict:
     return {"name": 'my_cycle' + str(uuid.uuid4()),
             "description": "",
             "tags": [],
-            "start": datetime.utcnow(),
-            "stop": datetime.utcnow(),
+            "start": datetime.utcnow().isoformat(),
+            "stop": datetime.utcnow().isoformat(),
             "number": 1,
             "standard_hours": 2,
             "expert_hours": 3,
@@ -97,8 +98,30 @@ def Project_test_data() -> dict:
             "expert": True,
             "filler": False}
 
-def SchedulingSet_test_data() -> dict:
-    return {"name": 'my_scheduling_set',
+def ResourceUnit_test_data() -> dict:
+    return {
+        "tags": [],
+        "description": 'my description ' + str(uuid.uuid4()),
+        "name": 'my_resource_unit_' + str(uuid.uuid4()),
+    }
+
+def ResourceType_test_data() -> dict:
+    return {
+        "tags": [],
+        "description": 'my description ' + str(uuid.uuid4()),
+        "resource_unit": models.ResourceUnit.objects.create(**ResourceUnit_test_data()),
+        "name": 'my_resource_type_' + str(uuid.uuid4()),
+    }
+
+def ProjectQuota_test_data() -> dict:
+    return {
+        "value": '1000',
+        "project": models.Project.objects.create(**Project_test_data()),
+        "resource_type": models.ResourceType.objects.create(**ResourceType_test_data())
+    }
+
+def SchedulingSet_test_data(name="my_scheduling_set") -> dict:
+    return {"name": name,
             "description": "",
             "tags": [],
             "generator_doc": "{}",
@@ -106,16 +129,16 @@ def SchedulingSet_test_data() -> dict:
             "generator_template": models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()),
             "generator_source": None}
 
-def SchedulingUnitDraft_test_data() -> dict:
-    return {"name": 'my_scheduling_unit_draft',
-            "description": "",
-            "tags": [],
-            "requirements_doc": "{}",
-            "copy_reason": models.CopyReason.objects.get(value='template'),
-            "generator_instance_doc": "para",
-            "copies": None,
-            "scheduling_set": models.SchedulingSet.objects.create(**SchedulingSet_test_data()),
-            "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())}
+def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft") -> dict:
+    return {"name": name,
+            "description": "",
+            "tags": [],
+            "requirements_doc": "{}",
+            "copy_reason": models.CopyReason.objects.get(value='template'),
+            "generator_instance_doc": "para",
+            "copies": None,
+            "scheduling_set": models.SchedulingSet.objects.create(**SchedulingSet_test_data()),
+            "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())}
 
 def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None) -> dict:
     if specifications_template is None:
@@ -140,8 +163,8 @@ def TaskRelationDraft_test_data() -> dict:
             "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()),
             "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data())}
 
-def SchedulingUnitBlueprint_test_data() -> dict:
-    return {"name": 'my_scheduling_unit_blueprint',
+def SchedulingUnitBlueprint_test_data(name='my_scheduling_unit_blueprint') -> dict:
+    return {"name": name,
             "description": "",
             "tags": [],
             "requirements_doc": "{}",
@@ -149,14 +172,14 @@ def SchedulingUnitBlueprint_test_data() -> dict:
             "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()),
             "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())}
 
-def TaskBlueprint_test_data(task_draft: models.TaskDraft = None) -> dict:
+def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None) -> dict:
     if task_draft is None:
         task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data())
 
-    return {"name": 'my_task_blueprint',
+    return {"name": name,
             "description": "",
             "tags": [],
-            "specifications_doc": str(task_draft.specifications_doc),
+            "specifications_doc": task_draft.specifications_doc if isinstance(task_draft.specifications_doc, str) else json.dumps(task_draft.specifications_doc),
             "do_cancel": False,
             "draft": task_draft,
             "specifications_template": task_draft.specifications_template,
@@ -223,7 +246,7 @@ def SubtaskInput_test_data() -> dict:
             "tags":[]}
 
 def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_template: models.SubtaskTemplate=None,
-                      specifications_doc: str=None, start_time=None, stop_time=None, cluster_object=None) -> dict:
+                      specifications_doc: str=None, start_time=None, stop_time=None, cluster=None) -> dict:
 
     if task_blueprint is None:
         task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
@@ -235,13 +258,13 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat
         specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
 
     if start_time is None:
-        start_time = datetime.utcnow()
+        start_time = datetime.utcnow().isoformat()
 
     if stop_time is None:
-        stop_time = datetime.utcnow()
+        stop_time = datetime.utcnow().isoformat()
 
-    if cluster_object is None:
-        cluster_object = models.Cluster.objects.create(name="dummy cluster", location="downstairs", tags=[])
+    if cluster is None:
+        cluster = models.Cluster.objects.create(name="dummy cluster", location="downstairs", tags=[])
 
     return { "start_time": start_time,
              "stop_time": stop_time,
@@ -250,26 +273,32 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat
              "task_blueprint": task_blueprint,
              "specifications_template": subtask_template,
             "tags": ["TMSS", "TESTING"],
-             "do_cancel": datetime.utcnow(),
+             "do_cancel": datetime.utcnow().isoformat(),
             "priority": 1,
             "schedule_method": models.ScheduleMethod.objects.get(value='manual'),
-             "cluster": cluster_object,
+             "cluster": cluster,
             "scheduler_input_doc": "{}"}
 
-def Dataproduct_test_data(producer: models.SubtaskOutput=None, filename: str="my_file.ext", directory: str="/tmp/my/dir/") -> dict:
+def Dataproduct_test_data(producer: models.SubtaskOutput=None,
+                          filename: str="my_file.ext",
+                          directory: str="CEP4:/data/test-projects",
+                          dataformat: models.Dataformat=None) -> dict:
     if producer is None:
         producer = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data())
 
+    if dataformat is None:
+        dataformat = models.Dataformat.objects.get(value="MeasurementSet")
+
     return {"filename": filename,
             "directory": directory,
-            "dataformat": models.Dataformat.objects.get(value='Beamformed'),
-            "deleted_since": datetime.utcnow(),
-            "pinned_since": datetime.utcnow(),
+            "dataformat": dataformat,
+            "deleted_since": None,
+            "pinned_since": None,
             "specifications_doc": "{}",
             "specifications_template": models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()),
             "tags": ["TMSS", "TESTING"],
             "producer": producer,
-            "do_cancel": datetime.utcnow(),
+            "do_cancel": None,
             "expected_size": 1234,
             "size": 123,
             "feedback_doc": "{}",
@@ -316,8 +345,8 @@ def Cluster_test_data(name="default cluster") -> dict:
 
 def DataproductArchiveInfo_test_data() -> dict:
     return {"dataproduct": models.Dataproduct.objects.create(**Dataproduct_test_data()),
             "storage_ticket": "myticket_1",
-            "public_since": datetime.utcnow(),
-            "corrupted_since": datetime.utcnow(),
+            "public_since": datetime.utcnow().isoformat(),
+            "corrupted_since": datetime.utcnow().isoformat(),
             "tags": ['tmss', 'testing']}
 
 def DataproductHash_test_data() -> dict:
diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py
index 60cb443aa880c4a6802688b008deb621d6c1d2ee..656a2f5cd70cf451241ab4fb0c93af52ba4600cb 100644
--- a/SAS/TMSS/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/test/tmss_test_data_rest.py
@@ -38,7 +38,7 @@ class TMSSRESTTestDataCreator():
     def post_data_and_get_url(self, data, url_postfix):
         """POST the given data the self.django_api_url+url_postfix, and return the response's url"""
         return json.loads(self.post_data_and_get_response(data, url_postfix).content.decode('utf-8'))['url']
-    
+
     #######################################################
     # the methods below can be used to create test data
     # naming convention is: <django_model_name>()
@@ -110,10 +110,42 @@ class TMSSRESTTestDataCreator():
         return {"name": 'my_project_' + str(uuid.uuid4()),
                 "description": description,
                 "tags": [],
+                "project_quota": [],
                 "priority": 1,
                 "can_trigger": False,
                 "private_data": True}
+
+    def ResourceUnit(self):
+        return {
+            "tags": [],
+            "description": 'my description ' + str(uuid.uuid4()),
+            "name": 'my_resource_unit_' + str(uuid.uuid4())
+        }
+
+    def ResourceType(self, description="my resource_type description", resource_url=None):
+        if resource_url is None:
+            resource_url = self.post_data_and_get_url(self.ResourceUnit(), '/resource_unit/')
+        return {
+            "tags": [],
+            "description": description,
+            "resource_unit": resource_url,
+            "name": 'my_resource_type_' + str(uuid.uuid4())
+        }
+
+    def ProjectQuota(self, description="my project quota description", project_url=None, resource_url=None):
+        if project_url is None:
+            project_url = self.post_data_and_get_url(self.Project(), '/project/')
+        if resource_url is None:
+            resource_url = self.post_data_and_get_url(self.ResourceType(), '/resource_type/')
+
+        return {
+            "value": 1000,
+            "project": project_url,
+            "resource_type": resource_url
+        }
+
+
     def SchedulingSet(self, name="my_scheduling_set", project_url=None, generator_template_url=None):
         if project_url is None:
             project_url = self.post_data_and_get_url(self.Project(), '/project/')
diff --git a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
index e52f8ccf97cbebf10d40373f937949ecc499523e..269771a1b2e3df2111cb007d4a72b9708d2d254a 100644
--- a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
+++ b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
@@ -22,19 +22,24 @@ By importing this helper module in your unittest module you get a TMSSTestDataba
 which is automatically destroyed at the end of the unittest session.
 '''
 
+import logging
+logger = logging.getLogger(__name__)
+
 # before we import any django modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set.
 # import and start an isolated TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports)
 # this automagically sets the required DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars.
 from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
 tmss_test_env = TMSSTestEnvironment()
-tmss_test_env.start()
+try:
+    tmss_test_env.start()
+except:
+    tmss_test_env.stop()
+    exit(1)
 
 # tell unittest to stop (and automagically cleanup) the test database once all testing is done.
 def tearDownModule():
     tmss_test_env.stop()
-
-
 ################################################################################################
 # the methods below can be used to to HTTP REST calls to the django server and check the results
 ################################################################################################
@@ -44,6 +49,9 @@ import requests
 AUTH = requests.auth.HTTPBasicAuth(tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password)
 BASE_URL = tmss_test_env.django_server.url
 OIDC_URL = tmss_test_env.django_server.oidc_url
+from lofar.sas.tmss.test.test_utils import assertDataWithUrls
+import lofar.sas.tmss.tmss.settings as TMSS_SETTINGS
+
 
 def _call_API_and_assert_expected_response(test_instance, url, call, data, expected_code, expected_content):
     """
@@ -64,18 +72,24 @@ def _call_API_and_assert_expected_response(test_instance, url, call, data, expec
         raise ValueError("The provided call '%s' is not a valid API method choice" % call)
 
     if response.status_code != expected_code:
-        print("!!! Unexpected: [%s] - %s %s: %s" % (test_instance.id(), call, url, response.content.decode('utf-8').strip()))
+        logger.error("!!! Unexpected: [%s] - %s %s: %s", test_instance.id(), call, url, response.content.decode('utf-8').strip())
     test_instance.assertEqual(response.status_code, expected_code)
 
     content = response.content.decode('utf-8')
-    if response.status_code in range(200, 300):
+    from django.db import models
+
+    if response.status_code in range(200, 300) and expected_content is not None:
         r_dict = json.loads(content)
         for key, value in expected_content.items():
             if key not in r_dict.keys():
-                print('!!! Missing key: %s in %s' % (key, r_dict.keys()))
+                logger.error('!!! Missing key: %s in %s', key, r_dict.keys())
             test_instance.assertTrue(key in r_dict.keys())
-            if type(value) is list:
+            if isinstance(value, models.Model):
+                value = str(value.pk)
+                value = value.replace(' ', '%20')
+                test_instance.assertTrue(str(value) in r_dict[key])
+            elif type(value) is list:
                 test_instance.assertEqual(sorted(value), sorted(r_dict[key]))  # compare lists independent of ordering
             else:
                 test_instance.assertEqual(value, r_dict[key])
@@ -91,36 +105,74 @@ def PUT_and_assert_expected_response(test_instance, url, data, expected_code, ex
     """
     PUT data on url and assert the expected code is returned and the expected content is in the response content
     """
-
     r_dict = _call_API_and_assert_expected_response(test_instance, url, 'PUT', data, expected_code, expected_content)
     return r_dict
 
+
 def POST_and_assert_expected_response(test_instance, url, data, expected_code, expected_content):
     """
     POST data on url and assert the expected code is returned and the expected content is in the response content
     :return: response dict
     """
-
     r_dict = _call_API_and_assert_expected_response(test_instance, url, 'POST', data, expected_code, expected_content)
     return r_dict
 
-def GET_and_assert_expected_response(test_instance, url, expected_code, expected_content):
+
+def GET_and_assert_equal_expected_code(test_instance, url, expected_code):
     """
     GET from url and assert the expected code is returned and the expected content is in the response content
     """
+    r_dict = _call_API_and_assert_expected_response(test_instance, url, 'GET', {}, expected_code, None)
+    return r_dict
+
-    r_dict = _call_API_and_assert_expected_response(test_instance, url, 'GET', {}, expected_code, expected_content)
+def GET_and_assert_in_expected_response_result_list(test_instance, url, expected_content, expected_nbr_results):
+    """
+    GET from url and assert the expected code is returned and the expected content is in the response content
+    Use this check when multiple results (list) are returned
+    """
+    r_dict = _call_API_and_assert_expected_response(test_instance, url, 'GET', {}, 200, None)
+    page_size = TMSS_SETTINGS.REST_FRAMEWORK.get('PAGE_SIZE')
+    if page_size is not None and expected_nbr_results > page_size:
+        logger.warning("Limited result length due to pagination setting (%d)", page_size)
+        test_instance.assertEqual(page_size, len(r_dict["results"]))
+        test_instance.assertEqual(expected_nbr_results, r_dict["count"])
+        test_instance.assertNotEqual(None, r_dict['next'])
+        url_check = False
+    else:
+        test_instance.assertEqual(expected_nbr_results, len(r_dict["results"]))
+        test_instance.assertEqual(r_dict["count"], len(r_dict["results"]))
+        test_instance.assertEqual(None, r_dict['next'])
+        url_check = True
+
+    for item in r_dict["results"]:
+        for key in expected_content.keys():
+            test_instance.assertIn(key, item.keys())
+
+    if url_check:
+        assertDataWithUrls(test_instance, r_dict['results'][expected_nbr_results-1], expected_content)
+    return r_dict
+
+
+def GET_OK_and_assert_equal_expected_response(test_instance, url, expected_content):
+    """
+    GET from url and assert the expected code is returned and the expected content is equal to the response content
+    assertDataWithUrls is already checked in _call_API_and_assert_expected_response
+    """
+    r_dict = _call_API_and_assert_expected_response(test_instance, url, 'GET', {}, 200, expected_content)
+    # assertDataWithUrls(test_instance, r_dict, expected_content)
+    return r_dict
+
+
 def PATCH_and_assert_expected_response(test_instance, url, data, expected_code, expected_content):
     """
     PATCH data on url and assert the provided values have changed based on the server response.
     :return: url for new item
     """
-
     r_dict = _call_API_and_assert_expected_response(test_instance, url, 'PATCH', data, expected_code, expected_content)
     return r_dict
 
+
 def DELETE_and_assert_gone(test_instance, url):
     """
     DELETE item at provided url and assert that the request was accepted by the server
@@ -128,10 +181,13 @@ def DELETE_and_assert_gone(test_instance, url):
     """
     response = requests.delete(url, auth=AUTH)
     if response.status_code != 204:
-        print("!!! Unexpected: [%s] - %s %s: %s" % (test_instance.id(), 'DELETE', url, response.content))
+        logger.error("!!! Unexpected: [%s] - %s %s: %s", test_instance.id(), 'DELETE', url, response.content)
     test_instance.assertEqual(response.status_code, 204)
 
     response = requests.get(url, auth=AUTH)
    if response.status_code != 404:
-        print("!!! Unexpected: [%s] - %s %s: %s" % (test_instance.id(), 'GET', url, response.content))
+        logger.error("!!! Unexpected: [%s] - %s %s: %s", test_instance.id(), 'GET', url, response.content)
     test_instance.assertEqual(response.status_code, 404)
+
+
+
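
Note on the new t_tmssapp_specification_django_API.py tests: they all share one timestamp-bracketing pattern, sampling datetime.utcnow() before and after the ORM call and asserting that the auto-managed created_at/updated_at falls strictly inside that bracket. A minimal sketch of the pattern; MyModel and MyModel_test_data() are hypothetical stand-ins for the real TMSS models and factories:

    from datetime import datetime
    import unittest

    class MyModelTest(unittest.TestCase):
        def test_MyModel_gets_created_with_correct_creation_timestamp(self):
            # bracket the create() call with two utcnow() samples
            before = datetime.utcnow()
            entry = models.MyModel.objects.create(**MyModel_test_data())  # hypothetical model and factory
            after = datetime.utcnow()

            # created_at is filled in automatically on save, so it must lie inside the bracket
            self.assertLess(before, entry.created_at)
            self.assertGreater(after, entry.created_at)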
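
The host parameter added to TMSSDjangoServerInstance and TMSSTestEnvironment in test_utils.py makes the bind address configurable instead of hard-coding 127.0.0.1. A hedged usage sketch of the context-manager form that main_test_environment() uses; the 0.0.0.0 bind address is only an example:

    from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment

    # spin up database + LDAP + django server, exposing the REST API on all interfaces
    with TMSSTestEnvironment(host='0.0.0.0', preferred_django_port=8000) as env:
        print("TMSS test API running at", env.django_server.url)
        # ... exercise the API here; everything is torn down automagically on exit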
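
GET_and_assert_in_expected_response_result_list distinguishes two cases: when the expected number of results fits within the REST framework's PAGE_SIZE it checks the exact list length and the last item's content, otherwise it only checks the total 'count' and that a 'next' page link exists. A sketch of a call site, assuming it runs inside a unittest.TestCase with the module's BASE_URL in scope, a test_data_creator built from TMSSRESTTestDataCreator (a hypothetical instance name), and a database holding no other resource units:

    # POST one resource_unit, then assert the list endpoint returns exactly that item
    ru_data = test_data_creator.ResourceUnit()                       # hypothetical creator instance
    test_data_creator.post_data_and_get_url(ru_data, '/resource_unit/')
    GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/resource_unit/', ru_data, 1)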
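
The ProjectQuota factories show the dependency-chaining convention used throughout the test data modules: when no project/resource URL (REST flavour) or model instance (django flavour) is passed in, the factory creates the dependency on the fly, so a single call yields a fully linked ResourceUnit -> ResourceType -> ProjectQuota graph. A hedged sketch against the REST creator; the '/project_quota/' route is an assumption, not confirmed by the diff:

    # one call transitively creates a project, a resource_unit and a resource_type
    quota_data = test_data_creator.ProjectQuota()                    # hypothetical creator instance
    quota_url = test_data_creator.post_data_and_get_url(quota_data, '/project_quota/')  # assumed route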