diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 21a29aee9711c54f3acdc7a4354100ae2245f8f8..cb877ffb4ad03e1af55e7ca0edb239a22854c594 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,5 @@ stages: + - prepare-base - prepare - build - unit_test @@ -7,26 +8,30 @@ stages: - deploy # -# PREPARE STAGE +# PREPARE BASE STAGE # +prepare_ci_base_docker_image: + stage: prepare-base + script: + - docker build -t ci_base:$CI_COMMIT_SHORT_SHA -f Docker/lofar-ci/Dockerfile_ci_base . +# +# PREPARE STAGE +# prepare_ci_sas_docker_image: stage: prepare script: - - docker build -t ci_base -f Docker/lofar-ci/Dockerfile_ci_base . - - docker build -t ci_sas -f Docker/lofar-ci/Dockerfile_ci_sas . + - docker build --build-arg BASE_VERSION=$CI_COMMIT_SHORT_SHA -t ci_sas:$CI_COMMIT_SHORT_SHA -f Docker/lofar-ci/Dockerfile_ci_sas . prepare_ci_lta_docker_image: stage: prepare script: - - docker build -t ci_base -f Docker/lofar-ci/Dockerfile_ci_base . - - docker build -t ci_lta -f Docker/lofar-ci/Dockerfile_ci_lta . + - docker build --build-arg BASE_VERSION=$CI_COMMIT_SHORT_SHA -t ci_lta:$CI_COMMIT_SHORT_SHA -f Docker/lofar-ci/Dockerfile_ci_lta . prepare_ci_mac_docker_image: stage: prepare script: - - docker build -t ci_base -f Docker/lofar-ci/Dockerfile_ci_base . - - docker build -t ci_mac -f Docker/lofar-ci/Dockerfile_ci_mac . + - docker build --build-arg BASE_VERSION=$CI_COMMIT_SHORT_SHA -t ci_mac:$CI_COMMIT_SHORT_SHA -f Docker/lofar-ci/Dockerfile_ci_mac . # # BUILD STAGE @@ -34,7 +39,7 @@ prepare_ci_mac_docker_image: build_TMSS: stage: build - image: ci_sas:latest + image: ci_sas:$CI_COMMIT_SHORT_SHA script: - PACKAGE=TMSS - echo "Building $PACKAGE..." @@ -52,7 +57,7 @@ build_TMSS: build_RAServices: stage: build - image: ci_sas:latest + image: ci_sas:$CI_COMMIT_SHORT_SHA script: - PACKAGE=RAServices - echo "Building $PACKAGE..." @@ -70,7 +75,7 @@ build_RAServices: build_LTAIngest: stage: build - image: ci_lta:latest + image: ci_lta:$CI_COMMIT_SHORT_SHA script: - PACKAGE=LTAIngest - echo "Building $PACKAGE..." @@ -88,15 +93,19 @@ build_LTAIngest: build_MCU_MAC: stage: build - image: ci_mac:latest + image: ci_mac:$CI_COMMIT_SHORT_SHA script: - - PACKAGE=MainCU + - PACKAGE=MCU_MAC - echo "Building $PACKAGE..." - mkdir -p build/gnucxx11_opt - cd build/gnucxx11_opt - - cmake -DBUILD_PACKAGES=$PACKAGE -DUSE_LOG4CPLUS=false -DWINCC_ROOT_DIR=/opt/WinCC_OA/3.14/ -DBLITZ_ROOT_DIR=/opt/blitz/ ../.. + - cmake -DBUILD_PACKAGES=$PACKAGE -DWINCC_ROOT_DIR=/opt/WinCC_OA/3.16/ -DBLITZ_ROOT_DIR=/opt/blitz/ -DCASACORE_ROOT_DIR=/opt/casacore/ -DCMAKE_INSTALL_PREFIX=/opt/lofar ../.. - make -j 12 - - make install + - make DESTDIR=${CI_BUILDS_DIR}/install install + - cd ${CI_BUILDS_DIR}/install/opt/lofar + - tar --ignore-failed-read --exclude=include -czf MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.ztar * + - curl --insecure --upload-file MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.ztar -u upload:upload https://support.astron.nl/nexus/content/repositories/branches/nl/astron/lofar/${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}/MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.x86_64.ztar + dependencies: - prepare_ci_mac_docker_image artifacts: @@ -110,7 +119,7 @@ build_MCU_MAC: unit_test_TMSS: stage: unit_test - image: ci_sas:latest + image: ci_sas:$CI_COMMIT_SHORT_SHA script: - PACKAGE=TMSS - echo "Testing $PACKAGE..." @@ -126,7 +135,7 @@ unit_test_TMSS: unit_test_RAServices: stage: unit_test - image: ci_sas:latest + image: ci_sas:$CI_COMMIT_SHORT_SHA script: - PACKAGE=RAServices - echo "Testing $PACKAGE..." 
@@ -148,7 +157,7 @@ unit_test_RAServices: unit_test_LTAIngest: stage: unit_test - image: ci_lta:latest + image: ci_lta:$CI_COMMIT_SHORT_SHA script: - PACKAGE=LTAIngest - echo "Testing $PACKAGE..." @@ -170,9 +179,10 @@ unit_test_LTAIngest: unit_test_MCU_MAC: stage: unit_test - image: ci_mac:latest + image: ci_mac:$CI_COMMIT_SHORT_SHA script: - - PACKAGE=MainCu + - echo "Starting Qpid server..." && qpidd & + - PACKAGE=MCU_MAC - echo "Testing $PACKAGE..." - cd build/gnucxx11_opt - SKIP_INTEGRATION_TESTS=true ctest @@ -200,7 +210,7 @@ dockerize_TMSS: script: - cd build/gnucxx11_opt - ls * - - docker build -t tmss_django:$CI_COMMIT_SHORT_SHA -f docker/Dockerfile-tmss . + - docker build --build-arg SAS_VERSION=$CI_COMMIT_SHORT_SHA -t tmss_django:$CI_COMMIT_SHORT_SHA -f docker/Dockerfile-tmss . - cd ../.. - cd SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc - docker build -t tmss_testprovider:$CI_COMMIT_SHORT_SHA -f dockerfiles/oidc_testprovider . @@ -222,7 +232,7 @@ dockerize_TMSS: integration_test_TMSS: stage: integration_test - image: ci_sas:latest + image: ci_sas:$CI_COMMIT_SHORT_SHA script: - PACKAGE=TMSS - echo "Integration Testing $PACKAGE..." @@ -244,7 +254,7 @@ integration_test_TMSS: integration_test_RAServices: stage: integration_test - image: ci_sas:latest + image: ci_sas:$CI_COMMIT_SHORT_SHA services: - rabbitmq:latest variables: @@ -266,7 +276,7 @@ integration_test_RAServices: integration_test_LTAIngest: stage: integration_test - image: ci_lta:latest + image: ci_lta:$CI_COMMIT_SHORT_SHA script: - PACKAGE=LTAIngest - echo "Integration Testing $PACKAGE..." @@ -338,3 +348,19 @@ deploy-tmss-ua: when: manual only: - "master" + +deploy-MCU_MAC-test: + stage: deploy + before_script: + - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client git -y )' + - eval $(ssh-agent -s) + - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - + - mkdir -p ~/.ssh + - chmod 700 ~/.ssh + - ssh-keyscan mcu199.control.lofar >> ~/.ssh/known_hosts + - chmod 644 ~/.ssh/known_hosts + script: + - ssh lofarsys@mcu199.control.lofar "MAC_install -b ${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA} -v ${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}" + dependencies: + - unit_test_MCU_MAC + when: manual \ No newline at end of file diff --git a/CMake/FindBoost.cmake b/CMake/FindBoost.cmake index 7591d7fd009eda4096be60058155b5ea7baeeee3..ea39e0aecac0fb98b90b8722641dd7c680b0f914 100644 --- a/CMake/FindBoost.cmake +++ b/CMake/FindBoost.cmake @@ -79,6 +79,21 @@ if("${Boost_FIND_COMPONENTS}" MATCHES "python" AND NOT "${Boost_FIND_COMPONENTS} else(PYTHON_FOUND) message(SEND_ERROR "boost-python was requested but python was not found.") endif(PYTHON_FOUND) +else("${Boost_FIND_COMPONENTS}" MATCHES "python" AND NOT "${Boost_FIND_COMPONENTS}" MATCHES "python3") + if("${Boost_FIND_COMPONENTS}" MATCHES "python") + find_package(Python) + if(PYTHON_FOUND) + if(PYTHON_VERSION_MINOR EQUAL 8) + if(EXISTS "/etc/debian_version") + # ubuntu 20.04 comes with python3.8 and boost1.71 + string(REPLACE "python3" "python" Boost_FIND_COMPONENTS "${Boost_FIND_COMPONENTS}") + set(Boost_FIND_REQUIRED_python TRUE) + endif(EXISTS "/etc/debian_version") + endif(PYTHON_VERSION_MINOR EQUAL 8) + else(PYTHON_FOUND) + message(SEND_ERROR "boost-python was requested but python was not found.") + endif(PYTHON_FOUND) + endif("${Boost_FIND_COMPONENTS}" MATCHES "python") endif("${Boost_FIND_COMPONENTS}" MATCHES "python" AND NOT "${Boost_FIND_COMPONENTS}" MATCHES "python3") # Call the "real" FindBoost module. 
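Note on the pinned image tags above: the prepare jobs pass --build-arg BASE_VERSION=$CI_COMMIT_SHORT_SHA, which is consumed by an ARG declared before FROM in the ci_sas/ci_lta/ci_mac Dockerfiles further down, so every derived image is built on exactly the ci_base image produced by the prepare-base stage for the same commit. A minimal sketch of the pattern (the abc1234 tag is only a stand-in for $CI_COMMIT_SHORT_SHA):

    # Dockerfile of a derived image: an ARG before FROM selects the base tag at build time
    ARG BASE_VERSION=latest
    FROM ci_base:$BASE_VERSION
    # an ARG declared before FROM goes out of scope after FROM; re-declare it if later steps need it
    ARG BASE_VERSION

    # corresponding build commands, as the CI jobs run them
    docker build -t ci_base:abc1234 -f Docker/lofar-ci/Dockerfile_ci_base .
    docker build --build-arg BASE_VERSION=abc1234 -t ci_sas:abc1234 -f Docker/lofar-ci/Dockerfile_ci_sas .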
diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake index 3e4b94eb3309582b500af2f559b9c5e0537a4056..740c52181b38c49d3b3a57866e5423582e3d096d 100644 --- a/CMake/LofarPackageList.cmake +++ b/CMake/LofarPackageList.cmake @@ -1,7 +1,7 @@ # - Create for each LOFAR package a variable containing the absolute path to # its source directory. # -# Generated by gen_LofarPackageList_cmake.sh at Fr 28. Feb 20:47:32 CET 2020 +# Generated by gen_LofarPackageList_cmake.sh at do 28 mei 2020 11:22:44 CEST # # ---- DO NOT EDIT ---- # @@ -142,7 +142,6 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(OTDB_Comps_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/Deployment/data/OTDB) set(StaticMetaData_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/Deployment/data/StaticMetaData) set(WinCCPublisher_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/WinCCPublisher) - set(WinCCREST_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/WinCCREST) set(WinCCDBBridge_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/WinCCDBBridge) set(TaskManagement_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/Services/TaskManagement) set(TBB_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/TBB) @@ -207,6 +206,8 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(RAScripts_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/RAScripts) set(TaskPrescheduler_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/TaskPrescheduler) set(RACommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/Common) + set(TMSSClient_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/client) + set(TMSSSubtaskSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/subtask_scheduling) set(TriggerEmailServiceCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Common) set(TriggerEmailServiceServer_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Server) set(CCU_MAC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SubSystems/CCU_MAC) diff --git a/CMake/variants/variants.lcs157 b/CMake/variants/variants.lcs157 index 16996501e47d33d05dd108a53270f29261c09b94..1e70de71e0b9d86b4c0e0f32fbdba10342bd8c1d 100644 --- a/CMake/variants/variants.lcs157 +++ b/CMake/variants/variants.lcs157 @@ -3,7 +3,7 @@ #option(BUILD_SHARED_LIBS "Build shared libraries" OFF) set(WINCC_ROOT_DIR /opt/WinCC_OA/3.16) -Set(CASACORE_ROOT_DIR "/opt/casacore") +set(CASACORE_ROOT_DIR "/opt/casacore") set(CASAREST_ROOT_DIR "/opt/casarest") set(PYRAP_ROOT_DIR "/opt/pyrap") set(AOFLAGGER_ROOT_DIR "/opt/aoflagger/build") diff --git a/COPYING b/COPYING deleted file mode 100644 index 94a9ed024d3859793618152ea559a168bbcbb5e2..0000000000000000000000000000000000000000 --- a/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. 
- - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. 
This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. 
This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. 
- - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. 
- - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. 
Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see <http://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - <program> Copyright (C) <year> <name of author> - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -<http://www.gnu.org/licenses/>. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -<http://www.gnu.org/philosophy/why-not-lgpl.html>. 
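The Dockerfile_ci_mac change below adds a COPY of Docker/lofar-ci/pvssInst.conf and therefore, as its header note states, has to be built with the repository root as the build context: COPY resolves its source path against the build context, not against the Dockerfile's location. A short sketch of what that implies (the build command is the one given in the Dockerfile's note; the failing variant is only illustrative):

    # from the repository root: Docker/lofar-ci/pvssInst.conf is inside the build context, so COPY works
    docker build -f Docker/lofar-ci/Dockerfile_ci_mac -t ci_mac:latest .

    # run from Docker/lofar-ci/ instead, the COPY source path Docker/lofar-ci/pvssInst.conf
    # would not exist in the build context and the build would fail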
diff --git a/Docker/lofar-ci/Dockerfile_ci_base b/Docker/lofar-ci/Dockerfile_ci_base
index 83b0b77c28794f6932642c44830df1bee076c28d..4cb0a2f0679c63ac011c8b3fcd1cd42a803073d5 100644
--- a/Docker/lofar-ci/Dockerfile_ci_base
+++ b/Docker/lofar-ci/Dockerfile_ci_base
@@ -1,5 +1,5 @@
 #
-# This base image is just some beasic dev tools on top of CentOS 7
+# This base image is just some basic dev tools on top of CentOS 7
 #
 # base
 #
@@ -10,5 +10,5 @@ RUN yum -y groupinstall 'Development Tools' && \
     yum -y install cmake cmake3 gcc git log4cplus-devel python3 python3-devel python3-pip which wget curl atop valgrind && \
     pip3 install kombu requests coverage python-qpid-proton && \
     adduser lofarsys && \
-    mkdir -p /opt && chown -R lofarsys:lofarsys /opt
+    mkdir -p /opt/lofar && chown -R lofarsys:lofarsys /opt
diff --git a/Docker/lofar-ci/Dockerfile_ci_lta b/Docker/lofar-ci/Dockerfile_ci_lta
index e51f33db6cd031e8a37087df8b4c2eebf3a17f9e..766dbb0f95434f4348935220f472faf05c350293 100644
--- a/Docker/lofar-ci/Dockerfile_ci_lta
+++ b/Docker/lofar-ci/Dockerfile_ci_lta
@@ -3,10 +3,13 @@
 #
 # base
 #
-FROM ci_base:latest
+ARG BASE_VERSION=latest
+FROM ci_base:$BASE_VERSION
 
 RUN echo "Installing packages for LTA..."
 
+RUN yum install -y nmap-ncat
+
 # see https://www.postgresql.org/download/linux/redhat/ on how to install postgresql-server > 9.2 on centos7
 RUN yum erase -y postgresql postgresql-server postgresql-devel && \
     yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm && \
diff --git a/Docker/lofar-ci/Dockerfile_ci_mac b/Docker/lofar-ci/Dockerfile_ci_mac
index 5b48b8c395805b8a024730b56377a67e9b04007c..b23bda761611ba93ddb5fa6d9bba1cc05cc40078 100644
--- a/Docker/lofar-ci/Dockerfile_ci_mac
+++ b/Docker/lofar-ci/Dockerfile_ci_mac
@@ -3,21 +3,39 @@
 #
 # base
 #
-FROM ci_base:latest
+# !!! Note: Since Docker apparently does not allow Dockerfile-relative paths in a COPY and Gitlab CI works relative
+# !!! to the repo root, this image has to be built with that context, e.g.:
+# !!! docker build -f Docker/lofar-ci/Dockerfile_ci_mac -t ci_mac:latest .
+#
+ARG BASE_VERSION=latest
+FROM ci_base:$BASE_VERSION
 
 RUN echo "Installing packages for MAC..." && \
     yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel jsoncpp-devel jsoncpp libcurl-devel libcurl && \
     pip3 install psycopg2 testing.postgresql lxml mock numpy kombu requests python-dateutil fabric
 
+RUN echo "Installing WinCC 3.16 build and Demo App from Nexus repo..." && \
+    cd /tmp && \
+    wget https://support.astron.nl/nexus/content/repositories/snapshots/nl/astron/lofar/wincc/3_16/WinCC_OA_3.16-base-rhel-0-14.x86_64.rpm && \
+    wget https://support.astron.nl/nexus/content/repositories/snapshots/nl/astron/lofar/wincc/3_16/WinCC_OA_3.16-specialfunctions-rhel-0-14.x86_64.rpm && \
+    wget https://support.astron.nl/nexus/content/repositories/snapshots/nl/astron/lofar/wincc/3_16/WinCC_OA_3.16-applications-rhel-0-14.x86_64.rpm && \
+    wget https://support.astron.nl/nexus/content/repositories/snapshots/nl/astron/lofar/wincc/3_16/WinCC_OA_3.16-api-rhel-0-14.x86_64.rpm && \
+    yum install -y WinCC_OA_3.16-base-rhel-0-14.x86_64.rpm && \
+    yum install -y WinCC_OA_3.16-specialfunctions-rhel-0-14.x86_64.rpm && \
+    yum install -y WinCC_OA_3.16-applications-rhel-0-14.x86_64.rpm && \
+    yum install -y WinCC_OA_3.16-api-rhel-0-14.x86_64.rpm
+
+COPY Docker/lofar-ci/pvssInst.conf /etc/opt/pvss/pvssInst.conf
+
 USER lofarsys
 
-#RUN echo "Installing Casacore..." && \
-#    git clone https://github.com/casacore/casacore && \
-#    mkdir /casacore/build/ && \
-#    cd /casacore/build/ && \
-#    cmake -DCMAKE_INSTALL_PREFIX=/opt/casacore -DBUILD_PYTHON3=ON -DBUILD_PYTHON=OFF -DPYTHON_EXECUTABLE=/usr/bin/python3 -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCMAKE_BUILD_TYPE=Release .. && \
-#    make -j 8 && \
-#    make install
+RUN echo "Installing Casacore..." && \
+    mkdir -p /opt/3rdparty_sources/ && cd /opt/3rdparty_sources/ && \
+    git clone --branch v3.2.0 https://github.com/casacore/casacore.git casacore && \
+    mkdir casacore/build/ && cd casacore/build/ && \
+    cmake -DCMAKE_INSTALL_PREFIX=/opt/casacore -DBUILD_PYTHON3=ON -DBUILD_PYTHON=OFF -DPYTHON_EXECUTABLE=/usr/bin/python3 -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCMAKE_BUILD_TYPE=Release .. && \
+    make -j 8 && \
+    make install
 
 RUN echo "Installing Blitz++" && \
     mkdir -p /opt/3rdparty_sources/ && cd /opt/3rdparty_sources/ && \
@@ -27,11 +45,7 @@ RUN echo "Installing Blitz++" && \
     make -j 8 lib && \
     make install
 
-RUN echo "Installing WinCC3.14 from nexus ALTA repo..." && \
-    cd /tmp && \
-    wget https://support.astron.nl/nexus/content/repositories/snapshots/nl/alta/buildWinCC314api.tar.gz && \
-    tar -xvf buildWinCC314api.tar.gz && \
-    cd opt && \
-    mv WinCC_OA /opt/
+ENV LD_LIBRARY_PATH /opt/blitz/lib64/:$LD_LIBRARY_PATH
 
-ENV LD_LIBRARY_PATH /opt/WinCC_OA/3.14/bin:$LD_LIBRARY_PATH
+ENV LD_LIBRARY_PATH /opt/WinCC_OA/3.16/bin:$LD_LIBRARY_PATH
+ENV PATH /opt/WinCC_OA/3.16/bin/:$PATH
\ No newline at end of file
diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas
index 35632ec04f065843eec71d62ea63853b5c4d85f1..e49f816720424fb51a0a7c139c166f2622d881c4 100644
--- a/Docker/lofar-ci/Dockerfile_ci_sas
+++ b/Docker/lofar-ci/Dockerfile_ci_sas
@@ -3,7 +3,8 @@
 #
 # base
 #
-FROM ci_base:latest
+ARG BASE_VERSION=latest
+FROM ci_base:$BASE_VERSION
 
 RUN echo "Installing packages for SAS..."
&& \ yum install -y log4cplus log4cplus-devel python3 python3-libs python3-devel boost readline-devel boost-devel binutils-devel boost-python36 boost-python36-devel gettext which openldap-devel npm nodejs git java-11-openjdk python-twisted-core diff --git a/Docker/lofar-ci/pvssInst.conf b/Docker/lofar-ci/pvssInst.conf new file mode 100644 index 0000000000000000000000000000000000000000..c923741629ae2ecfe16b4ab5a28b1b16cac4a039 --- /dev/null +++ b/Docker/lofar-ci/pvssInst.conf @@ -0,0 +1,143 @@ +[Software\ETM\PVSS II\3.16] +currentProject = "DemoApplication_3.16" +InstallationDir = "/opt/WinCC_OA/3.16" + +[Software\ETM\PVSS II\Configs\AdvS7base_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/AdvS7base_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/AdvS7base_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/AdvS7base_3.16" + +[Software\ETM\PVSS II\Configs\AdvS7_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/AdvS7_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/AdvS7_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/AdvS7_3.16" + +[Software\ETM\PVSS II\Configs\BACnet_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/BACnet_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/BACnet_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/BACnet_3.16" + +[Software\ETM\PVSS II\Configs\BasicS7base_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/BasicS7base_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/BasicS7base_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/BasicS7base_3.16" + +[Software\ETM\PVSS II\Configs\BasicS7_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/BasicS7_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/BasicS7_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/BasicS7_3.16" + +[Software\ETM\PVSS II\Configs\CommCenter_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/CommCenter_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/CommCenter_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/CommCenter_3.16" + +[Software\ETM\PVSS II\Configs\DBLogger_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/DBLogger_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/DBLogger_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/DBLogger_3.16" + +[Software\ETM\PVSS II\Configs\Redundancy2x2_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/Redundancy2x2_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/Redundancy2x2_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/Redundancy2x2_3.16" + +[Software\ETM\PVSS II\Configs\Stdlib_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" 
+InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/Stdlib_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/Stdlib_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/Stdlib_3.16" + +[Software\ETM\PVSS II\Configs\MobileAppServer_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/MobileAppServer_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/MobileAppServer_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/MobileAppServer_3.16" + +[Software\ETM\PVSS II\Configs\SmartSCADA_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/SmartSCADA_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/SmartSCADA_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/SmartSCADA_3.16" + +[Software\ETM\PVSS II\Configs\TestFramework_3.16] +notRunnable = "1" +InstallationDate = "2020.03.11 09:56:57.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/TestFramework_3.16" +pvss_path = "" +PVSS_II = "/opt/WinCC_OA/3.16/TestFramework_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/TestFramework_3.16" + +[Software\ETM\PVSS II\Configs\DemoApplication_3.16] +notRunnable = "0" +InstallationDate = "2020.03.11 09:57:30.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/DemoApplication_3.16" +pvss_path = "/opt/WinCC_OA/3.16" +PVSS_II = "/opt/WinCC_OA/3.16/DemoApplication_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/DemoApplication_3.16" + +[Software\ETM\PVSS II\Configs\GettingStarted_3.16] +notRunnable = "0" +InstallationDate = "2020.03.11 09:57:30.000" +InstallationUser = "root" +InstallationVersion = "3.16" +proj_path = "/opt/WinCC_OA/3.16/GettingStarted_3.16" +pvss_path = "/opt/WinCC_OA/3.16" +PVSS_II = "/opt/WinCC_OA/3.16/GettingStarted_3.16/config/config" +InstallationDir = "/opt/WinCC_OA/3.16/GettingStarted_3.16" \ No newline at end of file diff --git a/Docker/lofar-pipeline/Dockerfile.tmpl b/Docker/lofar-pipeline/Dockerfile.tmpl index b2ac19d162750f877580d75b2849f290019a6acb..35c8eb32756979634dddaf5aa5c4e87cdba8dcc3 100644 --- a/Docker/lofar-pipeline/Dockerfile.tmpl +++ b/Docker/lofar-pipeline/Dockerfile.tmpl @@ -5,6 +5,8 @@ # Allow to specify a specific base image version. 
ARG LOFAR_BASE_IMAGE_VERSION=latest FROM lofar-base:${LOFAR_BASE_IMAGE_VERSION} +# Docker mandates that this has to be done again +ARG LOFAR_BASE_IMAGE_VERSION=latest ENV LOFAR_BASE_IMAGE_VERSION=${LOFAR_BASE_IMAGE_VERSION} # Add build date to the environment diff --git a/LCS/Messaging/python/messaging/messagebus.py b/LCS/Messaging/python/messaging/messagebus.py index c5273956ec6e68066192054c9a9261b00b74188e..65d72b100967e758a65a5c734063a05e7069c64c 100644 --- a/LCS/Messaging/python/messaging/messagebus.py +++ b/LCS/Messaging/python/messaging/messagebus.py @@ -1417,10 +1417,11 @@ class BusListener: sanitized_routing_key = self.routing_key.replace(".#","").replace(".*","").replace("#","").replace("*","") if not sanitized_routing_key: sanitized_routing_key = "all" - return "%s.queue.for.%s.%s.on.%s" % (self.exchange, - program_name(include_extension=False), - self.__class__.__name__, - sanitized_routing_key) + return "%s.queue.for.%s.%s.%s.on.%s" % (self.exchange, + program_name(include_extension=False), + self.__class__.__name__, + self._handler_type.__name__, + sanitized_routing_key) def is_running(self) -> bool: """Is this listener running its background listen/handle loops?""" diff --git a/LCS/PyCommon/CMakeLists.txt b/LCS/PyCommon/CMakeLists.txt index 4f82276090fd9bbcdc898ee21dc203390a238baa..2ab093021552dc5a10dcd660e127dd67f0be96b4 100644 --- a/LCS/PyCommon/CMakeLists.txt +++ b/LCS/PyCommon/CMakeLists.txt @@ -29,6 +29,7 @@ set(_py_files subprocess_utils.py xmlparse.py json_utils.py + locking.py test_utils.py) python_install(${_py_files} DESTINATION lofar/common) diff --git a/LCS/PyCommon/cep4_utils.py b/LCS/PyCommon/cep4_utils.py index 8489ec3e52b410b030ccf8e1d4ed66122a9ab109..5326fd90ce8c351f8858cfd4e71a9843aa33e996 100755 --- a/LCS/PyCommon/cep4_utils.py +++ b/LCS/PyCommon/cep4_utils.py @@ -317,6 +317,13 @@ def get_cep4_node_with_lowest_load(max_normalized_load=0.33, partition: str=SLUR if not nodes: # if not, then just query all up and running nodes nodes = get_cep4_up_and_running_nodes(partition=partition) + if not nodes: # still no nodes? return None + return None + + if len(nodes) == 1: + # no need to sort + return nodes[0] + node_nrs = get_cep4_available_nodes_sorted_ascending_by_load(max_normalized_load=max_normalized_load, min_nr_of_nodes=1, node_nrs=nodes, diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py index aa98861b84a92ad04ef782723add4023b7a3715a..e23b336d8fbb3bbda6ff477d5c88deec28b2b866 100644 --- a/LCS/PyCommon/json_utils.py +++ b/LCS/PyCommon/json_utils.py @@ -18,8 +18,15 @@ from jsonschema import validators, Draft6Validator from copy import deepcopy + def _extend_with_default(validator_class): - '''see: https://python-jsonschema.readthedocs.io/en/stable/faq/#why-doesn-t-my-schema-s-default-property-set-the-default-on-my-instance''' + """ + Extend the properties validation so that it adds missing properties with their default values (where one is defined + in the schema). + Note: Make sure that items of type object or array in the schema define empty structures as defaults for this to + traverse down and add enclosed properties. 
+ see: <https://python-jsonschema.readthedocs.io/en/stable/faq/#why-doesn-t-my-schema-s-default-property-set-the-default-on-my-instance> + """ validate_properties = validator_class.VALIDATORS["properties"] def set_defaults(validator, properties, instance, schema): @@ -36,7 +43,33 @@ def _extend_with_default(validator_class): ) +def _extend_with_required(validator_class): + """ + Extend the required properties validation so that it adds missing required properties with their default values, + (where one is defined in the schema). + (Note: the check for required properties happens before property validation, so this is required even though the + override in _extend_with_default would as well add the property.) + see: <https://python-jsonschema.readthedocs.io/en/stable/faq/#why-doesn-t-my-schema-s-default-property-set-the-default-on-my-instance> + """ + validate_required = validator_class.VALIDATORS["required"] + + def set_required_properties(validator, properties, instance, schema): + for property in properties: + subschema = schema['properties'][property] + if "default" in subschema: + instance.setdefault(property, subschema["default"]) + for error in validate_required( + validator, properties, instance, schema, + ): + yield error + + return validators.extend( + validator_class, {"required" : set_required_properties}, + ) + +# define a custom validator that fills in properties before validation _DefaultValidatingDraft6Validator = _extend_with_default(Draft6Validator) +_DefaultValidatingDraft6Validator = _extend_with_required(_DefaultValidatingDraft6Validator) def get_default_json_object_for_schema(schema: str) -> dict: '''return a valid json object for the given schema with all properties with their default values''' diff --git a/LCS/PyCommon/locking.py b/LCS/PyCommon/locking.py new file mode 100644 index 0000000000000000000000000000000000000000..de3e0f2810b603e05acc80aa5aed32638e4de248 --- /dev/null +++ b/LCS/PyCommon/locking.py @@ -0,0 +1,247 @@ +''' + Copyright (c) 2016, 2017 Timothy Savannah All Rights Reserved under terms of LGPLv3. + You should have received a copy of this as LICENSE with the source distribution, or it is always available at + http://www.gnu.org/licenses/lgpl-3.0.en.html + + See https://github.com/kata198/NamedAtomicLock for latest version + + NamedAtomicLock - A Named atomic lock local to the machine + +''' +# vim: set ts=4 sw=4 expandtab : + + +import os +import tempfile +import time + + +__all__ = ('NamedAtomicLock',) + +__version__ = '1.1.3' + +__version_tuple__ = (1, 1, 3) + +DEFAULT_POLL_TIME = .1 + +try: + FileNotFoundError +except: + FileNotFoundError = OSError + +class NamedAtomicLock(object): + + def __init__(self, name, lockDir=None, maxLockAge=None): + ''' + NamedAtomicLock - Create a NamedAtomicLock. + This uses a named directory, which is defined by POSIX as an atomic operation. + + @param name <str> - The lock name, Cannot contain directory seperator (like '/') + + @param lockDir <None/str> - Directory in which to store locks. Defaults to tempdir + + @param maxLockAge <None/float> - Maximum number of seconds lock can be held before it is considered "too old" and fair game to be taken. + You should likely define this as a reasonable number, maybe 4x as long as you think the operation will take, so that the lock doesn't get + held by a dead process. 
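The two validator extensions above combine into a validator that injects schema defaults while it validates. A self-contained sketch of the same jsonschema pattern (the example schema is made up, and the "required" handling from _extend_with_required is left out for brevity):

    from jsonschema import Draft6Validator, validators

    def with_defaults(validator_class):
        validate_properties = validator_class.VALIDATORS["properties"]

        def set_defaults(validator, properties, instance, schema):
            # fill in missing properties that define a default, then run the normal checks
            for prop, subschema in properties.items():
                if "default" in subschema:
                    instance.setdefault(prop, subschema["default"])
            yield from validate_properties(validator, properties, instance, schema)

        return validators.extend(validator_class, {"properties": set_defaults})

    DefaultFillingValidator = with_defaults(Draft6Validator)

    schema = {"type": "object",
              "properties": {"duration": {"type": "number", "default": 600},
                             "stations": {"type": "array", "default": []}}}

    spec = {}
    DefaultFillingValidator(schema).validate(spec)   # fills in the defaults while validating
    print(spec)                                      # {'duration': 600, 'stations': []}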
+ + ''' + self.name = name + self.maxLockAge = maxLockAge + + if os.sep in name: + raise ValueError('Name cannot contain "%s"' %(os.sep,)) + + if lockDir: + if lockDir[-1] == os.sep: + lockDir = lockDir[:-1] + if not lockDir: + raise ValueError('lockDir cannot be ' + os.sep) + else: + lockDir = tempfile.gettempdir() + + self.lockDir = lockDir + + if not os.path.isdir(lockDir): + raise ValueError('lockDir %s either does not exist or is not a directory.' %(lockDir,)) + + if not os.access(lockDir, os.W_OK): + raise ValueError('Cannot write to lock directory: %s' %(lockDir,)) + self.lockPath = lockDir + os.sep + name + + self.held = False + self.acquiredAt = None + + def __enter__(self): + '''acquire the lock''' + try: + self.acquire() + except Exception as e: + self.release() + raise + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + '''release the lock''' + self.release() + + def acquire(self, timeout=None): + ''' + acquire - Acquire given lock. Can be blocking or nonblocking by providing a timeout. + Returns "True" if you got the lock, otherwise "False" + + @param timeout <None/float> - Max number of seconds to wait, or None to block until we can acquire it. + + @return <bool> - True if you got the lock, otherwise False. + ''' + if self.held is True: + # NOTE: Without some type of in-directory marker (like a uuid) we cannot + # refresh an expired lock accurately + if os.path.exists(self.lockPath): + return True + # Someone removed our lock + self.held = False + + # If we aren't going to poll at least 5 times, give us a smaller interval + if timeout: + if timeout / 5.0 < DEFAULT_POLL_TIME: + pollTime = timeout / 10.0 + else: + pollTime = DEFAULT_POLL_TIME + + endTime = time.time() + timeout + keepGoing = lambda : bool(time.time() < endTime) + else: + pollTime = DEFAULT_POLL_TIME + keepGoing = lambda : True + + + + success = False + while keepGoing(): + try: + os.mkdir(self.lockPath) + success = True + break + except: + time.sleep(pollTime) + if self.maxLockAge: + if os.path.exists(self.lockPath) and os.stat(self.lockPath).st_mtime < time.time() - self.maxLockAge: + try: + os.rmdir(self.lockPath) + except: + # If we did not remove the lock, someone else is at the same point and contending. Let them win. + time.sleep(pollTime) + + if success is True: + self.acquiredAt = time.time() + + self.held = success + return success + + def release(self, forceRelease=False): + ''' + release - Release the lock. + + @param forceRelease <bool> default False - If True, will release the lock even if we don't hold it. 
+ + @return - True if lock is released, otherwise False + ''' + if not self.held: + if forceRelease is False: + return False # We were not holding the lock + else: + self.held = True # If we have force release set, pretend like we held its + + if not os.path.exists(self.lockPath): + self.held = False + self.acquiredAt = None + return True + + if forceRelease is False: + # We waited too long and lost the lock + if self.maxLockAge and time.time() > self.acquiredAt + self.maxLockAge: + self.held = False + self.acquiredAt = None + return False + + self.acquiredAt = None + + try: + os.rmdir(self.lockPath) + self.held = False + return True + except: + self.held = False + return False + + + def __checkExpiration(self, mtime=None): + ''' + __checkExpiration - Check if we have expired + + @param mtime <int> - Optional mtime if known, otherwise will be gathered + + @return <bool> - True if we did expire, otherwise False + ''' + if not self.maxLockAge: + return False + + if mtime is None: + try: + mtime = os.stat(self.lockPath).st_mtime + except FileNotFoundError as e: + return False + + if mtime < time.time() - self.maxLockAge: + return True + + return False + + @property + def isHeld(self): + ''' + isHeld - True if anyone holds the lock, otherwise False. + + @return bool - If lock is held by anyone + ''' + if not os.path.exists(self.lockPath): + return False + + try: + mtime = os.stat(self.lockPath).st_mtime + except FileNotFoundError as e: + return False + + if self.__checkExpiration(mtime): + return False + + return True + + @property + def hasLock(self): + ''' + hasLock - Property, returns True if we have the lock, or False if we do not. + + @return <bool> - True/False if we have the lock or not. + ''' + # If we don't hold it currently, return False + if self.held is False: + return False + + # Otherwise if we think we hold it, but it is not held, we have lost it. + if not self.isHeld: + self.acquiredAt = None + self.held = False + return False + + # Check if we expired + if self.__checkExpiration(self.acquiredAt): + self.acquiredAt = None + self.held = False + return False + + + return True + + +# vim: set ts=4 sw=4 expandtab : diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py index 6ff9dc07e9f2bfdd4a811547e956cac24de87baf..f98092c3d7fd2a42f745c20ff59b2703bdb6cb43 100755 --- a/LCS/PyCommon/test/postgres.py +++ b/LCS/PyCommon/test/postgres.py @@ -30,12 +30,16 @@ from lofar.common.dbcredentials import Credentials from lofar.common.postgres import PostgresDatabaseConnection from lofar.common.testing.dbcredentials import TemporaryCredentials from lofar.common.util import find_free_port +from datetime import datetime, timedelta + +from lofar.common.locking import NamedAtomicLock class PostgresTestDatabaseInstance(): ''' A helper class which instantiates a running postgres server (not interfering with any other test/production postgres servers) Best used in a 'with'-context so the server is destroyed automagically. Derive your own sub-class and implement apply_database_schema with your own sql schema to setup your type of database. 
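A minimal usage sketch for the NamedAtomicLock added above (it installs as lofar.common.locking per the PyCommon CMakeLists change; the lock name and timings below are made up):

    from lofar.common.locking import NamedAtomicLock

    lock = NamedAtomicLock('my_exclusive_task', maxLockAge=300)   # consider the lock stale after 5 minutes

    if lock.acquire(timeout=10):                                  # wait at most 10 seconds
        try:
            pass  # ... machine-wide critical section ...
        finally:
            lock.release()
    else:
        print("could not get the lock, another process holds it")

    # or, as PostgresTestDatabaseInstance does below, use it as a context manager (blocks until acquired)
    with NamedAtomicLock('PostgresTestDatabaseInstance'):
        pass  # ... set up the test database ...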
''' + _named_lock = NamedAtomicLock('PostgresTestDatabaseInstance') def __init__(self, user: str = 'test_user', preferred_port: int=5444) -> None: self._postgresql = None @@ -68,22 +72,30 @@ class PostgresTestDatabaseInstance(): '''instantiate the isolated postgres server''' logger.info('creating test-database instance...') - factory = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) - factory.settings['port'] = find_free_port(self.tmp_creds.dbcreds.port) - self._postgresql = factory() - - # fill credentials with the dynamically created postgress instance (e.g. port changes for each time) - dsn = self._postgresql.dsn() - self.tmp_creds.dbcreds.host = dsn['host'] - self.tmp_creds.dbcreds.database = dsn['database'] - self.tmp_creds.dbcreds.port = dsn['port'] - self.tmp_creds.create() - - # make the user known in the new test database - self._create_superuser(dsn) - - logger.info('Applying test-database schema...') - self.apply_database_schema() + with self._named_lock: + start_time = datetime.utcnow() + while datetime.utcnow()-start_time < timedelta(minutes=1): + try: + factory = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) + factory.settings['port'] = find_free_port(self.tmp_creds.dbcreds.port) + self._postgresql = factory() + + # fill credentials with the dynamically created postgress instance (e.g. port changes for each time) + dsn = self._postgresql.dsn() + self.tmp_creds.dbcreds.host = dsn['host'] + self.tmp_creds.dbcreds.database = dsn['database'] + self.tmp_creds.dbcreds.port = dsn['port'] + self.tmp_creds.create() + + # make the user known in the new test database + self._create_superuser(dsn) + + logger.info('Applying test-database schema...') + self.apply_database_schema() + return + except Exception as e: + logger.warning("%s could not be started, retrying with next free port. Error: %s %s", self.__class__.__name__, e.__class__.__name__, e) + raise TimeoutError("%s could not be started within 60 seconds. bailing out..." % self.__class__.__name__) def _create_superuser(self, dsn): try: diff --git a/LICENSE b/LICENSE index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..94a9ed024d3859793618152ea559a168bbcbb5e2 100644 --- a/LICENSE +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. 
+ + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. 
+ + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. 
+ + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. 
+ + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. 
+ + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. 
(Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<http://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<http://www.gnu.org/philosophy/why-not-lgpl.html>. 
diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py index 853b6ec9ecc944c541529816c618c8a34695f958..de66cd5cd4ab2efe2d45ffa11ed2030f47e57bec 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py @@ -59,14 +59,17 @@ class IngestJobManager: self.__running_jobs_log_timestamp = datetime.utcnow() self.__last_putStalledJobsBackToToDo_timestamp = datetime.utcnow() + @property + def is_running(self) -> bool: + return self.__running + def quit(self): - self.__running = False + with self.__lock: + self.__running = False def run(self): - self.__running = True - - # start with full jobs dir scan to retreive state from disk - self.scanJobsdir() + if self.is_running: + return logger.info('starting listening for new jobs and notifications') @@ -83,11 +86,16 @@ class IngestJobManager: # open exchange connections... with incoming_jobs_listener, ingest_event_listener, ingest_service, self._tobus: + with self.__lock: + # start with full jobs dir scan to retreive state from disk + self.scanJobsdir() + self.__running = True + logger.info('IngestJobManager is up and running, starting to produce jobs on %s', self._tobus) + # ... and run the event loop, # produce jobs to managed job queue for ingest transfer services # receive new jobs - logger.info('starting to produce jobs') - while self.__running: + while self.is_running: try: # produce next jobs self.produceNextJobsIfPossible() diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py index 019e7262f5a4bc804907ebdb6811fc81d82e757c..74ae1df643a0a42bbed370c219356dcff0af08fd 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py @@ -80,6 +80,7 @@ with TemporaryExchange(testname+"_bus") as tmp_bus: test_notifier.send(event_msg) def receiveJobForTransfer(): + logger.info("trying to receive job on test consumer %s", test_consumer) msg = test_consumer.receive(timeout=1) if msg and isinstance(msg, CommandMessage): @@ -107,7 +108,8 @@ with TemporaryExchange(testname+"_bus") as tmp_bus: manager_thread.daemon = True manager_thread.start() - time.sleep(1.0) #TODO: should not wait fixed amount of time for IngestJobManager to be up and running, but poll with a timeout + while not manager.is_running: + time.sleep(0.1) assert manager.nrOfUnfinishedJobs() == 3, 'expected 3 jobs unfinished before any job was started' assert manager.nrOfJobs() == 3, 'expected 3 jobs in total before any job was started' diff --git a/MAC/GCF/PVSS/test/testFuncs.sh b/MAC/GCF/PVSS/test/testFuncs.sh index cefb0a5876466b407810348092555c728a0bf482..35ea3cc9d7815067a6f946387a6c9973b13a33a3 100755 --- a/MAC/GCF/PVSS/test/testFuncs.sh +++ b/MAC/GCF/PVSS/test/testFuncs.sh @@ -35,7 +35,7 @@ init() # This is an arbitraty choice, but for now it works. 
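The ingest test above now polls manager.is_running instead of sleeping a fixed second. If a bounded wait is wanted (the removed TODO asked for a poll with a timeout), a sketch could look like this, with the 30-second bound being an arbitrary choice:

    from datetime import datetime, timedelta
    import time

    start = datetime.utcnow()
    while not manager.is_running:                                 # manager as created in the test above
        if datetime.utcnow() - start > timedelta(seconds=30):
            raise TimeoutError("IngestJobManager did not start in time")
        time.sleep(0.1)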
pvsshome=$( sed -n '/^\[Software\\ETM\\PVSS II\\[0-9]\+\.[0-9]\+\]$/,/^\[/ { - s,^InstallationDir *= *"\([^"]*\)"$,\1,p }' $PVSSINST_CONF | sort -u | head -n 1 + s,^InstallationDir *= *"\([^"]*\)"$,\1,p }' $PVSSINST_CONF | sort -ur | head -n 1 ) echo "Found pvsshome=$pvsshome in $PVSSINST_CONF" @@ -61,7 +61,7 @@ pvss_cmdprefix() { pvss_version=$( sed -n 's,^\[Software\\ETM\\PVSS II\\\([0-9]\+\.[0-9]\+\)\]$,\1,p' \ - $PVSSINST_CONF | sort -u | head -n 1) + $PVSSINST_CONF | sort -ur | head -n 1) pvss_version_major=$(echo $pvss_version | cut -d'.' -f1) pvss_version_minor=$(echo $pvss_version | cut -d'.' -f2) @@ -88,7 +88,7 @@ pvss_import_dplist() pvss_project_config() { currentProj=$( - sed -n 's,^currentProject *= *"\([^"]*\)"$,\1,p' $PVSSINST_CONF + sed -n 's,^currentProject *= *"\([^"]*\)"$,\1,p' $PVSSINST_CONF | sort -ur | head -n 1 ) pvss_ii=$( sed -n '/^\[.*\\'$currentProj'\]$/,/^\[/s,^PVSS_II *= *"\([^"]*\)"$,\1,p' \ diff --git a/MAC/Services/CMakeLists.txt b/MAC/Services/CMakeLists.txt index fdfc8c9ab299ee4bb3cc29cfc444a9bcc342315d..5a28a358a0b37cc84a27a1bdaa1a8b4c12eeda3d 100644 --- a/MAC/Services/CMakeLists.txt +++ b/MAC/Services/CMakeLists.txt @@ -1,6 +1,6 @@ # $Id$ -lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService) +lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient) add_subdirectory(src) add_subdirectory(test) diff --git a/MAC/Services/src/PipelineControl.py b/MAC/Services/src/PipelineControl.py index 94dc11f456845e6dec6b6bbf919e7902585fc78f..a564fd0b4b74644642d0908151d7a7401cd1e609 100755 --- a/MAC/Services/src/PipelineControl.py +++ b/MAC/Services/src/PipelineControl.py @@ -45,18 +45,18 @@ The execution chains are as follows: State is set to [QUEUED]. (runPipeline.sh) -> Calls - - state <- [ACTIVE] + - state <- [ACTIVE | tmss:STARTED] - getParset - (run pipeline) - success: - - state <- [COMPLETING] + - state <- [otdb:COMPLETING | tmss:FINISGING] - (wrap up) - - state <- [FINISHED] + - state <- [otdb:FINISHED | tmss:FINISHED] - failure: - - state <- [ABORTED] + - state <- [otdb:ABORTED | tmss:FINISHED] (setOTDBTreeStatus) -> Calls - - state <- [ABORTED] + - state <- [otdb:ABORTED | tmss:FINISHED] ----------------------------- Stopping a pipeline @@ -67,21 +67,23 @@ The execution chains are as follows: """ from lofar.messaging import DEFAULT_BUSNAME, DEFAULT_BROKER, RPCException -from lofar.parameterset import PyParameterValue +from lofar.parameterset import parameterset, PyParameterValue from lofar.sas.otdb.OTDBBusListener import OTDBEventMessageHandler, OTDBBusListener from lofar.sas.otdb.otdbrpc import OTDBRPC from lofar.common import isProductionEnvironment from lofar.common.subprocess_utils import communicate_returning_strings from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +from lofar.sas.tmss.client.tmssbuslistener import TMSSSubTaskEventMessageHandler, TMSSSubTaskBusListener import subprocess import pipes import os import re from socket import getfqdn - import logging + logger = logging.getLogger(__name__) # NDPPP seems to like to have 2 cores. 
@@ -92,7 +94,6 @@ NUMBER_OF_CORES_PER_NODE = 24 # We /4 because we can then run 4 pipelines, and -2 to reserve cores for TBBwriter DEFAULT_NUMBER_OF_TASKS = (NUMBER_OF_NODES // 4) * (NUMBER_OF_CORES_PER_NODE - 2) // DEFAULT_NUMBER_OF_CORES_PER_TASK - def runCommand(cmdline, input=None): logger.info("runCommand starting: %s", cmdline) @@ -337,6 +338,267 @@ class PipelineDependencies(object): return self.rarpc.getTasks(task_status=task_status, task_type=task_type) +class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): + + def __init__(self): + super(PipelineControlTMSSHandler, self).__init__() + + self.slurm = Slurm() + self.tmss_client = TMSSsession.create_from_dbcreds_for_ldap() + + def start_handling(self): + self.tmss_client.open() + + def stop_handling(self): + self.tmss_client.close() + + def check_scheduled_pipelines(self): + """ + In the old OTDB, PipelineControl determines itself if a pipeline can be started (scheduled and predecessors + have finished). The predecessor check now happens within TMSS, so PipelineControl can simply start any pipeline + that is scheduled. + """ + try: + logger.info("Checking for already scheduled pipelines in TMSS...") + + scheduled_subtasks = self.tmss_client.get_subtasks(state="scheduled") + scheduled_pipeline_subtask_ids = [] + for subtask in scheduled_subtasks: + try: + bits = subtask['url'].split('/') + subtask_id = int(bits[bits.index("subtask") + 1]) + scheduled_pipeline_subtask_ids.append(subtask_id) + except Exception as e: + logger.error(e) + + logger.info("Checking %s scheduled pipelines if they can start.", len(scheduled_pipeline_subtask_ids)) + + for subtask_id in scheduled_pipeline_subtask_ids: + logger.info("Checking if scheduled pipeline subtask_id=%s can start.", subtask_id) + try: + parset = Parset(parameterset.fromString(self.tmss_client.get_subtask_parset(subtask_id))) + if not parset or not self._shouldHandle(parset): + continue + self._startPipeline(subtask_id, parset) + except Exception as e: + logger.error(e) + except Exception as e: + logger.error(e) + + def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state: str): + try: + parset = self.tmss_client.get_subtask_parset(subtask_id) + parset = parameterset.fromString(parset) + parset = Parset(parset) + if parset and self._shouldHandle(parset): + self._startPipeline(subtask_id, parset) + except Exception as e: + logger.error(e) + + @staticmethod + def _shouldHandle(parset): + try: + if not parset.isPipeline(): + logger.info("Not processing tree: is not a pipeline") + return False + + if parset.processingCluster() == "CEP2": + logger.info("Not processing tree: is a CEP2 pipeline") + return False + except KeyError as e: + # Parset not complete + logger.error("Parset incomplete, ignoring: %s", e) + return False + + return True + + @staticmethod + def _jobName(subtask_id): + return str(subtask_id) + + def _startPipeline(self, subtask_id, parset): + """ + Schedule "docker-runPipeline.sh", which will fetch the parset and run the pipeline within + a SLURM job. + """ + + # Avoid race conditions by checking whether we haven't already sent the job + # to SLURM. Our QUEUED status update may still be being processed. 
+ if self.slurm.isQueuedOrRunning(subtask_id): + logger.info("Pipeline %s is already queued or running in SLURM.", subtask_id) + return + + logger.info("***** START Subtask ID %s *****", subtask_id) + + # Determine SLURM parameters + sbatch_params = [ + # Only run job if all nodes are ready + "--wait-all-nodes=1", + + # Enforce the dependencies, instead of creating lingering jobs + "--kill-on-invalid-dep=yes", + + # Annotate the job + "--comment=%s" % pipes.quote(pipes.quote(parset.description())), + + # Lower priority to drop below inspection plots + "--nice=1000", + + "--partition=%s" % parset.processingPartition(), + "--ntasks=%s" % parset.processingNumberOfTasks(), + "--cpus-per-task=%s" % parset.processingNumberOfCoresPerTask(), + + # Define better places to write the output + os.path.expandvars("--output=/data/log/pipeline-%s-%%j.log" % (subtask_id,)), + ] + + def setStatus_cmdline(status): + return ( + "ssh {myhostname} '" + "source {lofarroot}/lofarinit.sh && " + "tmss_set_subtask_state {subtaskid} {status}" + "'" + .format( + myhostname=getfqdn(), + lofarroot=os.environ.get("LOFARROOT", ""), + subtaskid=subtask_id, + status=status + )) + + def getParset_cmdline(): + return ( + "ssh {myhostname} '" + "source {lofarroot}/lofarinit.sh && " + "tmss_get_subtask_parset {subtaskid}'" + .format( + myhostname=getfqdn(), + lofarroot=os.environ.get("LOFARROOT", ""), + subtaskid=subtask_id, + )) + + try: + logger.info("Handing over pipeline %s to SLURM", subtask_id) + + # Schedule runPipeline.sh + slurm_job_id = self.slurm.submit(self._jobName(subtask_id), + """ + # Run a command, but propagate SIGINT and SIGTERM + function runcmd {{ + trap 'kill -s SIGTERM $PID' SIGTERM + trap 'kill -s SIGINT $PID' SIGINT + + "$@" & + PID=$! + wait $PID # returns the exit status of "wait" if interrupted + wait $PID # returns the exit status of $PID + CMDRESULT=$? + + trap - SIGTERM SIGINT + + return $CMDRESULT + }} + + # print some info + echo Running on $SLURM_NODELIST + + # notify TMSS that we're running + runcmd {setStatus_started} + + # notify ganglia + wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} ACTIVE&host_regex=" + + # fetch parset + runcmd {getParset} > {parset_file} + + # run the pipeline + runcmd docker-run-slurm.sh --rm --net=host \ + -e LOFARENV={lofarenv} \ + -v $HOME/.ssh:$HOME/.ssh:ro \ + -e SLURM_JOB_ID=$SLURM_JOB_ID \ + -v /data:/data \ + {image} \ + runPipeline.sh -o {obsid} -c /opt/lofar/share/pipeline/pipeline.cfg.{cluster} -P {parset_dir} -p {parset_file} + RESULT=$? + + # notify that we're tearing down + runcmd {setStatus_finishing} + + if [ $RESULT -eq 0 ]; then + # !!! TODO: Review behavior for TMSS + # wait for MoM to pick up feedback before we set finished status + # AS: I increased this to 300 sec to be in line with the wait time after observation finished + # and because we still note quite a lot of feedback issues in MoM + runcmd sleep 300 + + # if we reached this point, the pipeline ran succesfully + runcmd {setStatus_finished} + + # notify ganglia + wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} FINISHED&host_regex=" + else + # !!! TODO: Review behavior for TMSS + # If we are killed by the pipeline being set to finished, we just went from finished->finishing + # but our abort_trigger may already have been cancelled. 
Set the status here too to avoid lingering + # in finishing + runcmd {setStatus_finished} + fi + + # report status back to SLURM + echo "Pipeline exited with status $RESULT" + exit $RESULT + """.format( + lofarenv=os.environ.get("LOFARENV", ""), + obsid=subtask_id, + parset_dir="/data/parsets", + parset_file="/data/parsets/Observation%s.parset" % (subtask_id,), + repository=parset.dockerRepository(), + image=parset.dockerImage(), + cluster=parset.processingCluster(), + + getParset=getParset_cmdline(), + setStatus_started=setStatus_cmdline("started"), + setStatus_finishing=setStatus_cmdline("finishing"), + setStatus_finished=setStatus_cmdline("finished"), + ), + + sbatch_params=sbatch_params + ) + logger.info("Scheduled SLURM job %s for subtask_id=%s", slurm_job_id, subtask_id) + + # Schedule pipelineAborted.sh + logger.info("Scheduling SLURM job for pipelineAborted.sh") + slurm_cancel_job_id = self.slurm.submit("%s-abort-trigger" % self._jobName(subtask_id), + """ + # notify TMSS + {setStatus_finished} + + # notify ganglia + wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} ABORTED&host_regex=" + """ + .format( + setStatus_finished=setStatus_cmdline("finished"), + obsid=subtask_id, + ), + + sbatch_params=[ + "--partition=%s" % parset.processingPartition(), + "--cpus-per-task=1", + "--ntasks=1", + "--dependency=afternotok:%s" % slurm_job_id, + "--kill-on-invalid-dep=yes", + "--requeue", + "--output=/data/log/abort-trigger-%s.log" % (subtask_id,), + ] + ) + logger.info("Scheduled SLURM job %s for abort trigger for subtask_id=%s", slurm_cancel_job_id, subtask_id) + + logger.info("Handed over pipeline %s to SLURM, setting status to QUEUED", subtask_id) + self.tmss_client.set_subtask_status(subtask_id, "queued") + except Exception as e: + logger.error(str(e)) + self.tmss_client.set_subtask_status(subtask_id, "finished") + + class PipelineControlHandler( OTDBEventMessageHandler): def __init__(self, exchange, broker): super(PipelineControlHandler, self).__init__() @@ -493,30 +755,30 @@ class PipelineControlHandler( OTDBEventMessageHandler): function runcmd {{ trap 'kill -s SIGTERM $PID' SIGTERM trap 'kill -s SIGINT $PID' SIGINT - + "$@" & PID=$! wait $PID # returns the exit status of "wait" if interrupted wait $PID # returns the exit status of $PID CMDRESULT=$? - + trap - SIGTERM SIGINT - + return $CMDRESULT }} - + # print some info echo Running on $SLURM_NODELIST - + # notify OTDB that we're running runcmd {setStatus_active} - + # notify ganglia wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} ACTIVE&host_regex=" # fetch parset runcmd {getParset} > {parset_file} - + # run the pipeline runcmd docker-run-slurm.sh --rm --net=host \ -e LOFARENV={lofarenv} \ @@ -526,19 +788,19 @@ class PipelineControlHandler( OTDBEventMessageHandler): {image} \ runPipeline.sh -o {obsid} -c /opt/lofar/share/pipeline/pipeline.cfg.{cluster} -P {parset_dir} -p {parset_file} RESULT=$? 
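
Note: the abort-trigger job submitted in _startPipeline above relies on standard SLURM dependency semantics: --dependency=afternotok:<main job id> only releases the trigger if the main pipeline job ends unsuccessfully (failed, cancelled or timed out), while --kill-on-invalid-dep=yes removes it again once that dependency can no longer be satisfied (i.e. the pipeline succeeded). A minimal sketch of how the two submissions relate (method names follow the code above; the job name, scripts and parameter lists are placeholders):

    # sketch: main pipeline job plus its abort trigger (mirrors _startPipeline above; values illustrative)
    slurm_job_id = slurm.submit("2000001", main_job_script, sbatch_params=main_sbatch_params)
    slurm.submit("2000001-abort-trigger", abort_script,
                 sbatch_params=["--dependency=afternotok:%s" % slurm_job_id,
                                "--kill-on-invalid-dep=yes",
                                "--requeue"])
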
- + # notify that we're tearing down runcmd {setStatus_completing} - + if [ $RESULT -eq 0 ]; then # wait for MoM to pick up feedback before we set finished status # AS: I increased this to 300 sec to be in line with the wait time after observation finished # and because we still note quite a lot of feedback issues in MoM runcmd sleep 300 - + # if we reached this point, the pipeline ran succesfully runcmd {setStatus_finished} - + # notify ganglia wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} FINISHED&host_regex=" else @@ -547,7 +809,7 @@ class PipelineControlHandler( OTDBEventMessageHandler): # in completing runcmd {setStatus_aborted} fi - + # report status back to SLURM echo "Pipeline exited with status $RESULT" exit $RESULT @@ -577,7 +839,7 @@ class PipelineControlHandler( OTDBEventMessageHandler): """ # notify OTDB {setStatus_aborted} - + # notify ganglia wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} ABORTED&host_regex=" """ @@ -705,5 +967,25 @@ class PipelineControl(OTDBBusListener): +class PipelineControlTMSS(TMSSSubTaskBusListener): + def __init__(self, handler_type: PipelineControlHandler.__class__ = PipelineControlHandler, + handler_kwargs: dict = None, + exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER, + num_threads: int = 1): + if not issubclass(handler_type, PipelineControlTMSSHandler): + raise TypeError("handler_type should be a PipelineControlTMSSHandler subclass") + + super().__init__(handler_type=handler_type, handler_kwargs=handler_kwargs, + exchange=exchange, + num_threads=num_threads, broker=broker) + def start_listening(self): + # HACK: create a temporary extra handler which is not connected to this listener, + # and hence not responding to incoming messages, + # and use this extra handler to initially check all already scheduled pipelines + with self._create_handler() as helper_handler: + helper_handler.check_scheduled_pipelines() + + # everything has been check, now start_listening, and let the normal handlers respond to otdb events + super().start_listening() diff --git a/MAC/Services/src/pipelinecontrol b/MAC/Services/src/pipelinecontrol index 90bdc73eace0701caa09602b09442c923c6da7f5..6871cb2eff4cf5f6558349e7f61578be054daa99 100644 --- a/MAC/Services/src/pipelinecontrol +++ b/MAC/Services/src/pipelinecontrol @@ -21,7 +21,7 @@ # $Id: JobsToSchedule.py 33364 2016-01-21 21:21:12Z mol $ import logging -from lofar.mac.PipelineControl import PipelineControl +from lofar.mac.PipelineControl import PipelineControl, PipelineControlTMSS from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.common.util import waitForInterrupt @@ -42,6 +42,8 @@ if __name__ == "__main__": logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG if options.verbose else logging.INFO) + # todo: Do we want to run OTDB and TMSS in parallel? 
with PipelineControl(exchange=options.exchange, broker=options.broker) as pipelineControl: - waitForInterrupt() + with PipelineControlTMSS(exchange=options.exchange, broker=options.broker) as pipelineControlTMSS: + waitForInterrupt() diff --git a/MAC/Services/test/tPipelineControl.py b/MAC/Services/test/tPipelineControl.py index 42d442817cd11c31463d440dac7e01e1135f8788..d5e510f681a85f402962234c13ab4706f3ab32db 100644 --- a/MAC/Services/test/tPipelineControl.py +++ b/MAC/Services/test/tPipelineControl.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import time +import json from lofar.mac.PipelineControl import * from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT, DEFAULT_OTDB_SERVICENAME @@ -11,7 +12,7 @@ from lofar.messaging import ServiceMessageHandler, TemporaryQueue, RPCService, E import subprocess import unittest -from unittest.mock import patch +from unittest.mock import patch, MagicMock, call import datetime from lofar.common.test_utils import integration_test, unit_test @@ -393,6 +394,77 @@ class TestPipelineControl(unittest.TestCase): # Check if job was scheduled self.assertIn("1", self.mock_slurm.scheduled_jobs) self.assertIn("1-abort-trigger", self.mock_slurm.scheduled_jobs) +@unit_test +class TestPipelineControlTMSSClassMethods(unittest.TestCase): + def test_shouldHandle(self): + """ Test whether we filter the right OTDB trees. """ + + logger.warning('TEST_SHOULDHANDLE') + + trials = [{"type": "Observation", "cluster": "CEP2", "shouldHandle": False}, + {"type": "Observation", "cluster": "CEP4", "shouldHandle": False}, + {"type": "Observation", "cluster": "foo", "shouldHandle": False}, + {"type": "Observation", "cluster": "", "shouldHandle": False}, + {"type": "Pipeline", "cluster": "CEP2", "shouldHandle": False}, + {"type": "Pipeline", "cluster": "CEP4", "shouldHandle": True}, + {"type": "Pipeline", "cluster": "foo", "shouldHandle": True}, + {"type": "Pipeline", "cluster": "", "shouldHandle": False}, + ] + + for t in trials: + parset = {"ObsSW.Observation.processType": t["type"], + "ObsSW.Observation.Cluster.ProcessingCluster.clusterName": t["cluster"]} + self.assertEqual(PipelineControlTMSSHandler._shouldHandle(Parset(parset)), t["shouldHandle"]) + + logger.warning('END TEST_SHOULDHANDLE') + + +@unit_test +class TestPipelineControlTMSS(unittest.TestCase): + + def test_check_scheduled_pipelines(self): + """ Test whether we start pipelines that have status scheduled in TMSS. 
""" + + logger.warning('TEST_CHECKSCHEDULED') + + with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtask_parset') as mock_get_subtask_parset: + mock_get_subtask_parset.side_effect = lambda id: "ObsSW.Observation.processType=Pipeline\n" \ + "ObsSW.Observation.Cluster.ProcessingCluster.clusterName=CEP4\n" + + with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtasks') as mock_get_subtasks: + mock_get_subtasks.side_effect = lambda state: json.loads('[{"url":"http://localhost:8008/api/subtask/2000001/?format=json","tags":["TMSS","TESTING"],"created_at":"2020-05-11T06:39:01.907446","updated_at":"2020-05-11T17:49:47.455010","start_time":null,"stop_time":null,"specifications_doc":{"demixer":{"baselines":"CS*,RS*&","time_steps":1,"demix_always":[],"ignore_target":false,"demix_if_needed":[],"frequency_steps":4,"demix_time_steps":1,"demix_frequency_steps":4},"aoflagger":{"strategy":"HBAdefault"},"preflagger0":{"channels":"0..nchan/32-1,31*nchan/32..nchan-1"},"preflagger1":{"corrtype":"auto"},"storagemanager":"dysco"},"do_cancel":null,"priority":1,"scheduler_input_doc":{},"state":"http://localhost:8008/api/subtask_state/scheduled/?format=json","task_blueprint":"http://localhost:8008/api/task_blueprint/20/?format=json","specifications_template":"http://localhost:8008/api/subtask_template/2/?format=json","schedule_method":"http://localhost:8008/api/schedule_method/manual/?format=json","cluster":"http://localhost:8008/api/cluster/1/?format=json"},' \ + '{"url":"http://localhost:8008/api/subtask/2000042/?format=json","tags":["TMSS","TESTING"],"created_at":"2020-05-11T06:39:01.907446","updated_at":"2020-05-11T17:49:47.455010","start_time":null,"stop_time":null,"specifications_doc":{"demixer":{"baselines":"CS*,RS*&","time_steps":1,"demix_always":[],"ignore_target":false,"demix_if_needed":[],"frequency_steps":4,"demix_time_steps":1,"demix_frequency_steps":4},"aoflagger":{"strategy":"HBAdefault"},"preflagger0":{"channels":"0..nchan/32-1,31*nchan/32..nchan-1"},"preflagger1":{"corrtype":"auto"},"storagemanager":"dysco"},"do_cancel":null,"priority":1,"scheduler_input_doc":{},"state":"http://localhost:8008/api/subtask_state/scheduled/?format=json","task_blueprint":"http://localhost:8008/api/task_blueprint/20/?format=json","specifications_template":"http://localhost:8008/api/subtask_template/2/?format=json","schedule_method":"http://localhost:8008/api/schedule_method/manual/?format=json","cluster":"http://localhost:8008/api/cluster/1/?format=json"}]') + + with unittest.mock.patch('lofar.mac.PipelineControl.PipelineControlTMSSHandler._startPipeline') as mock_startPipeline: + handler = PipelineControlTMSSHandler() + handler.check_scheduled_pipelines() + + mock_get_subtasks.assert_called_with(state="scheduled") + + self.assertTrue(call(2000001) in mock_get_subtask_parset.call_args_list) + self.assertTrue(call(2000042) in mock_get_subtask_parset.call_args_list) + + mock_startPipeline.assert_called() + + logger.warning('END TEST_CHECKSCHEDULED') + + + def test_onSubTaskScheduled(self): + """ Test whether pipelines are started with correct Parset and ID TMSS. 
""" + logger.warning('TEST_ONSCHEDULED') + + with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtask_parset') as mock_get_subtask_parset: + mock_get_subtask_parset.side_effect = lambda id: "ObsSW.Observation.processType=Pipeline\n" \ + "ObsSW.Observation.Cluster.ProcessingCluster.clusterName=CEP4\n" + + with unittest.mock.patch('lofar.mac.PipelineControl.PipelineControlTMSSHandler._startPipeline') as mock_startPipeline: + handler = PipelineControlTMSSHandler() + handler.onSubTaskScheduled(1234, "scheduling", "scheduled") + + mock_get_subtask_parset.assert_called_with(1234) + mock_startPipeline.assert_called() + + logger.warning('END TEST_ONSCHEDULED') if __name__ == "__main__": diff --git a/QA/QA_Common/bin/show_hdf5_info b/QA/QA_Common/bin/show_hdf5_info index 63c2f8c98dfdb1ce7d9c014a30ec0dbddd5d572f..1914d492a601afdb0018f1a16d89d9f196c1f8c1 100755 --- a/QA/QA_Common/bin/show_hdf5_info +++ b/QA/QA_Common/bin/show_hdf5_info @@ -38,7 +38,7 @@ if __name__ == '__main__': (options, args) = parser.parse_args() - if len(args) != 1: + if len(args) < 1: parser.print_help() exit(-1) diff --git a/QA/QA_Service/CMakeLists.txt b/QA/QA_Service/CMakeLists.txt index 3da6a2d95811cc7bc01fe5147727e1a6edf4d9c0..37e8061110e7450dc8549439a7aac2938c79931a 100644 --- a/QA/QA_Service/CMakeLists.txt +++ b/QA/QA_Service/CMakeLists.txt @@ -17,7 +17,7 @@ # $Id$ -lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset) +lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSS TMSSClient) add_subdirectory(lib) add_subdirectory(bin) diff --git a/QA/QA_Service/lib/config.py b/QA/QA_Service/lib/config.py index 3c975678d40a758dd78468de7763cd822f18270d..6b1b1089f2170569e6ada3a9207573ec68eb3988 100644 --- a/QA/QA_Service/lib/config.py +++ b/QA/QA_Service/lib/config.py @@ -17,4 +17,4 @@ # $Id$ -DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX='QA' +DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX='QA.notification' diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py index e4f5ba654d471f4a09eb5bc28fb158d169bc4f27..e8283195becfe50615448a3d5ed1d72f8716a2ad 100644 --- a/QA/QA_Service/lib/qa_service.py +++ b/QA/QA_Service/lib/qa_service.py @@ -18,57 +18,47 @@ # $Id: qa_service.py 43930 2019-08-30 07:57:17Z klazema $ import os.path -import logging -from subprocess import call, Popen, PIPE, STDOUT +import json +from subprocess import call from optparse import OptionParser, OptionGroup -from threading import Thread from lofar.common.util import waitForInterrupt -from lofar.sas.otdb.OTDBBusListener import OTDBBusListener, OTDBEventMessageHandler, DEFAULT_OTDB_NOTIFICATION_SUBJECT -from lofar.messaging import UsingToBusMixin, BusListener -from lofar.messaging.messages import EventMessage +from lofar.sas.otdb.OTDBBusListener import OTDBBusListener, OTDBEventMessageHandler +from lofar.sas.tmss.client.tmssbuslistener import TMSSSubTaskEventMessageHandler, TMSSSubTaskBusListener +from lofar.messaging import UsingToBusMixin, BusListener, ToBus, AbstractMessageHandler +from lofar.messaging.messages import EventMessage, CommandMessage from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.qa.service.config import DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX from lofar.common.cep4_utils import * from lofar.parameterset import parameterset from lofar.sas.otdb.otdbrpc import OTDBRPC +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +import logging logger = logging.getLogger(__name__) -QA_LUSTRE_BASE_DIR = '/data/qa' 
-QA_NFS_BASE_DIR = '/qa' -DEFAULT_FILTERED_OTDB_NOTIFICATION_SUBJECT = "filtered.%s" % (DEFAULT_OTDB_NOTIFICATION_SUBJECT,) - -#TODO: idea: convert periodically while observing? +_DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE = "QA.Command.%s" +DEFAULT_DO_QAFILE_CONVERSION_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileConversion" +DEFAULT_DO_QAFILE_FINALIZE_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileFinalize" +DEFAULT_DO_QAPLOTS_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAPlots" +DEFAULT_DO_QAFILE_CONVERSION_AND_PLOTS_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileConversionAndPlots" +DEFAULT_QA_COMMANDS_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "#" class QAFilteringOTDBBusListener(OTDBBusListener): class QAFilteringOTDBEventMessageHandler(UsingToBusMixin, OTDBEventMessageHandler): - def _send_filtered_event_message(self, otdb_id: int, modificationTime: datetime, state: str): - try: - with OTDBRPC.create(exchange=self.exchange, broker=self.broker, timeout=2) as otdbrpc: - parset = parameterset(otdbrpc.taskGetSpecification(otdb_id=otdb_id).get("specification", '')) - task_type = parset.get("ObsSW.Observation.processType") - priority = 6 if task_type == "Observation" else 2 - except Exception as e: - logger.warning('Could not determine task type for otdb_id=%s, using default priority=4: %s', otdb_id, e) - priority = 4 - + def _send_qa_command_message(self, otdb_id: int, command_subject: str): try: - content = {"treeID": otdb_id, - "state": state, - "time_of_change": modificationTime} - msg = EventMessage(subject=DEFAULT_FILTERED_OTDB_NOTIFICATION_SUBJECT, - content=content, - priority=priority) - logger.info('sending filtered event message subject:\'%s\' content: %s', msg.subject, content) + content = {"otdb_id": otdb_id } + msg = CommandMessage(subject=command_subject, content=content) + logger.info('sending command message subject:\'%s\' content: %s', msg.subject, content) self.send(msg) except Exception as e: logger.error('Could not send event message: %s', e) def onObservationCompleting(self, otdb_id, modificationTime): - self._send_filtered_event_message(otdb_id, modificationTime, 'completing') + self._send_qa_command_message(otdb_id, DEFAULT_DO_QAFILE_CONVERSION_AND_PLOTS_SUBJECT) def onObservationFinished(self, otdb_id, modificationTime): - self._send_filtered_event_message(otdb_id, modificationTime, 'finished') + self._send_qa_command_message(otdb_id, DEFAULT_DO_QAFILE_FINALIZE_SUBJECT) def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): super().__init__(handler_type=QAFilteringOTDBBusListener.QAFilteringOTDBEventMessageHandler, @@ -77,68 +67,131 @@ class QAFilteringOTDBBusListener(OTDBBusListener): broker=broker) -class QAFilteredOTDBBusListener(BusListener): - class QAFilteredOTDBEventMessageHandler(UsingToBusMixin, OTDBEventMessageHandler): - ''' - QAFilteredOTDBEventMessageHandler listens on the lofar otdb message bus for NotificationMessages and starts qa processes - upon observation/pipeline completion. The qa processes convert MS (measurement sets) to hdf5 qa files, - and then starts generating plots from the hdf5 file. 
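
Note: all QA command subjects above are derived from one template, and the '#' form is used further below as the routing key of the commands listener, so a single queue receives every QA command; together with the 'QA.notification' prefix from config.py this gives two distinct subject families. A small illustration of the resulting subject strings:

    # subjects produced by the template above (and the notification prefix from config.py)
    _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE = "QA.Command.%s"
    assert _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileConversion" == "QA.Command.QAFileConversion"
    assert _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAPlots" == "QA.Command.QAPlots"
    assert _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "#" == "QA.Command.#"   # topic wildcard: matches all QA commands
    # event notifications use the prefix from config.py, e.g. 'QA.notification.Finished', 'QA.notification.Error'
    assert "%s.%s" % ('QA.notification', 'Finished') == "QA.notification.Finished"
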
- ''' - def __init__(self): +class QAFilteringTMSSSubTaskBusListener(TMSSSubTaskBusListener): + class QAFilteringTMSSSubTaskEventMessageHandler(UsingToBusMixin, TMSSSubTaskEventMessageHandler): + def _send_qa_command_message(self, subtask_id: int, command_subject: str): + with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: + tmsssession.set_subtask_status(subtask_id, 'queueing') + + try: + content = {"subtask_id": subtask_id } + msg = CommandMessage(subject=command_subject, content=content) + logger.info('sending command message subject:\'%s\' content: %s', msg.subject, content) + self.send(msg) + except Exception as e: + logger.error('Could not send event message: %s', e) + + tmsssession.set_subtask_status(subtask_id, 'queued') + + def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state:str): + with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: + subtask = tmsssession.get_subtask(subtask_id) + spec = tmsssession.get_url_as_json_object(subtask['specifications_template']) + if '/qa_files/' in spec['type']: + self._send_qa_command_message(subtask_id, DEFAULT_DO_QAFILE_CONVERSION_SUBJECT) + elif '/qa_plots/' in spec['type']: + self._send_qa_command_message(subtask_id, DEFAULT_DO_QAPLOTS_SUBJECT) + + def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): + super().__init__(handler_type=QAFilteringTMSSSubTaskBusListener.QAFilteringTMSSSubTaskEventMessageHandler, + exchange=exchange, + num_threads=1, + broker=broker) + + +class QACommandsBusListener(BusListener): + class QACommandsMessageHandler(AbstractMessageHandler): + def __init__(self, qa_service): super().__init__() - self._unfinished_otdb_id_map = {} + self.qa_service = qa_service - def onObservationCompleting(self, otdb_id, modificationTime): - ''' - this mehod is called automatically upon receiving a Completion NotificationMessage - :param int otdb_id: the task's otdb database id - :param datetime modificationTime: timestamp when the task's status changed to completing - :return: None - ''' - logger.info("task with otdb_id %s completed.", otdb_id) - - # immediately do qa when the obs is completing, because the data is already on disk... - # and do the handling of the feedback in onObservationFinished - self.do_qa(otdb_id=otdb_id) + def handle_message(self, msg: CommandMessage): + if not isinstance(msg, CommandMessage): + raise ValueError("%s: Ignoring non-CommandMessage: %s" % (self.__class__.__name__, msg)) - def onObservationFinished(self, otdb_id, modificationTime): - ''' - this mehod is called automatically upon receiving a Finished NotificationMessage - :param int otdb_id: the task's otdb database id - :param datetime modificationTime: timestamp when the task's status changed to finished - :return: None - ''' - logger.info("task with otdb_id %s finished. 
trying to add parset (with feedback) to h5 file", otdb_id) - - # lookup the hdf5_file_path for the given otdb_id - # and (re)add the parset to the file (which now includes feedback) - hdf5_file_path = self._unfinished_otdb_id_map.get(otdb_id) - if hdf5_file_path: - del self._unfinished_otdb_id_map[otdb_id] + logger.debug("%s.handleMessage: %s" % (self.__class__.__name__, str(msg))) - try: - cmd = ['add_parset_to_hdf5', hdf5_file_path] - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') - cmd = wrap_command_in_cep4_random_node_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) + if msg.subject == DEFAULT_DO_QAFILE_CONVERSION_SUBJECT: + self.qa_service.do_qafile_conversion(otdb_id=msg.content.get('otdb_id'), subtask_id=msg.content.get('subtask_id')) + elif msg.subject == DEFAULT_DO_QAPLOTS_SUBJECT: + self.qa_service.do_qaplots(otdb_id=msg.content.get('otdb_id'), subtask_id=msg.content.get('subtask_id')) + elif msg.subject == DEFAULT_DO_QAFILE_CONVERSION_AND_PLOTS_SUBJECT: + if msg.content.get('subtask_id'): + raise ValueError("%s: cannot do qa file conversion and plotting in one call for TMSS subtask id=%s. These steps are modelled seperately" % (self.__class__.__name__, msg.content.get('subtask_id'))) - logger.info(' '.join(cmd)) - if call(cmd) == 0: - self._copy_hdf5_to_nfs_dir(hdf5_file_path) - except Exception as e: - logger.warning("Cannot add parset with feedback for otdb=%s. error: %s", otdb_id, e) + self.qa_service.do_qa(otdb_id=msg.content.get('otdb_id')) + elif msg.subject == DEFAULT_DO_QAFILE_FINALIZE_SUBJECT: + self.qa_service.finalize_qa(otdb_id=msg.content.get('otdb_id'), subtask_id=msg.content.get('subtask_id')) else: - logger.info("Could not find the h5 file for task with otdb_id %s to add the parset to.", otdb_id) + raise ValueError("%s: cannot handle CommandMessage with subject: %s" % (self.__class__.__name__, msg.subject)) - def do_qa(self, otdb_id): - ''' - try to do all qa (quality assurance) steps for the given otdb_id - resulting in an h5 MS-extract file and inspection plots - :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. 
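
Note: to make the dispatch in QACommandsBusListener.handle_message above concrete: a command carries either an otdb_id (OTDB path) or a subtask_id (TMSS path), and the message subject selects the QAService method. An illustrative message and the call it results in (the id is an example value; names follow the code above):

    # illustrative command round trip (id is an example value)
    msg = CommandMessage(subject=DEFAULT_DO_QAFILE_CONVERSION_SUBJECT,   # "QA.Command.QAFileConversion"
                         content={"subtask_id": 2000001})
    # QACommandsBusListener.handle_message(msg) then dispatches to:
    #   qa_service.do_qafile_conversion(otdb_id=None, subtask_id=2000001)
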
- :return: None - ''' + def __init__(self, qa_service, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): + super().__init__(handler_type=QACommandsBusListener.QACommandsMessageHandler, + handler_kwargs={'qa_service': qa_service}, + exchange=exchange, + routing_key=DEFAULT_QA_COMMANDS_SUBJECT, + num_threads=1, + broker=broker) - hdf5_file_path = None +class QAService: + QA_LUSTRE_BASE_DIR = '/data/qa' + QA_NFS_BASE_DIR = '/qa' + def __init__(self, exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER): + """ + :param exchange: valid message exchange address + :param broker: valid broker host (default: None, which means localhost) + """ + self.exchange = exchange + self.broker = broker + self.tobus = ToBus(exchange, broker) + self.filtering_otdbbuslistener = QAFilteringOTDBBusListener(exchange = exchange, broker = broker) + self.filtering_tmssbuslistener = QAFilteringTMSSSubTaskBusListener(exchange = exchange, broker = broker) + self.commands_buslistener = QACommandsBusListener(qa_service=self, exchange = exchange, broker = broker) + self._unfinished_otdb_id_map = {} + self.tmsssession = None + + def __enter__(self): + self.tmsssession = TMSSsession.create_from_dbcreds_for_ldap() + self.tmsssession.open() + self.tobus.open() + self.filtering_otdbbuslistener.start_listening() + self.filtering_tmssbuslistener.start_listening() + self.commands_buslistener.start_listening() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.filtering_otdbbuslistener.stop_listening() + self.filtering_tmssbuslistener.stop_listening() + self.commands_buslistener.stop_listening() + self.tobus.close() + self.tmsssession.close() + + @staticmethod + def h5_lustre_filepath(observation_id) -> str: + h5_filename = 'L%s.MS_extract.h5' % observation_id + return os.path.join(QAService.QA_LUSTRE_BASE_DIR, 'qa_h5_files', h5_filename) + + @staticmethod + def plots_lustre_dirpath(observation_id) -> str: + plots_dirname = 'L%s' % observation_id + return os.path.join(QAService.QA_LUSTRE_BASE_DIR, 'plots', plots_dirname) + + def do_qa(self, otdb_id=None): + ''' + convert a MS or BeamFormed observation to a qa h5 file, and create plots. 
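
Note: the two static helpers above pin down where QA products live on the shared filesystem; with the default QA_LUSTRE_BASE_DIR they resolve as follows for an illustrative id:

    # worked example of the path helpers above (id is illustrative, default QA_LUSTRE_BASE_DIR = '/data/qa')
    assert QAService.h5_lustre_filepath(2000001) == '/data/qa/qa_h5_files/L2000001.MS_extract.h5'
    assert QAService.plots_lustre_dirpath(2000001) == '/data/qa/plots/L2000001'
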
+ ''' + if self.do_qafile_conversion(otdb_id=otdb_id): + self.do_qaplots(otdb_id=otdb_id) + + def do_qafile_conversion(self, otdb_id=None, subtask_id=None): + ''' + convert a MS or BeamFormed observation to a qa h5 file + ''' + + hdf5_file_path = None + + if otdb_id: with OTDBRPC.create(exchange=self.exchange, broker=self.broker, timeout=5) as otdbrpc: parset = parameterset(otdbrpc.taskGetSpecification(otdb_id=otdb_id).get("specification", '')) @@ -153,279 +206,332 @@ class QAFilteredOTDBBusListener(BusListener): else: logger.info("No uv or cs dataproducts avaiblable to convert for otdb_id %s", otdb_id) return + elif subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'starting') + self.tmsssession.set_subtask_status(subtask_id, 'started') - if hdf5_file_path: - # keep a note of where the h5 file was stored for this unfinished otdb_id - self._unfinished_otdb_id_map[otdb_id] = hdf5_file_path + hdf5_file_path = self._convert_ms2hdf5(otdb_id=otdb_id, subtask_id=subtask_id) - # cluster it - self._cluster_h5_file(hdf5_file_path, otdb_id) + if hdf5_file_path: + # cluster it + self._cluster_h5_file(hdf5_file_path, otdb_id=otdb_id, subtask_id=subtask_id) - self._copy_hdf5_to_nfs_dir(hdf5_file_path) + self._copy_hdf5_to_nfs_dir(hdf5_file_path) - plot_dir_path = self._create_plots_for_h5_file(hdf5_file_path, otdb_id) - plot_dir_path = self._move_plots_to_nfs_dir(plot_dir_path) + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'finishing') + self.tmsssession.set_subtask_status(subtask_id, 'finished') - # and notify that we're finished - self._send_event_message('Finished', {'otdb_id': otdb_id, - 'hdf5_file_path': hdf5_file_path, - 'plot_dir_path': plot_dir_path or ''}) + return hdf5_file_path - def _send_event_message(self, subject_suffix, content): - try: - subject = '%s.%s' % (DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX, subject_suffix) - msg = EventMessage(subject=subject, content=content) - logger.info('sending event message %s: %s', subject, content) - self.send(msg) - except Exception as e: - logger.error('Could not send event message: %s', e) + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'error') - def _convert_ms2hdf5(self, otdb_id): - ''' - convert the MS for the given otdb_id to an h5 MS-extract file. - The conversion will run via ssh on cep4 with massive parellelization. - When running on cep4, it is assumed that a docker image called adder exists on head.cep4 - When running locally, it is assumed that ms2hdf5 is installed locally. - :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. - :return string: path to the generated h5 file. 
- ''' - try: - # define default h5 filename use default cep4 qa output dir - h5_filename = 'L%s.MS_extract.h5' % otdb_id - h5_dir_path = os.path.join(QA_LUSTRE_BASE_DIR, 'ms_extract') - hdf5_path = os.path.join(h5_dir_path, h5_filename) + return None - cmd = ['ls', hdf5_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + def do_qaplots(self, otdb_id=None, subtask_id=None): + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'starting') + self.tmsssession.set_subtask_status(subtask_id, 'started') - if call(cmd) == 0: - logger.info('uv dataset with otdb_id %s was already converted to hdf5 file %s', otdb_id, hdf5_path) - return hdf5_path + hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id) + plot_dir_path = self._create_plots_for_h5_file(hdf5_path, otdb_id, subtask_id) - logger.info('trying to convert MS uv dataset with otdb_id %s if any', otdb_id) - cmd = ['ms2hdf5', '-o', str(otdb_id), '--cep4', '-p', '-20', '-t', '256'] - cmd += ['--output_dir', h5_dir_path] - cmd += ['--output_filename', h5_filename] + if plot_dir_path: + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'finishing') - # wrap the command in a cep4 docker ssh call - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') - cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) + plot_dir_path = self._move_plots_to_nfs_dir(plot_dir_path) - logger.info('starting ms2hdf5, executing: %s', ' '.join(cmd)) + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'finished') - if call(cmd) == 0: - logger.info('converted uv dataset with otdb_id %s to hdf5 file %s', otdb_id, hdf5_path) - self._send_event_message('ConvertedMS2Hdf5', {'otdb_id': otdb_id, 'hdf5_file_path': hdf5_path}) - return hdf5_path - else: - msg = 'could not convert dataset with otdb_id %s' % otdb_id - logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) + # and notify that we're finished + self._send_event_message('Finished', {'otdb_id': otdb_id, + 'subtask_id': subtask_id, + 'hdf5_file_path': hdf5_path, + 'plot_dir_path': plot_dir_path or ''}) + else: + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'error') + + def finalize_qa(self, otdb_id=None, subtask_id=None): + ''' + this mehod is called automatically upon receiving a Finished NotificationMessage + :param int otdb_id: the task's otdb database id + :param datetime modificationTime: timestamp when the task's status changed to finished + :return: None + ''' + logger.info("task with otdb_id %s finished. trying to add parset (with feedback) to h5 file", otdb_id) + + # lookup the hdf5_file_path for the given otdb_id + # and (re)add the parset to the file (which now includes feedback) + hdf5_file_path = self._unfinished_otdb_id_map.get(otdb_id) + if hdf5_file_path: + del self._unfinished_otdb_id_map[otdb_id] - except Exception as e: - logging.exception('error in _convert_ms2hdf5: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) - return None - - def _create_plots_for_h5_file(self, hdf5_path, otdb_id=None): - ''' - create plots for the given h5 file. The plots are created via an ssh call to cep4 - where the plots are created in parallel in the docker image. - :param hdf5_path: the full path to the hdf5 file for which we want the plots. 
- :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) - :return: the full directory path to the directory containing the created plots. - ''' try: - #use default cep4 qa output dir. - plot_dir_path = os.path.join(QA_LUSTRE_BASE_DIR, 'plots') - task_plot_dir_path = '' - all_plots_succeeded = True - - for plot_options in [['-1', '-acb'], # 'hot' autocor/crosscor, per baseline scaling with distinct polarization scales, in dB - ['-1', '-acg'], # 'complex' autocor/crosscor, all baseline scaling with same polarization scales, in dB - ['-1', '-acn', '--raw'], # normalized 'hot' autocor/crosscor, raw - ['-4']]: # delay-rate - cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (plot_dir_path,), '--force', '--cep4'] + plot_options + [hdf5_path] - - # wrap the command in a cep4 ssh call to docker container - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') - cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - - logger.info('generating plots for otdb_id %s, executing: %s', otdb_id, ' '.join(cmd)) - - if call(cmd) == 0: - task_plot_dir_path = os.path.join(plot_dir_path, 'L%s' % otdb_id) - logger.info('generated plots for otdb_id %s in %s with command=%s', otdb_id, - task_plot_dir_path, - ' '.join(cmd)) - else: - all_plots_succeeded &= False - msg = 'could not generate plots for otdb_id %s cmd=%s' % (otdb_id, ' '.join(cmd)) - logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, - 'message': msg}) - - - self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id, - 'hdf5_file_path': hdf5_path, - 'plot_dir_path': task_plot_dir_path}) - return task_plot_dir_path + cmd = ['add_parset_to_hdf5', hdf5_file_path] + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_random_node_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) + + logger.info(' '.join(cmd)) + + self._copy_hdf5_to_nfs_dir(hdf5_file_path) except Exception as e: - logging.exception('error in _create_plots_for_h5_file: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) - return None - - def _convert_bf2hdf5(self, otdb_id): - ''' - convert the beamformed h5 dataset for the given otdb_id to an h5 MS-extract file. - When running on cep4, it is assumed that a docker image called adder exists on head.cep4 - When running locally, it is assumed that ms2hdf5 is installed locally. - :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. - :return string: path to the generated h5 file. - ''' - try: - # define default h5 filename use default cep4 qa output dir - h5_filename = 'L%s.MS_extract.h5' % otdb_id - h5_dir_path = os.path.join(QA_LUSTRE_BASE_DIR, 'ms_extract') - hdf5_path = os.path.join(h5_dir_path, h5_filename) + logger.warning("Cannot add parset with feedback for otdb=%s. error: %s", otdb_id, e) + else: + logger.info("Could not find the h5 file for task with otdb_id %s to add the parset to.", otdb_id) + + def _send_event_message(self, subject_suffix, content): + try: + subject = '%s.%s' % (DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX, subject_suffix) + msg = EventMessage(subject=subject, content=content) + logger.info('sending event message %s: %s', subject, content) + self.tobus.send(msg) + except Exception as e: + logger.error('Could not send event message: %s', e) + + def _convert_ms2hdf5(self, otdb_id=None, subtask_id=None): + ''' + convert the MS for the given otdb_id to an h5 MS-extract file. 
+ The conversion will run via ssh on cep4 with massive parellelization. + When running on cep4, it is assumed that a docker image called adder exists on head.cep4 + When running locally, it is assumed that ms2hdf5 is installed locally. + :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. + :return string: path to the generated h5 file. + ''' + try: + obs_id = otdb_id or subtask_id - cmd = ['ls', hdf5_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + # define default h5 filename use default cep4 qa output dir + hdf5_path = self.h5_lustre_filepath(obs_id) + h5_dir_path, h5_filename = os.path.split(hdf5_path) + + nr_of_timestamps = -1 + nr_of_subbands = -1 - if call(cmd, stdout=None, stderr=None) == 0: - logger.info('bf dataset with otdb_id %s was already converted to hdf5 file %s', otdb_id, hdf5_path) - return hdf5_path + if subtask_id: + subtask = self.tmsssession.get_subtask(subtask_id=subtask_id) + nr_of_timestamps = subtask['specifications_doc'].get('nr_of_timestamps', -1) + nr_of_subbands = subtask['specifications_doc'].get('nr_of_subbands', -1) - logger.info('trying to convert beamformed dataset with otdb_id %s if any', otdb_id) + logger.info('trying to convert MS uv dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id) + cmd = ['ms2hdf5', '-o', str(obs_id), '--cep4', '-p', '-22', '-t', str(nr_of_timestamps), '-s', str(nr_of_subbands), ] + cmd += ['--output_dir', h5_dir_path] + cmd += ['--output_filename', h5_filename] - cmd = ['bf2hdf5', '-o', str(otdb_id)] - cmd += ['--output_dir', h5_dir_path] - cmd += ['--output_filename', h5_filename] + # wrap the command in a cep4 docker ssh call + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - # wrap the command in a cep4 docker ssh call + logger.info('starting ms2hdf5, executing: %s', ' '.join(cmd)) + + if call(cmd) == 0: + logger.info('converted uv dataset with otdb_id=%s subtask_id=%s to hdf5 file %s', otdb_id, subtask_id, hdf5_path) + self._send_event_message('ConvertedMS2Hdf5', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'hdf5_file_path': hdf5_path}) + return hdf5_path + else: + msg = 'could not convert dataset with otdb_id=%s subtask_id=%s' % (otdb_id, subtask_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': msg}) + + except Exception as e: + logging.exception('error in _convert_ms2hdf5: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) + return None + + def _create_plots_for_h5_file(self, hdf5_path, otdb_id=None, subtask_id=None): + ''' + create plots for the given h5 file. The plots are created via an ssh call to cep4 + where the plots are created in parallel in the docker image. + :param hdf5_path: the full path to the hdf5 file for which we want the plots. + :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) + :return: the full directory path to the directory containing the created plots. + ''' + try: + #use default cep4 qa output dir. 
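
Note: as an example of the command construction in _convert_ms2hdf5 above, a hypothetical TMSS subtask 2000001 whose specifications_doc sets neither nr_of_timestamps nor nr_of_subbands (so both default to -1) yields the following list before the docker/ssh wrapping is applied:

    # unwrapped ms2hdf5 command for an illustrative subtask id 2000001,
    # i.e. before wrap_command_for_docker / wrap_command_in_cep4_available_node_with_lowest_load_ssh_call
    cmd = ['ms2hdf5', '-o', '2000001', '--cep4', '-p', '-22', '-t', '-1', '-s', '-1',
           '--output_dir', '/data/qa/qa_h5_files',
           '--output_filename', 'L2000001.MS_extract.h5']
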
+ task_plot_dir_path = QAService.plots_lustre_dirpath(otdb_id or subtask_id) + base_plot_dir_path = os.path.dirname(task_plot_dir_path) + + if subtask_id: + subtask = self.tmsssession.get_subtask(subtask_id=subtask_id) + #TODO: use settings from subtask to tweak plot_hdf5_dynamic_spectra options + + for plot_options in [['-1', '-acb'], # 'hot' autocor/crosscor, per baseline scaling with distinct polarization scales, in dB + ['-1', '-acg'], # 'complex' autocor/crosscor, all baseline scaling with same polarization scales, in dB + ['-1', '-acn', '--raw'], # normalized 'hot' autocor/crosscor, raw + ['-4']]: # delay-rate + cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (base_plot_dir_path,), '--force', '--cep4'] + plot_options + [hdf5_path] + + # wrap the command in a cep4 ssh call to docker container cmd = wrap_command_for_docker(cmd, 'adder', 'latest') cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - logger.info('starting bf2hdf5, executing: %s', ' '.join(cmd)) + logger.info('generating plots for otdb_id=%s subtask_id=%s, executing: %s',otdb_id, subtask_id, ' '.join(cmd)) if call(cmd) == 0: - hdf5_path = os.path.join(h5_dir_path, h5_filename) - logger.info('converted bf dataset with otdb_id %s to hdf5 file %s', otdb_id, hdf5_path) - self._send_event_message('ConvertedBF2Hdf5', {'otdb_id': otdb_id, 'hdf5_file_path': hdf5_path}) - return hdf5_path + logger.info('generated plots for otdb_id=%s subtask_id=%s in %s with command=%s', otdb_id, subtask_id, + task_plot_dir_path, + ' '.join(cmd)) else: - msg = 'could not convert dataset with otdb_id %s' % otdb_id + msg = 'could not generate plots for otdb_id=%s subtask_id=%s cmd=%s' % (otdb_id, subtask_id, ' '.join(cmd)) logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, + 'message': msg}) + return None + + self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id, 'subtask_id': subtask_id, + 'hdf5_file_path': hdf5_path, + 'plot_dir_path': task_plot_dir_path}) + return task_plot_dir_path + except Exception as e: + logging.exception('error in _create_plots_for_h5_file: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) + return None + + def _convert_bf2hdf5(self, otdb_id=None, subtask_id=None): + ''' + convert the beamformed h5 dataset for the given otdb_id to an h5 MS-extract file. + When running on cep4, it is assumed that a docker image called adder exists on head.cep4 + When running locally, it is assumed that ms2hdf5 is installed locally. + :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. + :return string: path to the generated h5 file. 
+ ''' + try: + # define default h5 filename use default cep4 qa output dir + hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id) + h5_dir_path, h5_filename = os.path.split(hdf5_path) - except Exception as e: - logging.exception('error in _convert_ms2hdf5: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) - return None + cmd = ['ls', hdf5_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) - def _copy_hdf5_to_nfs_dir(self, hdf5_path): - try: - hdf5_filename = os.path.basename(hdf5_path) - hdf5_nfs_path = os.path.join(QA_NFS_BASE_DIR, 'h5', hdf5_filename) - cmd = ['cp', hdf5_path, hdf5_nfs_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + if call(cmd, stdout=None, stderr=None) == 0: + logger.info('bf dataset with otdb_id=%s subtask_id=%s was already converted to hdf5 file %s', otdb_id, subtask_id, hdf5_path) + return hdf5_path - logger.debug('copying h5 file to nfs dir: %s', ' '.join(cmd)) - if call(cmd) == 0: - logger.info('copied h5 file to nfs dir: %s -> %s', hdf5_path, hdf5_nfs_path) - return hdf5_nfs_path - except Exception as e: - logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) + logger.info('trying to convert beamformed dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id) - def _move_plots_to_nfs_dir(self, plot_dir_path): - try: - plot_dir_name = os.path.basename(plot_dir_path) - plot_nfs_path = os.path.join(QA_NFS_BASE_DIR, 'plots', plot_dir_name) - cmd = ['cp', '-rf', plot_dir_path, plot_nfs_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + cmd = ['bf2hdf5', '-o', str(otdb_id)] + cmd += ['--output_dir', h5_dir_path] + cmd += ['--output_filename', h5_filename] - logger.info('copying plots: %s', ' '.join(cmd)) - if call(cmd) == 0: - logger.info('copied plots from %s to nfs dir: %s', plot_dir_path, plot_nfs_path) + # wrap the command in a cep4 docker ssh call + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - cmd = ['rm', '-rf', plot_dir_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('starting bf2hdf5, executing: %s', ' '.join(cmd)) - logger.debug('removing plots: %s', ' '.join(cmd)) - if call(cmd) == 0: - logger.info('removed plots from %s after they were copied to nfs dir %s', plot_dir_path, plot_nfs_path) - return plot_nfs_path - except Exception as e: - logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) - - def _cluster_h5_file(self, hdf5_path, otdb_id=None): - ''' - Try to cluster the baselines based on visibilities in the h5 file - using the clustering docker image developed by e-science. - This method assumes the adder_clustering docker image is available on cep4. If not, or if anything else - goes wrong, then the qa steps can just continue on the un-clustered h5 file. - The docker image can be build from the source on github: - https://github.com/NLeSC/lofar-predictive-maintenance - This is a private repo until the project has been published. At astron, jorrit has access. - In the future, we might incorporate the clustering code from the github repo in to the LOFAR source tree. - :param hdf5_path: the full path to the hdf5 file for which we want the plots. 
- :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) - :return: None - ''' - try: - cmd = ['show_hdf5_info', hdf5_path, '|', 'grep', 'clusters'] - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + if call(cmd) == 0: + hdf5_path = os.path.join(h5_dir_path, h5_filename) + logger.info('converted bf dataset with otdb_id=%s subtask_id=%s to hdf5 file %s', otdb_id, subtask_id, hdf5_path) + self._send_event_message('ConvertedBF2Hdf5', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'hdf5_file_path': hdf5_path}) + return hdf5_path + else: + msg = 'could not convert dataset with otdb_id=%s subtask_id=%s' % (otdb_id, subtask_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': msg}) + + except Exception as e: + logging.exception('error in _convert_ms2hdf5: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) + return None + + def _copy_hdf5_to_nfs_dir(self, h5_path): + try: + h5_org_dir_path, h5_org_filename = os.path.split(h5_path) + h5_nfs_path = os.path.join(QAService.QA_NFS_BASE_DIR, 'h5', h5_org_filename) + h5_nfs_dir, h5_nfs_filename = os.path.split(h5_nfs_path) + + # create nfs dir if needed + cmd = ['mkdir', '-p', h5_nfs_dir] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('create nfs dir if needed: %s', ' '.join(cmd)) + call(cmd) + + cmd = ['cp', '-f', h5_path, h5_nfs_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('copying h5 file to nfs dir: %s', ' '.join(cmd)) + if call(cmd) == 0: + logger.info('copied h5 file to nfs dir: %s -> %s', h5_path, h5_nfs_path) + return h5_nfs_path + except Exception as e: + logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) + + def _move_plots_to_nfs_dir(self, plot_dir_path): + try: + plot_dir_name = os.path.basename(plot_dir_path) + plot_nfs_base_path = os.path.join(QAService.QA_NFS_BASE_DIR, 'plots') + + # create nfs dir if needed + cmd = ['mkdir', '-p', plot_nfs_base_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('create nfs dir if needed: %s', ' '.join(cmd)) + call(cmd) + + plot_nfs_path = os.path.join(plot_nfs_base_path, plot_dir_name) + cmd = ['cp', '-rf', plot_dir_path, plot_nfs_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('copying plots: %s', ' '.join(cmd)) + if call(cmd) == 0: + logger.info('copied plots from %s to nfs dir: %s', plot_dir_path, plot_nfs_path) + + cmd = ['rm', '-rf', plot_dir_path] cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.debug('removing plots: %s', ' '.join(cmd)) if call(cmd) == 0: - logger.info('hdf5 file %s otdb_id %s was already clustered', hdf5_path, otdb_id) - return + logger.info('removed plots from %s after they were copied to nfs dir %s', plot_dir_path, plot_nfs_path) + return plot_nfs_path - # the command to cluster the given h5 file (executed in the e-science adder docker image) - cmd = ['cluster_this.py', hdf5_path] - cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest') - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + # move failed, so plots are still in original dir + return plot_dir_path + except Exception as e: + logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) - logger.info('clustering hdf5 file %s otdb_id %s, executing: %s', hdf5_path, otdb_id, ' '.join(cmd)) + def _cluster_h5_file(self, hdf5_path, otdb_id=None, subtask_id=None): + ''' + Try to cluster the baselines based on visibilities in the h5 
file + using the clustering docker image developed by e-science. + This method assumes the adder_clustering docker image is available on cep4. If not, or if anything else + goes wrong, then the qa steps can just continue on the un-clustered h5 file. + The docker image can be build from the source on github: + https://github.com/NLeSC/lofar-predictive-maintenance + This is a private repo until the project has been published. At astron, jorrit has access. + In the future, we might incorporate the clustering code from the github repo in to the LOFAR source tree. + :return: None + ''' + try: + cmd = ['show_hdf5_info', hdf5_path, '|', 'grep', 'clusters', '>&', '/dev/null'] + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) - if call(cmd) == 0: - logger.info('clustered hdf5 file %s otdb_id %s', hdf5_path, otdb_id) + if call(cmd) == 0: + logger.info('skipping clustering of hdf5 file %s otdb_id=%s subtask_id=%s which was already clustered', hdf5_path, otdb_id, subtask_id) + return - self._send_event_message('Clustered', {'otdb_id': otdb_id, - 'hdf5_file_path': hdf5_path}) - else: - msg = 'could not cluster hdf5 file %s otdb_id %s' % (hdf5_path, otdb_id) - logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) - except Exception as e: - logging.exception('error in _cluster_h5_file: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) + # the command to cluster the given h5 file (executed in the e-science adder docker image) + cmd = ['cluster_this.py', hdf5_path] + cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) - def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): - super().__init__(handler_type=QAFilteredOTDBBusListener.QAFilteredOTDBEventMessageHandler, - handler_kwargs={}, - exchange=exchange, - routing_key="%s.#" % (DEFAULT_FILTERED_OTDB_NOTIFICATION_SUBJECT,), - num_threads=1, - broker=broker) + logger.info('clustering hdf5 file %s otdb_id=%s subtask_id=%s, executing: %s', hdf5_path, otdb_id, subtask_id, ' '.join(cmd)) -class QAService: - def __init__(self, exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER): - """ - :param exchange: valid message exchange address - :param broker: valid broker host (default: None, which means localhost) - """ - self.filtering_buslistener = QAFilteringOTDBBusListener(exchange = exchange, broker = broker) - self.filtered_buslistener = QAFilteredOTDBBusListener(exchange = exchange, broker = broker) + if call(cmd) == 0: + logger.info('clustered hdf5 file %s otdb_id=%s subtask_id=%s', hdf5_path, otdb_id, subtask_id) - def __enter__(self): - self.filtering_buslistener.start_listening() - self.filtered_buslistener.start_listening() - return self + self._send_event_message('Clustered', {'otdb_id': otdb_id, 'subtask_id': subtask_id, + 'hdf5_file_path': hdf5_path}) + else: + msg = 'could not cluster hdf5 file %s otdb_id=%s subtask_id=%s' % (hdf5_path, otdb_id, subtask_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': msg}) + except Exception as e: + logging.exception('error in _cluster_h5_file: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) - def __exit__(self, exc_type, exc_val, exc_tb): - self.filtering_buslistener.stop_listening() - self.filtered_buslistener.stop_listening() def main(): ''' @@ -437,11 
+543,11 @@ def main(): description='run the qa_service which listens for observations/pipelines finished events on ' 'the bus and then starts the QA (Quality Assurance) processes to convert MS to ' 'hdf5 files and generate inspection plots.') - group = OptionGroup(parser, 'QPid Messaging options') - group.add_option('-b', '--broker', dest='broker', type='string', default='localhost', help='Address of the qpid broker, default: %default') + group = OptionGroup(parser, 'Messaging options') + group.add_option('-b', '--broker', dest='broker', type='string', default='localhost', help='Address of the message broker, default: %default') group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, - help="Bus or queue where the OTDB notifications are published. [default: %default]") + help="Bus or queue where the QA notifications are published. [default: %default]") parser.add_option_group(group) (options, args) = parser.parse_args() diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py index a9a5b401b6e38dbfc3ee790882ddd964ea972107..3318d0cf092869223bd4382b34009c27a5082bee 100755 --- a/QA/QA_Service/test/t_qa_service.py +++ b/QA/QA_Service/test/t_qa_service.py @@ -26,6 +26,10 @@ import os from datetime import datetime import logging + +from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + logger = logging.getLogger(__name__) from lofar.qa.service.qa_service import QAService @@ -35,6 +39,7 @@ from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor from lofar.messaging.messages import EventMessage from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT from lofar.common.test_utils import unit_test, integration_test +from lofar.common.json_utils import add_defaults_to_json_object_for_schema # the tests below test is multi threaded (even multi process) # define a SynchronizationQABusListener-derivative to handle synchronization (set the *_events) @@ -85,23 +90,33 @@ class TestQAService(unittest.TestCase): ''' Tests for the QAService class ''' + @classmethod + def setUpClass(cls) -> None: + cls.TEST_UUID = uuid.uuid1() + cls.TEST_OTDB_ID = 999999 + + cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) + cls.tmp_exchange.open() + + cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) + cls.tmss_test_env.start() + + @classmethod + def tearDownClass(cls) -> None: + cls.tmss_test_env.stop() + cls.tmp_exchange.close() + + def setUp(self): ''' quite complicated setup to setup test message-exchanges/queues and mock away ssh calls to cep4 and mock away dockerized commands ''' - self.TEST_UUID = uuid.uuid1() - self.TEST_OTDB_ID = 999999 - - self.tmp_exchange = TemporaryExchange("%s_%s" % (__class__.__name__, self.TEST_UUID)) - self.tmp_exchange.open() - self.addCleanup(self.tmp_exchange.close) - # where to store the test results - self.TEST_DIR = '/tmp/qa_service_%s' % self.TEST_UUID - self.TEST_H5_FILE = 'L%s.MS_extract.h5' % (self.TEST_OTDB_ID,) - self.TEST_H5_PATH = os.path.join(self.TEST_DIR, 'ms_extract', self.TEST_H5_FILE) + self.TEST_DIR = '/tmp/test_qa_service_%s' % self.TEST_UUID + QAService.QA_LUSTRE_BASE_DIR = os.path.join(self.TEST_DIR, 'lustre') + QAService.QA_NFS_BASE_DIR = os.path.join(self.TEST_DIR, 'nfs') # mock the calls to ssh cep4 and docker def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): @@ -109,12 +124,12 @@ class 
TestQAService(unittest.TestCase): return cmd def mocked_wrap_command_in_cep4_head_node_ssh_call(cmd): - logger.info('mocked_wrap_command_in_cep4_head_node_ssh_call returning original command: %s', ' '.join(cmd)) + logger.info('mocked_wrap_command_in_cep4_head_node_ssh_call returning original command (without ssh): %s', ' '.join(cmd)) return cmd def mocked_wrap_command_in_cep4_node_ssh_call(cmd, cpu_node_nr, partition, via_head): logger.info('mocked_wrap_command_in_cep4_node_ssh_call for %s node nr %s via head=%s ' \ - 'returning original command: %s', partition, cpu_node_nr, via_head, ' '.join(cmd)) + 'returning original command (without ssh): %s', partition, cpu_node_nr, via_head, ' '.join(cmd)) return cmd def mocked_get_cep4_available_nodes(partition): @@ -201,10 +216,9 @@ class TestQAService(unittest.TestCase): # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. if 'ms2hdf5' in cmd: # the create_test_hypercube executable should be available in the PATH environment - create_test_hypercube_path = 'create_test_hypercube' - - mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16', - '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH] + hdf5_path = QAService.h5_lustre_filepath(self.TEST_OTDB_ID) + mocked_cmd = ['create_test_hypercube', '-s 4', '-S 8', '-t 16', + '-o', str(self.TEST_OTDB_ID), hdf5_path] logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', ' '.join(mocked_cmd), ' '.join(cmd)) return mocked_cmd @@ -233,7 +247,7 @@ class TestQAService(unittest.TestCase): # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -335,7 +349,7 @@ class TestQAService(unittest.TestCase): # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -376,11 +390,11 @@ class TestQAService(unittest.TestCase): # replace the ms2hdf5 command which runs normally in the docker container # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. 
# the create_test_hypercube executable should be available in the PATH environment - create_test_hypercube_path = 'create_test_hypercube' - mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16', - '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH] - logger.info('mocked_wrap_command_for_docker returning mocked command to create test h5 file: %s', - ' '.join(mocked_cmd)) + hdf5_path = QAService.h5_lustre_filepath(self.TEST_OTDB_ID) + mocked_cmd = ['create_test_hypercube', '-s 4', '-S 8', '-t 16', + '-o', str(self.TEST_OTDB_ID), hdf5_path] + logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) return mocked_cmd if 'cluster_this.py' in cmd: @@ -405,7 +419,7 @@ class TestQAService(unittest.TestCase): # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -452,13 +466,13 @@ class TestQAService(unittest.TestCase): def mocked_wrap_command_in_cep4_node_ssh_call(cmd, cpu_node_nr, partition, via_head): logger.info('mocked_wrap_command_in_cep4_node_ssh_call for cpu node nr %s via head=%s ' \ 'returning call to bash false', cpu_node_nr, via_head) - return ['false', ';'] + return ['false'] self.wrap_command_in_cep4_node_ssh_call_mock.side_effect = mocked_wrap_command_in_cep4_node_ssh_call # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -482,6 +496,159 @@ class TestQAService(unittest.TestCase): self.ssh_cmd_list_mock1.assert_not_called() self.ssh_cmd_list_mock2.assert_not_called() + def test_05_qa_service_for_expected_behaviour_on_tmss_events(self): + ''' + This is a "duplicate" of test_01_qa_service_for_expected_behaviour, but then for TMSS. + This test starts a QAService, triggers a TMSS test observation finished event, + and tests if the generated h5 file and plots are as expected. + It is an end-to-end test which does not check the intermediate results. It is assumed that + the intermediate steps are tested in other tests/modules. 
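        In outline, the flow exercised below is: schedule the "QA file conversion" subtask and wait for the
        ConvertedMS2Hdf5 and Clustered events, then schedule the "QA plots" subtask and wait for the
        CreatedInspectionPlots and QAFinished events.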
+ ''' + logger.info(' -- test_05_qa_service_for_expected_behaviour_on_tmss_events -- ') + + from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + from lofar.sas.tmss.tmss.tmssapp.subtasks import create_observation_to_qafile_subtask, create_qafile_to_qaplots_subtask + + tmss_client = self.tmss_test_env.create_tmss_client() + with tmss_client: + # make sure we have the proper templates in the database + self.assertIsNotNone(tmss_client.get_subtask_template(name="QA file conversion")) + self.assertIsNotNone(tmss_client.get_subtask_template(name="QA plots")) + + # override the mock behaviour from setUp for this specific test + def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): + # replace the ms2hdf5 command which runs normally in the docker container + # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. + if 'ms2hdf5' in cmd: + # the create_test_hypercube executable should be available in the PATH environment + hdf5_path = os.path.join(cmd[cmd.index('--output_dir')+1], cmd[cmd.index('--output_filename')+1]) + mocked_cmd = ['create_test_hypercube', '-s 4', '-S 8', '-t 16', hdf5_path] + logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + if 'cluster_this.py' in cmd: + # replace the cluster command which runs normally in the docker container + # by a call to bash true, so the 'cluster_this' call returns 0 exit code + mocked_cmd = ['true'] + logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + #TODO: merge adder branch into trunk so we can use plot_hdf5_dynamic_spectra on the test-h5 file to create plots + if 'plot_hdf5_dynamic_spectra' in cmd: + # replace the plot_hdf5_dynamic_spectra command which runs normally in the docker container + # by a call to bash true, so the 'plot_hdf5_dynamic_spectra' call returns 0 exit code + mocked_cmd = ['true'] + logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + logger.info('''mocked_wrap_command_for_docker returning original command: '%s' ''', ' '.join(cmd)) + return cmd + + self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker + + # start the QAService (the object under test) + qaservice = QAService(exchange=self.tmp_exchange.address) + + with qaservice, tmss_client, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): + # start listening for QA event messages from the QAService + with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: + tdc = TMSSRESTTestDataCreator(django_api_url=self.tmss_test_env.django_server.url, + auth=(self.tmss_test_env.client_credentials.dbcreds.user, + self.tmss_test_env.client_credentials.dbcreds.password)) + + qafile_subtask_template = tmss_client.get_subtask_template(name="QA file conversion") + qafile_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qafile_subtask_template['schema']) + + subtask_url = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qafile_subtask_template['url'], + specifications_doc=qafile_subtask_spec_doc), 
+ '/subtask/') + subtask_id = subtask_url.split('/')[-2] + + qaplots_subtask_template = tmss_client.get_subtask_template(name="QA plots") + qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template['schema']) + + subtask_url2 = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qaplots_subtask_template['url'], + specifications_doc=qaplots_subtask_spec_doc), '/subtask/') + subtask_id2 = subtask_url2.split('/')[-2] + + # trigger a qa process by setting the tmss subtask to scheduled + # this will result in the QAService actually doing its magic + tmss_client.set_subtask_status(subtask_id, 'scheduled') + + # start waiting until ConvertedMS2Hdf5 event message received (or timeout) + qa_listener.converted_event.wait(30) + + # ConvertedMS2Hdf5 event message should have been sent, so converted_event should have been set + self.assertTrue(qa_listener.converted_event.is_set()) + + # check the converted_msg_content + self.assertTrue('subtask_id' in qa_listener.converted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.converted_msg_content) + + + # start waiting until Clustered event message received (or timeout) + qa_listener.clustered_event.wait(30) + + + # Clustered event message should have been sent, so clustered_event should have been set + self.assertTrue(qa_listener.clustered_event.is_set()) + + # check the clustered_msg_content + self.assertTrue('subtask_id' in qa_listener.clustered_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.clustered_msg_content) + + tmss_client.set_subtask_status(subtask_id2, 'scheduled') + + # start waiting until CreatedInspectionPlots event message received (or timeout) + qa_listener.plotted_event.wait(30) + + # CreatedInspectionPlots event message should have been sent, so plotted_event should have been set + self.assertTrue(qa_listener.plotted_event.is_set()) + + # check the plotted_msg_content + self.assertTrue('otdb_id' in qa_listener.plotted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.plotted_msg_content) + self.assertTrue('plot_dir_path' in qa_listener.plotted_msg_content) + + # TODO: merge adder branch into trunk so we can use plot_hdf5_dynamic_spectra on the test-h5 file to create plots, then re-enable the checks on created plots + # # check if the output dirs/files exist + # self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['hdf5_file_path'])) + # logger.info(qa_listener.plotted_msg_content['plot_dir_path']) + # self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['plot_dir_path'])) + # plot_file_names = [f for f in os.listdir(qa_listener.plotted_msg_content['plot_dir_path']) + # if f.endswith('png')] + # self.assertEqual(10, len(plot_file_names)) + # + # auto_correlation_plot_file_names = [f for f in plot_file_names + # if 'auto' in f] + # self.assertEqual(4, len(auto_correlation_plot_file_names)) + # + # complex_plot_file_names = [f for f in plot_file_names + # if 'complex' in f] + # self.assertEqual(6, len(complex_plot_file_names)) + + # start waiting until QAFinished event message received (or timeout) + qa_listener.finished_event.wait(30) + + # QAFinished event message should have been sent, so finished_event should have been set + self.assertTrue(qa_listener.finished_event.is_set()) + + # check the result_msg_content + self.assertTrue('otdb_id' in qa_listener.finished_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.finished_msg_content) + self.assertTrue('plot_dir_path' in qa_listener.finished_msg_content) + + 
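            # finally, check that the (mocked) docker/cep4 wrappers were exercised and that no real ssh command lists were built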
self.wrap_command_for_docker_mock.assert_called() + self.wrap_command_in_cep4_node_ssh_call_mock.assert_called() + self.wrap_command_in_cep4_head_node_ssh_call_mock.assert_called() + self.get_cep4_available_cpu_nodes_mock.assert_called() + self.ssh_cmd_list_mock1.assert_not_called() + self.ssh_cmd_list_mock2.assert_not_called() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) diff --git a/README b/README index 81f0e513a58d366f9297d7a923e6aea4fc2ff70e..a6ce4c0eeb982831c7736c6e15de9e44306c4ae5 100644 --- a/README +++ b/README @@ -7,9 +7,9 @@ the LOFAR radio telescope and process its measurement output. LOFAR is operated by ASTRON, the Netherlands Institute for Radio Astronomy. For more information, see http://www.astron.nl/ and http://www.lofar.org/ -Source code is available at https://svn.astron.nl/LOFAR +Source code is available at https://git.astron.nl/ro/LOFAR.git -Repository web interface: https://svn.astron.nl/viewvc/LOFAR/ +Repository web interface: https://git.astron.nl/ro/LOFAR/ Top-level LOFAR Project Content (incomplete summary) diff --git a/RTCP/Cobalt/GPUProc/src/scripts/stopBGL.sh b/RTCP/Cobalt/GPUProc/src/scripts/stopBGL.sh index ed5dfb79fe1c59fdcdc59f8c16b777b674982958..938bc6d1bde108269554c60af11781d902d5a02c 100755 --- a/RTCP/Cobalt/GPUProc/src/scripts/stopBGL.sh +++ b/RTCP/Cobalt/GPUProc/src/scripts/stopBGL.sh @@ -36,7 +36,7 @@ function writecommand { # Send the stop command for a graceful shutdown COMMAND_HOST=`getCobaltHosts -C $PARSET` - timeout 60s ssh $COMMAND_HOST "timeout 60s echo $CMD > $PIPE" + timeout 60s ssh $COMMAND_HOST "timeout 60s bash -c 'echo $CMD > $PIPE'" } ( diff --git a/SAS/DataManagement/Cleanup/CleanupService/service.py b/SAS/DataManagement/Cleanup/CleanupService/service.py index 23d93a3fed4c163ef45dedc7ea8c28b1cf8226af..243fc0a5c9679ad6932e619b2134316eeb2a0770 100644 --- a/SAS/DataManagement/Cleanup/CleanupService/service.py +++ b/SAS/DataManagement/Cleanup/CleanupService/service.py @@ -242,8 +242,8 @@ class CleanupHandler(ServiceMessageHandler): claims = radbrpc.getResourceClaims(task_ids=task['id'], resource_type='storage') cep4_storage_claim_ids = [c['id'] for c in claims if c['resource_id'] == cep4_storage_resource['id']] for claim_id in cep4_storage_claim_ids: - logger.info("setting endtime for claim %s on resource %s %s to now", claim_id, cep4_storage_resource['id'], cep4_storage_resource['name']) - radbrpc.updateResourceClaim(claim_id, endtime=datetime.utcnow()) + logger.info("setting endtime for claim %s on resource %s %s to task's endtime '%s' (resulting in a deleted claim)", claim_id, cep4_storage_resource['id'], cep4_storage_resource['name'], task['endtime']) + radbrpc.updateResourceClaim(claim_id, endtime=task['endtime']) except Exception as e: logger.error(str(e)) diff --git a/SAS/DataManagement/DataManagementCommon/path.py b/SAS/DataManagement/DataManagementCommon/path.py index d1653fc96f55d8b128b693e9142ecb74099d594d..36c15d93513d97b9ce8310cc47c5196370ad50a3 100644 --- a/SAS/DataManagement/DataManagementCommon/path.py +++ b/SAS/DataManagement/DataManagementCommon/path.py @@ -183,7 +183,7 @@ class PathResolver: return result def pathExists(self, path): - cmd = ['lfs', 'ls', path] + cmd = ['ls', path] cmd = wrap_command_in_cep4_head_node_ssh_call_if_needed(cmd) logger.debug(' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/SAS/TMSS/CMakeLists.txt b/SAS/TMSS/CMakeLists.txt index 
b41d67a1d9df1b8a505a793a8485e69c78e30df9..8c0d7575c2b5ab1194e3115d8faca0debdc0586a 100644 --- a/SAS/TMSS/CMakeLists.txt +++ b/SAS/TMSS/CMakeLists.txt @@ -1,10 +1,14 @@ -lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset) +lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset PyMessaging) +lofar_add_package(TMSSClient client) add_subdirectory(src) add_subdirectory(bin) add_subdirectory(test) add_subdirectory(frontend) +add_subdirectory(services) lofar_add_docker_files(docker-compose-tmss.yml) + + diff --git a/SAS/TMSS/bin/tmss b/SAS/TMSS/bin/tmss index b4898cee0919ae362c6bad77d5e10d222c5bf2a6..078fab655d8c0313eabb965a6c83d9a1687bb97a 100755 --- a/SAS/TMSS/bin/tmss +++ b/SAS/TMSS/bin/tmss @@ -24,18 +24,27 @@ PORT=8008 CREDENTIALS="tmss" LDAP_CREDENTIALS="tmss_ldap" +EXCHANGE="lofar" +BROKER="localhost" # Parse args: -while getopts "p:C:L:h" opt; do +while getopts "p:C:L:e:b:h" opt; do case ${opt} in p ) PORT=${OPTARG} ;; C ) CREDENTIALS=${OPTARG} ;; L ) LDAP_CREDENTIALS=${OPTARG} ;; + e ) EXCHANGE=${OPTARG} ;; + b ) BROKER=${OPTARG} ;; h ) echo "usage: tmss [OPTIONS]" echo " where options are:" echo " -p <port> the port where django runs the rest http interface on. default=$PORT" echo " -C <credentials-name> the name of the database credentials in ~/.lofar/dbcredentials. default=$CREDENTIALS" echo " -L <credentials-name> the name of the ldap credentials in ~/.lofar/dbcredentials. default=$LDAP_CREDENTIALS" + echo "" + echo " Messaging options:" + echo " -b BROKER, Address of the message broker, default: $BROKER" + echo " -e EXCHANGE Bus or queue where the TMSS messages are published. [default: $EXCHANGE]" + exit 0 ;; esac @@ -43,10 +52,12 @@ done echo "!!! This tmss application is for testing only, properly deploy in Nginx or Apache for production use !!!" 
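# For reference, a typical invocation of this test/development runner with the new messaging options
# (the values shown are simply the defaults parsed above): tmss -p 8008 -C tmss -L tmss_ldap -b localhost -e lofar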
-echo "Using Django port=$PORT credentials=$CREDENTIALS ldap_credentials=$LDAP_CREDENTIALS" +echo "Using Django port=$PORT credentials=$CREDENTIALS ldap_credentials=$LDAP_CREDENTIALS exchange=$EXCHANGE broker=$BROKER" export TMSS_DBCREDENTIALS=$CREDENTIALS export TMSS_LDAPCREDENTIALS=$LDAP_CREDENTIALS +export TMSS_EXCHANGE=$EXCHANGE +export TMSS_BROKER=$BROKER DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" diff --git a/SAS/TMSS/client/CMakeLists.txt b/SAS/TMSS/client/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..573d9749e4fa2ac258540634672514382c6a4e38 --- /dev/null +++ b/SAS/TMSS/client/CMakeLists.txt @@ -0,0 +1,6 @@ +lofar_package(TMSSClient 0.1 DEPENDS PyCommon pyparameterset PyMessaging) + +lofar_find_package(PythonInterp 3.4 REQUIRED) + +add_subdirectory(lib) +add_subdirectory(bin) diff --git a/SAS/TMSS/client/bin/CMakeLists.txt b/SAS/TMSS/client/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..a7142728b7503dd6acabf1ac85e4a611c7e8b7c7 --- /dev/null +++ b/SAS/TMSS/client/bin/CMakeLists.txt @@ -0,0 +1,7 @@ +lofar_add_bin_scripts(tmss_set_subtask_state) +lofar_add_bin_scripts(tmss_get_subtask_parset) +lofar_add_bin_scripts(tmss_get_subtask) +lofar_add_bin_scripts(tmss_get_subtasks) +lofar_add_bin_scripts(tmss_get_subtask_predecessors) +lofar_add_bin_scripts(tmss_get_subtask_successors) +lofar_add_bin_scripts(tmss_schedule_subtask) diff --git a/SAS/TMSS/client/bin/tmss_get_subtask b/SAS/TMSS/client/bin/tmss_get_subtask new file mode 100755 index 0000000000000000000000000000000000000000..61ad27dbca05a70f447f9f28357c95247018658f --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtask @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from lofar.sas.tmss.client.mains import main_get_subtask + +if __name__ == "__main__": + main_get_subtask() diff --git a/SAS/TMSS/client/bin/tmss_get_subtask_parset b/SAS/TMSS/client/bin/tmss_get_subtask_parset new file mode 100755 index 0000000000000000000000000000000000000000..56cd6b8ff47d52e444ec1869f85af8ce7f3e31e3 --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtask_parset @@ -0,0 +1,26 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + + +# Script to create, setup, and run a temporary ldap service with fixtures for easy functional testing + +from lofar.sas.tmss.client.mains import main_get_subtask_parset + +if __name__ == "__main__": + main_get_subtask_parset() diff --git a/SAS/TMSS/client/bin/tmss_get_subtask_predecessors b/SAS/TMSS/client/bin/tmss_get_subtask_predecessors new file mode 100755 index 0000000000000000000000000000000000000000..7944ab528d3929dd4587f072ab7ef839b20fc033 --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtask_predecessors @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from lofar.sas.tmss.client.mains import main_get_subtask_predecessors + +if __name__ == "__main__": + main_get_subtask_predecessors() diff --git a/SAS/TMSS/client/bin/tmss_get_subtask_successors b/SAS/TMSS/client/bin/tmss_get_subtask_successors new file mode 100755 index 0000000000000000000000000000000000000000..ed50a3edf182e71dd716233a460b23189e23a39d --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtask_successors @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
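# Illustrative example invocations of these thin CLI wrappers (subtask id 123 is hypothetical; they all
# read the TMSS REST credentials from ~/.lofar/dbcredentials/<name>.ini, default name "TMSSClient",
# as defined in tmss_http_rest_client.py):
#
#   tmss_get_subtask 123
#   tmss_get_subtask_successors -s defined 123
#   tmss_set_subtask_state 123 finished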
+ +from lofar.sas.tmss.client.mains import main_get_subtask_successors + +if __name__ == "__main__": + main_get_subtask_successors() diff --git a/SAS/TMSS/client/bin/tmss_get_subtasks b/SAS/TMSS/client/bin/tmss_get_subtasks new file mode 100755 index 0000000000000000000000000000000000000000..88d6233b52a7bcdb79e396fc4bb72c69d497477d --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtasks @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from lofar.sas.tmss.client.mains import main_get_subtasks + +if __name__ == "__main__": + main_get_subtasks() diff --git a/SAS/TMSS/client/bin/tmss_schedule_subtask b/SAS/TMSS/client/bin/tmss_schedule_subtask new file mode 100755 index 0000000000000000000000000000000000000000..ec0a2df943bbf8e7181913fc7267da0bce316f2b --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_schedule_subtask @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from lofar.sas.tmss.client.mains import main_schedule_subtask + +if __name__ == "__main__": + main_schedule_subtask() diff --git a/SAS/TMSS/client/bin/tmss_set_subtask_state b/SAS/TMSS/client/bin/tmss_set_subtask_state new file mode 100755 index 0000000000000000000000000000000000000000..0f63ad453bfa681b5ff5766340a5808e9e53a54b --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_set_subtask_state @@ -0,0 +1,26 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + + +# Script to create, setup, and run a temporary ldap service with fixtures for easy functional testing + +from lofar.sas.tmss.client.mains import main_set_subtask_state + +if __name__ == "__main__": + main_set_subtask_state() diff --git a/SAS/TMSS/client/lib/CMakeLists.txt b/SAS/TMSS/client/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..94606c743637ebf74951b6d15efd87ec369737eb --- /dev/null +++ b/SAS/TMSS/client/lib/CMakeLists.txt @@ -0,0 +1,12 @@ +lofar_find_package(PythonInterp 3.4 REQUIRED) +include(PythonInstall) + +set(_py_files + tmssbuslistener.py + mains.py + tmss_http_rest_client.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/client) + diff --git a/SAS/TMSS/client/lib/mains.py b/SAS/TMSS/client/lib/mains.py new file mode 100644 index 0000000000000000000000000000000000000000..f645b9643a195d213ee4411a7cd3e964419afcbe --- /dev/null +++ b/SAS/TMSS/client/lib/mains.py @@ -0,0 +1,127 @@ +import json +import argparse +from pprint import pprint +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +from lofar.common.datetimeutils import parseDatetime + + +def main_get_subtask_parset(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", help="The ID of the TMSS subtask to get the parset from") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + print(session.get_subtask_parset(args.subtask_id)) + except Exception as e: + print(e) + exit(1) + + +def main_get_subtask_predecessors(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get the predecessors for") + parser.add_argument('-s', '--state', help="only get predecessors with this state") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + pprint(session.get_subtask_predecessors(args.subtask_id, state=args.state)) + except Exception as e: + print(e) + exit(1) + + +def main_get_subtask_successors(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get the successors for") + parser.add_argument('-s', '--state', help="only get successors with this state") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + pprint(session.get_subtask_successors(args.subtask_id, state=args.state)) + except Exception as e: + print(e) + exit(1) + + +def main_get_subtask(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + pprint(session.get_subtask(args.subtask_id)) + except Exception as e: + print(e) + exit(1) + + +def main_get_subtasks(): + parser = argparse.ArgumentParser() + parser.add_argument('-s', '--state', help="only get subtasks with this state") + parser.add_argument('-c', '--cluster', help="only get subtasks for this cluster") + parser.add_argument('--start_time_less_then', help="only 
get subtasks with a start time less then this timestamp") + parser.add_argument('--start_time_greater_then', help="only get subtasks with a start time greater then this timestamp") + parser.add_argument('--stop_time_less_then', help="only get subtasks with a stop time less then this timestamp") + parser.add_argument('--stop_time_greater_then', help="only get subtasks with a stop time greater then this timestamp") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + result = session.get_subtasks(state=args.state, + cluster=args.cluster, + start_time_less_then=parseDatetime(args.start_time_less_then) if args.start_time_less_then else None, + start_time_greater_then=parseDatetime(args.start_time_greater_then) if args.start_time_greater_then else None, + stop_time_less_then=parseDatetime(args.stop_time_less_then) if args.stop_time_less_then else None, + stop_time_greater_then=parseDatetime(args.stop_time_greater_then) if args.stop_time_greater_then else None) + pprint(result) + except Exception as e: + print(e) + exit(1) + + +def main_set_subtask_state(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to set the status on") + parser.add_argument("state", help="The state to set") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + changed_subtask = session.set_subtask_status(args.subtask_id, args.state) + print("%s now has state %s" % (changed_subtask['url'], changed_subtask['state'])) + except Exception as e: + print(e) + exit(1) + + +def main_specify_observation_task(): + """ + Ask user for parameter 'taskid' and execute API-call to specify observation + """ + parser = argparse.ArgumentParser() + parser.add_argument("task_id", help="The ID of the TMSS task to specify for observation") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + result = session.specify_observation_task(args.task_id) + except Exception as e: + print(e) + exit(1) + + +def main_schedule_subtask(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to be scheduled") + args = parser.parse_args() + + try: + with TMSSsession.create_from_dbcreds_for_ldap() as session: + pprint(session.schedule_subtask(args.subtask_id)) + except Exception as e: + print(e) + exit(1) diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py new file mode 100644 index 0000000000000000000000000000000000000000..867a29c05bc2c371bed2c51dfeb88d690f230bd4 --- /dev/null +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -0,0 +1,218 @@ +import logging +logger = logging.getLogger(__file__) + +import requests +from http.client import responses +import os +import json +from datetime import datetime +from lofar.common.datetimeutils import formatDatetime + +# usage example: +# +# with TMSSsession('paulus', 'pauluspass', 'localhost', 8000) as tmsssession: +# response = tmsssession.session.get(url='http://localhost/api/task_draft/') +# print(response) + + +#TODO: add unittests! 
+class TMSSsession(object): + + OPENID = "openid" + BASICAUTH = "basicauth" + + def __init__(self, username, password, host, port: int=8000, authentication_method=OPENID): + self.session = requests.session() + self.username = username + self.password = password + self.base_url = "http://%s:%d/api" % (host, port) + self.authentication_method = authentication_method + + @staticmethod + def create_from_dbcreds_for_ldap(dbcreds_name: str=None): + '''Factory method to create a TMSSSession object which uses the credentials in the ~/.lofar/dbcredentials/<dbcreds_name>.ini file + (mis)use the DBCredentials to get a url with user/pass for tmss + the contents below are used to contruct a url like this: http://localhost:8000/api + [database:TMSS] + host=localhost + user=<username> + password=<password> + type=http + port=8000 + ''' + if dbcreds_name is None: + dbcreds_name = os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient") + + from lofar.common.dbcredentials import DBCredentials + dbcreds = DBCredentials().get(dbcreds_name) + return TMSSsession(username=dbcreds.user, password=dbcreds.password, + host=dbcreds.host, + port=dbcreds.port, + authentication_method=TMSSsession.BASICAUTH) + + def __enter__(self): + self.open() + + # return the request session for use within the context + return self + + def __exit__(self, type, value, traceback): + self.close() + + def open(self): + '''open the request session and login''' + self.session.__enter__() + self.session.verify = False + + if self.authentication_method == self.OPENID: + # get authentication page of OIDC through TMSS redirect + response = self.session.get(self.base_url.replace('/api', '/oidc/authenticate/'), allow_redirects=True) + csrftoken = self.session.cookies['csrftoken'] + + # post user credentials to login page, also pass csrf token + data = {'username': self.username, 'password': self.password, 'csrfmiddlewaretoken': csrftoken} + response = self.session.post(url=response.url, data=data, allow_redirects=True) + + # raise when sth went wrong + if "The username and/or password you specified are not correct" in response.content.decode('utf8'): + raise ValueError("The username and/or password you specified are not correct") + if response.status_code != 200: + raise ConnectionError(response.content.decode('utf8')) + + if self.authentication_method == self.BASICAUTH: + self.session.auth = (self.username, self.password) + + def close(self): + '''close the request session and logout''' + try: + # logout user + self.session.get(self.base_url + '/logout/', allow_redirects=True) + self.session.close() + except: + pass + + def set_subtask_status(self, subtask_id: int, status: str) -> {}: + '''set the status for the given subtask, and return the subtask with its new state, or raise on error''' + response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id), + json={'state': "%s/subtask_state/%s/" % (self.base_url, status)}, + params={'format':'json'}) + + if response.status_code >= 200 and response.status_code < 300: + return json.loads(response.content.decode('utf-8')) + + content = response.content.decode('utf-8') + raise Exception("Could not set status with url %s - %s %s - %s" % (response.request.url, response.status_code, responses.get(response.status_code), content)) + + def get_subtask_parset(self, subtask_id) -> str: + '''get the lofar parameterset (as text) for the given subtask''' + result = self.session.get(url='%s/subtask/%s/parset' % (self.base_url, subtask_id)) + if result.status_code >= 200 and result.status_code 
< 300: + return result.content.decode('utf-8') + raise Exception("Could not get parameterset for subtask %s.\nResponse: %s" % (subtask_id, result)) + + def get_subtask_predecessors(self, subtask_id: int, state: str=None) -> list: + '''get the subtask's predecessors as list of dict for the given subtask''' + + clauses = {} + if state is not None: + clauses["state__value"] = state + + path = 'subtask/%s/predecessors' % (subtask_id,) + return self.get_path_as_json_object(path, clauses) + + def get_subtask_successors(self, subtask_id: int, state: str=None) -> list: + '''get the subtask's successors as list of dict for the given subtask''' + clauses = {} + if state is not None: + clauses["state__value"] = state + + path = 'subtask/%s/successors' % (subtask_id,) + return self.get_path_as_json_object(path, clauses) + + def get_subtask(self, subtask_id: int) -> dict: + '''get the subtask as dict for the given subtask''' + path = 'subtask/%s' % (subtask_id,) + return self.get_path_as_json_object(path) + + def get_subtasks(self, state: str=None, + cluster: str=None, + start_time_less_then: datetime=None, start_time_greater_then: datetime=None, + stop_time_less_then: datetime = None, stop_time_greater_then: datetime = None) -> list: + '''get subtasks (as list of dicts) filtered by the given parameters''' + clauses = {} + if state is not None: + clauses["state__value"] = state + if cluster is not None: + clauses["cluster__name"] = cluster + if start_time_less_then is not None: + clauses["start_time__lt="] = formatDatetime(start_time_less_then) + if start_time_greater_then is not None: + clauses["start_time__gt"] = formatDatetime(start_time_greater_then) + if stop_time_less_then is not None: + clauses["stop_time__lt"] = formatDatetime(stop_time_less_then) + if stop_time_greater_then is not None: + clauses["stop_time__gt"] = formatDatetime(stop_time_greater_then) + + return self.get_path_as_json_object("subtask", clauses) + + def get_path_as_json_object(self, path: str, params={}) -> object: + '''get resource at the given path, interpret it as json, and return it as as native object (usually a dict or a list of dicts)''' + full_url = '%s/%s/' % (self.base_url, path) + return self.get_url_as_json_object(full_url, params=params) + + def get_url_as_json_object(self, full_url: str, params={}) -> object: + '''get resource at the given full url (including http://<base_url>, interpret it as json, and return it as as native object (usually a dict or a list of dicts)''' + if "format=json" not in full_url or params.get("format") != "json": + params['format'] ='json' + + response = self.session.get(url=full_url, params=params) + logger.info("[%s] %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url) + + if response.status_code >= 200 and response.status_code < 300: + result = json.loads(response.content.decode('utf-8')) + if isinstance(result, dict): + result_object = result.get('results', result) # return the 'results' list if any, or lese just the object itself + + if result.get('next'): + # recurse, get the 'next' url, and return a concatenation of the results + return result_object + self.get_url_as_json_object(result['next']) + return result_object + return result + + # ugly error message parsing + content = response.content.decode('utf-8') + try: + error_msg = content.split('\n')[1] # magic! error message is at 2nd line of response... 
+ except: + error_msg= content + + raise Exception("Could not get %s - %s %s - %s" % (full_url, response.status_code, responses.get(response.status_code), error_msg)) + + def get_subtask_template(self, name: str, version: str=None) -> dict: + '''get the subtask_template as dict for the given name (and version)''' + clauses = {} + if name is not None: + clauses["name"] = name + if version is not None: + clauses["version"] = version + result = self.get_path_as_json_object('subtask_template', clauses) + if isinstance(result, list): + if len(result) > 1: + raise ValueError("Found more then one SubtaskTemplate for clauses: %s" % (clauses,)) + elif len(result) == 1: + return result[0] + return None + return result + + def specify_observation_task(self, task_id: int) -> requests.Response: + """specify observation for the given draft task by just doing a REST API call """ + result = self.session.get(url='%s/api/task/%s/specify_observation' % (self.base_url, task_id)) + if result.status_code >= 200 and result.status_code < 300: + return result.content.decode('utf-8') + raise Exception("Could not specify observation for task %s.\nResponse: %s" % (task_id, result)) + + def schedule_subtask(self, subtask_id: int) -> {}: + """schedule the subtask for the given subtask_id. + returns the scheduled subtask upon success, or raises.""" + return self.get_path_as_json_object('subtask/%s/schedule' % subtask_id) + diff --git a/SAS/TMSS/client/lib/tmssbuslistener.py b/SAS/TMSS/client/lib/tmssbuslistener.py new file mode 100644 index 0000000000000000000000000000000000000000..30c49bb7ce9ae2bd70093821088dbd1a4667e607 --- /dev/null +++ b/SAS/TMSS/client/lib/tmssbuslistener.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python3 + +# TMSSBusListener.py +# +# Copyright (C) 2015 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: TMSSBusListener.py 1580 2015-09-30 14:18:57Z loose $ + +""" +TMSSBusListener listens on the lofar notification message bus and calls (empty) on<SomeMessage> methods when such a message is received. +Typical usage is to derive your own subclass from TMSSBusListener and implement the specific on<SomeMessage> methods that you are interested in. 
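For example, a minimal sketch using the handler and listener classes defined below:

    class MyEventMessageHandler(TMSSSubTaskEventMessageHandler):
        def onSubTaskFinished(self, subtask_id: int, old_state: str, new_state: str):
            logger.info("subtask %s changed state from %s to %s", subtask_id, old_state, new_state)

    with TMSSSubTaskBusListener(handler_type=MyEventMessageHandler):
        waitForInterrupt()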
+""" + +from lofar.messaging.messagebus import BusListener, AbstractMessageHandler +from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME, EventMessage +from lofar.common.util import waitForInterrupt, single_line_with_single_spaces + +import logging +logger = logging.getLogger(__name__) + + +_DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE = 'TMSS.%s.notification' +DEFAULT_TMSS_TASK_NOTIFICATION_PREFIX = _DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE % 'Task' +DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX = _DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE % 'SubTask' +DEFAULT_TMSS_ALL_NOTIFICATION_PREFIX = _DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE + '#' + + +class TMSSSubTaskEventMessageHandler(AbstractMessageHandler): + ''' + Base-type messagehandler for handling TMSS event messages. + Typical usage is to derive your own subclass from TMSSSubTaskEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in. + ''' + def handle_message(self, msg: EventMessage): + if not isinstance(msg, EventMessage): + raise ValueError("%s: Ignoring non-EventMessage: %s" % (self.__class__.__name__, msg)) + + stripped_subject = msg.subject.replace("%s." % DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX, '') + + logger.info("%s.on%s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content))) + + if stripped_subject == 'Defining': + self.onSubTaskDefining(**msg.content) + elif stripped_subject == 'Defined': + self.onSubTaskDefined(**msg.content) + elif stripped_subject == 'Scheduling': + self.onSubTaskScheduling(**msg.content) + elif stripped_subject == 'Scheduled': + self.onSubTaskScheduled(**msg.content) + elif stripped_subject == 'Queueing': + self.onSubTaskQueueing(**msg.content) + elif stripped_subject == 'Queued': + self.onSubTaskQueued(**msg.content) + elif stripped_subject == 'Starting': + self.onSubTaskStarting(**msg.content) + elif stripped_subject == 'Started': + self.onSubTaskStarted(**msg.content) + elif stripped_subject == 'Finishing': + self.onSubTaskFinishing(**msg.content) + elif stripped_subject == 'Finished': + self.onSubTaskFinished(**msg.content) + elif stripped_subject == 'Cancelling': + self.onSubTaskCancelling(**msg.content) + elif stripped_subject == 'Cancelled': + self.onSubTaskCancelled(**msg.content) + elif stripped_subject == 'Error': + self.onSubTaskError(**msg.content) + else: + raise ValueError("TMSSBusListener.handleMessage: unknown subject: %s" % msg.subject) + + def onSubTaskDefining(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskDefining is called upon receiving a SubTaskDefining message, which is sent when a SubTasks changes state to "Defining". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskDefined(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskDefined is called upon received a SubTaskDefined message, which is sent when a SubTasks changes state to "Defined". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskScheduling(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskScheduling is called upon receiving a SubTaskScheduling message, which is sent when a SubTasks changes state to "Scheduling". 
+ :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskScheduled is called upon received a SubTaskScheduled message, which is sent when a SubTasks changes state to "Scheduled". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskQueueing(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskQueueing is called upon receiving a SubTaskQueueing message, which is sent when a SubTasks changes state to "Queueing". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskQueued(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskQueued is called upon received a SubTaskQueued message, which is sent when a SubTasks changes state to "Queued". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskStarting(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskStarting is called upon receiving a SubTaskStarting message, which is sent when a SubTasks changes state to "Starting". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskStarted(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskStarted is called upon received a SubTaskStarted message, which is sent when a SubTasks changes state to "Started". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskFinishing(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskFinishing is called upon receiving a SubTaskFinishing message, which is sent when a SubTasks changes state to "Finishing". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskFinished(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskFinished is called upon received a SubTaskFinished message, which is sent when a SubTasks changes state to "Finished". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskCancelling(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskCancelling is called upon receiving a SubTaskCancelling message, which is sent when a SubTasks changes state to "Cancelling". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskCancelled(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskCancelled is called upon received a SubTaskCancelled message, which is sent when a SubTasks changes state to "Cancelled". 
+ :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + def onSubTaskError(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskError is called upon receiving a SubTaskError message, which is sent when a SubTasks changes state to "Error". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + pass + + +class TMSSSubTaskBusListener(BusListener): + def __init__(self, + handler_type: TMSSSubTaskEventMessageHandler.__class__ = TMSSSubTaskEventMessageHandler, + handler_kwargs: dict = None, + exchange: str = DEFAULT_BUSNAME, + routing_key: str = DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX+".#", + num_threads: int = 1, + broker: str = DEFAULT_BROKER): + """ + TMSSSubTaskBusListener listens on the lofar notification message bus and calls on<SomeMessage> methods in the TMSSSubTaskEventMessageHandler when such a message is received. + Typical usage is to derive your own subclass from TMSSSubTaskEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in. + """ + if not issubclass(handler_type, TMSSSubTaskEventMessageHandler): + raise TypeError("handler_type should be a TMSSSubTaskEventMessageHandler subclass") + + super().__init__(handler_type, handler_kwargs, exchange, routing_key, num_threads, broker) + + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + class ExampleTMSSSubTaskEventMessageHandler(TMSSSubTaskEventMessageHandler): + def onSubTaskDefined(self, **kwargs): + logger.debug("MyTMSSSubTaskEventMessageHandler.onSubTaskDefined(%s)", kwargs) + + with TMSSSubTaskBusListener(handler_type=ExampleTMSSSubTaskEventMessageHandler): + waitForInterrupt() + diff --git a/SAS/TMSS/services/CMakeLists.txt b/SAS/TMSS/services/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..4520a7b4e826a8ad61f4feec3b7d1d263f10495a --- /dev/null +++ b/SAS/TMSS/services/CMakeLists.txt @@ -0,0 +1,2 @@ +lofar_add_package(TMSSSubtaskSchedulingService subtask_scheduling) + diff --git a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..460e356bc2c99121eb41a48fc27fad7d20a51fac --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt @@ -0,0 +1,8 @@ +lofar_package(TMSSSubtaskSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging) + +lofar_find_package(PythonInterp 3.4 REQUIRED) + +add_subdirectory(lib) +add_subdirectory(bin) +add_subdirectory(test) + diff --git a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..07e30a532f710dd1242ba026ad12e9ce014f1125 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt @@ -0,0 +1,4 @@ +lofar_add_bin_scripts(tmss_subtask_scheduling_service) + +# supervisord config files +lofar_add_sysconf_files(tmss_subtask_scheduling_service.ini DESTINATION supervisord.d) diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service new file mode 100755 index 
0000000000000000000000000000000000000000..2ecd686a25fd88e45094bf4cda143e41de1fb61d --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service @@ -0,0 +1,24 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + + +from lofar.sas.tmss.services.subtask_scheduling import main + +if __name__ == "__main__": + main() diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini new file mode 100644 index 0000000000000000000000000000000000000000..e43c0d3e66f4534b32c6d6129397a0309a2b95e7 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini @@ -0,0 +1,9 @@ +[program:tmss_subtask_scheduling_service] +command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec tmss_subtask_scheduling_service' +user=lofarsys +stopsignal=INT ; KeyboardInterrupt +stopasgroup=true ; bash does not propagate signals +stdout_logfile=%(program_name)s.log +redirect_stderr=true +stderr_logfile=NONE +stdout_logfile_maxbytes=0 diff --git a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..7cf0b591612ccb75bc2a73c1a6f9d1d8a2c2d9da --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt @@ -0,0 +1,10 @@ +lofar_find_package(PythonInterp 3.4 REQUIRED) +include(PythonInstall) + +set(_py_files + subtask_scheduling.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/services) + diff --git a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py b/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py new file mode 100644 index 0000000000000000000000000000000000000000..40912203e7bc7bfbfe20ead7c4cf1c5c6ce4fc17 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3 + +# subtask_scheduling.py +# +# Copyright (C) 2015 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: subtask_scheduling.py 1580 2015-09-30 14:18:57Z loose $ + +""" +The subtask_scheduling service schedules TMSS subtasks. +It listens on the lofar notification message bus for state changes of TMSS subtasks; when a task finished, +it schedules (rest action) all successors that are in state 'defined'. +""" + +import os +from optparse import OptionParser +import logging +logger = logging.getLogger(__name__) + +from lofar.sas.tmss.client.tmssbuslistener import * +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession + +class TMSSSubTaskSchedulingEventMessageHandler(TMSSSubTaskEventMessageHandler): + ''' + ''' + def __init__(self, tmss_client_credentials_id: str=None): + super().__init__() + self.tmss_client = TMSSsession.create_from_dbcreds_for_ldap(tmss_client_credentials_id) + + def start_handling(self): + self.tmss_client.open() + super().start_handling() + + def stop_handling(self): + super().stop_handling() + self.tmss_client.close() + + def onSubTaskFinished(self, subtask_id: int, old_state: str, new_state:str): + '''onSubTaskFinished is called upon received a SubTaskFinished message, which is sent when a SubTasks changes state to "Finished". + :param subtask_id: the TMSS id of the SubTask + :param old_state: the previous state of the SubTask + :param new_state: the new state of the SubTask + ''' + logger.info("subtask %s finished. Trying to schedule defined successor subtasks...", subtask_id) + + successors = self.tmss_client.get_subtask_successors(subtask_id, state="defined") + + if not successors: + logger.info("subtask %s finished. No (defined) successor subtasks to schedule...", subtask_id) + + for successor in successors: + try: + suc_subtask_id = successor['url'].split('/')[successor['url'].split('/').index('subtask')+1] #ugly + suc_subtask_state = successor['state'].split('/')[successor['state'].split('/').index('subtask_state')+1] #ugly + + if suc_subtask_state == "defined": + logger.info("trying to schedule successor subtask %s for finished subtask %s", suc_subtask_id, subtask_id) + scheduled_successor = self.tmss_client.schedule_subtask(suc_subtask_id) + suc_subtask_state = scheduled_successor['state'].split('/')[scheduled_successor['state'].split('/').index('subtask_state')+1] #ugly + logger.info("successor subtask %s for finished subtask %s is now has state '%s'", suc_subtask_id, subtask_id, suc_subtask_state) + else: + logger.warning("skipping scheduling of successor subtask %s for finished subtask %s because its state is '%s'", suc_subtask_id, subtask_id, suc_subtask_state) + + except Exception as e: + logger.error(e) + +def create_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None): + return TMSSSubTaskBusListener(handler_type=TMSSSubTaskSchedulingEventMessageHandler, + handler_kwargs={'tmss_client_credentials_id': tmss_client_credentials_id}, + exchange=exchange, + broker=broker) + +def main(): + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + # Check the invocation arguments + parser = OptionParser('%prog [options]', + description='run the tmss_subtask_scheduling_service which automatically schedules the defined successor tasks for finished subtasks') + parser.add_option('-q', '--broker', dest='broker', type='string', 
default=DEFAULT_BROKER, help='Address of the messaging broker, default: %default') + parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the exchange on the messaging broker, default: %default') + parser.add_option('-t', '--tmss_client_credentials_id', dest='tmss_client_credentials_id', type='string', + default=os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient"), + help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default') + (options, args) = parser.parse_args() + + with create_service(options.exchange, options.broker, options.tmss_client_credentials_id): + waitForInterrupt() + +if __name__ == '__main__': + main() diff --git a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..b9da06a5dc6b27fde81e26c6cc5ba027cae2d821 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt @@ -0,0 +1,7 @@ +# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $ + +if(BUILD_TESTING) + include(LofarCTest) + + lofar_add_test(t_subtask_scheduling_service) +endif() diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py new file mode 100755 index 0000000000000000000000000000000000000000..b171a012430ea1e10f976dc793bc0ead1fa47c04 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
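+ +# This integration test starts a TMSS test environment and the subtask_scheduling service, creates two chained +# subtasks, sets the predecessor subtask to 'finished', and checks that the successor subtask is then automatically scheduled.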
+ +import unittest +import uuid + +import logging +logger = logging.getLogger(__name__) + +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment +from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + +from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor +from lofar.sas.tmss.services.subtask_scheduling import create_service +from lofar.common.test_utils import integration_test +from time import sleep +from datetime import datetime, timedelta + +@integration_test +class TestSubtaskSchedulingService(unittest.TestCase): + ''' + Tests for the SubtaskSchedulingService + ''' + @classmethod + def setUpClass(cls) -> None: + cls.TEST_UUID = uuid.uuid1() + + cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) + cls.tmp_exchange.open() + + cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) + cls.tmss_test_env.start() + + cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url, + (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password)) + + @classmethod + def tearDownClass(cls) -> None: + cls.tmss_test_env.stop() + cls.tmp_exchange.close() + + def test_01_for_expected_behaviour(self): + ''' + This test starts a scheduling service and tmss, creates a chain of subtasks, finishes the first, and checks if the successors are then scheduled. + ''' + + logger.info(' -- test_01_for_expected_behaviour -- ') + + # create and start the service (the object under test) + service = create_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id) + with BusListenerJanitor(service): + # ------------------------- + # long setup of objects.... + + # setup proper template + subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/') + + # create two subtasks + subtask1_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/') + subtask2_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/') + + # ugly + subtask1_id = subtask1_url.split('/')[subtask1_url.split('/').index('subtask') + 1] + subtask2_id = subtask2_url.split('/')[subtask2_url.split('/').index('subtask') + 1] + + # connect them + output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1_url), '/subtask_output/') + input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2_url, subtask_output_url=output_url), '/subtask_input/') + + # ...
end of long setup of objects + # -------------------------------- + + # now for the real test: set subtask1_id status to finished, and check that subtask2 is then properly scheduled + with self.tmss_test_env.create_tmss_client() as tmss_client: + subtask1 = tmss_client.get_subtask(subtask1_id) + subtask2 = tmss_client.get_subtask(subtask2_id) + + subtask1_status = subtask1['state'].split('/')[subtask1['state'].split('/').index('subtask_state')+1] #ugly + subtask2_status = subtask2['state'].split('/')[subtask2['state'].split('/').index('subtask_state')+1] #ugly + self.assertEqual(subtask1_status, 'defined') + self.assertEqual(subtask2_status, 'defined') + + # the first subtask ran, and is now finished... set its status. This should trigger the scheduling service to schedule the second subtask. + tmss_client.set_subtask_status(subtask1_id, 'finished') + + # allow some time for the scheduling service to do its thing... + start = datetime.utcnow() + while subtask2_status != 'scheduled': + subtask2 = tmss_client.get_subtask(subtask2_id) + subtask2_status = subtask2['state'].split('/')[subtask2['state'].split('/').index('subtask_state')+1] #ugly + sleep(0.5) + if datetime.utcnow() - start > timedelta(seconds=2): + raise TimeoutError() + + # subtask2 should now be scheduled + self.assertEqual(subtask2_status, 'scheduled') + +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +if __name__ == '__main__': + #run the unit tests + unittest.main() diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run new file mode 100755 index 0000000000000000000000000000000000000000..a38aefc96f84db6b0d634f11e0524ff4513191b5 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_subtask_scheduling_service.py + diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh new file mode 100755 index 0000000000000000000000000000000000000000..60abec462c84d1a99cf2df03b1368271772dec55 --- /dev/null +++ b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_subtask_scheduling_service \ No newline at end of file diff --git a/SAS/TMSS/src/CMakeLists.txt b/SAS/TMSS/src/CMakeLists.txt index 89ee7dbb05ece8f0f0f21fbf65f1f5c099adf9e3..fd5a8389a74c27f43c3def1fadb5a87813d9212f 100644 --- a/SAS/TMSS/src/CMakeLists.txt +++ b/SAS/TMSS/src/CMakeLists.txt @@ -26,7 +26,6 @@ find_python_module(swagger_spec_validator REQUIRED) # pip install swagger-spec-v set(_py_files manage.py remakemigrations.py - util.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/Dockerfile-tmss b/SAS/TMSS/src/Dockerfile-tmss index f5e6d92b5543b2bd178ff01e4e5fa4b2659a16d3..6a51128e3f1c8203b1fbda9b26f79637c6674d8d 100644 --- a/SAS/TMSS/src/Dockerfile-tmss +++ b/SAS/TMSS/src/Dockerfile-tmss @@ -7,8 +7,10 @@ # # docker build [-t image_name:tag] -f docker/Dockerfile-tmss .
# -FROM ci_sas:latest +ARG SAS_VERSION=latest +FROM ci_sas:$SAS_VERSION +USER lofarsys RUN mkdir -p /opt/lofar # Adding backend directory to make absolute filepaths consistent across services diff --git a/SAS/TMSS/src/templates/josdejong_jsoneditor_widget.html b/SAS/TMSS/src/templates/josdejong_jsoneditor_widget.html index 3a00237bfbedb946e1218c3e7ccc1678d4be5075..d715008e7355ad179527119f2b5a3b43003f446b 100644 --- a/SAS/TMSS/src/templates/josdejong_jsoneditor_widget.html +++ b/SAS/TMSS/src/templates/josdejong_jsoneditor_widget.html @@ -2,10 +2,9 @@ <!-- EXTERNAL RESOURCES --> <!--<link rel="stylesheet" id="theme" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">--> <!--<script src="https://unpkg.com/react@16/umd/react.development.js"></script>--> -<!--<script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script> +<!--<script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script>--> <!--<script src="https://unpkg.com/babel-standalone@6.15.0/babel.min.js"></script>--> <link rel="stylesheet" id="theme" href="https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/5.24.7/jsoneditor.css"> -<link rel="stylesheet" id="custom" href="custom.css"> <!--<script src="https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/5.24.7/img/jsoneditor-icons.svg"></script>--> <script src="https://cdnjs.cloudflare.com/ajax/libs/jsoneditor/5.24.7/jsoneditor.js"></script> @@ -16,10 +15,10 @@ {% if field.label %}{{ field.label }}{% endif %} </label> - <input id="helper_input" name="{{ field.name }}" type="hidden" {% if field.value != None %} value="{{ field.value|safe }}"{% endif %}"> + <input id="helper_input_{{ field.name }}" name="{{ field.name }}" type="hidden" {% if field.value != None %} value="{{ field.value|safe }}"{% endif %}"> <div class="col-sm-10 clearfix"> - <div id="app"></div> + <div id="app_{{ field.name }}"></div> </div> <!--<pre> {% debug %} </pre>--> @@ -38,16 +37,20 @@ } console.log("Initial data: " + JSON.stringify(formData)); + // init input field value + document.getElementById("helper_input_{{ field.name }}").value = JSON.stringify(formData); + + // Read schema (added to widget style by Serializer) var schema = {{ field.style.schema|safe }}; console.log('Schema is: ' + JSON.stringify(schema)); // create the editor - var container = document.getElementById("app"); + var container = document.getElementById("app_{{ field.name }}"); var options = { // Updates the hidden input that Django will read the modified JSON from onChangeJSON: function (json) { - document.getElementById("helper_input").value = JSON.stringify(json); + document.getElementById("helper_input_{{ field.name }}").value = JSON.stringify(json); console.log(JSON.stringify(json)); }, "mode": "tree", @@ -55,6 +58,7 @@ "schema": schema }; var editor = new JSONEditor(container, options, formData); + console.log('Rendering done'); </script> diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py index 0ea1ea394479bb5b63e10ce3a689f739c135ab7c..c77b7226f0474e10745600e2b71f4284b141f8a8 100644 --- a/SAS/TMSS/src/tmss/exceptions.py +++ b/SAS/TMSS/src/tmss/exceptions.py @@ -7,3 +7,12 @@ class SchemaValidationException(TMSSException): class ConversionException(TMSSException): pass + +class SchedulingException(TMSSException): + pass + +class SubtaskSchedulingException(SchedulingException): + pass + +class TaskSchedulingException(SchedulingException): + pass diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index 
3431adaf55bc704056f6547db9080f1a9da37bf1..908a5814f3b2e00ce6827b5e963930096cc18655 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -30,7 +30,10 @@ LOGGING = { 'formatters': { 'django.server': { '()': 'django.utils.log.ServerFormatter', - 'format': '[%(server_time)s] %(message)s', + 'format': '%(asctime)s %(levelname)s %(message)s', + }, + 'lofar': { + 'format': '%(asctime)s %(levelname)s %(message)s', }, }, 'handlers': { @@ -49,6 +52,11 @@ LOGGING = { 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' }, + 'lofar': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'lofar', + }, }, 'loggers': { 'django': { @@ -69,6 +77,10 @@ LOGGING = { 'level': 'DEBUG', # change debug level as appropiate 'propagate': False, }, + 'lofar': { + 'handlers': ['lofar'], + 'level': 'INFO', + }, } } @@ -253,7 +265,7 @@ if "OIDC_RP_CLIENT_ID" in os.environ.keys(): AUTHENTICATION_BACKENDS += ('mozilla_django_oidc.auth.OIDCAuthenticationBackend',) MIDDLEWARE.append('mozilla_django_oidc.middleware.SessionRefresh') -if len(AUTHENTICATION_BACKENDS) is 1: +if len(AUTHENTICATION_BACKENDS) == 1: REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'].append('rest_framework.permissions.AllowAny') # todo: Whoo! This seems unsafe! Maybe we should at least have users explicitly disable authentication on startup?! logger.warning("No authentication configured! please set either OIDC_RP_CLIENT_ID or TMSS_LDAPCREDENTIALS environment variable.") @@ -286,19 +298,14 @@ AUTH_PASSWORD_VALIDATORS = [ # https://docs.djangoproject.com/en/2.0/topics/i18n/ LANGUAGE_CODE = 'en-us' - TIME_ZONE = 'UTC' - USE_I18N = True - USE_L10N = True - USE_TZ = False # We don't want timezone support since everything is UTC anyway and this caused trouble in the past # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.0/howto/static-files/ - STATIC_URL = '/static/' STATIC_ROOT = '/staticfiles/' @@ -307,3 +314,13 @@ STATIC_ROOT = '/staticfiles/' # Setup support for proxy USE_X_FORWARDED_HOST = True SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') + +SWAGGER_SETTINGS = { + + 'SECURITY_DEFINITIONS': { + 'basic': { + 'type': 'basic' + } + }, + +} \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt index 2d831505833f3c1d4380e83f51abe22ec62f10fa..3a7daaa829f09722bd78609d1ec653ec2240d133 100644 --- a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt @@ -8,6 +8,8 @@ set(_py_files views.py populate.py validation.py + subtasks.py + tasks.py ) python_install(${_py_files} @@ -16,6 +18,7 @@ python_install(${_py_files} add_subdirectory(migrations) add_subdirectory(models) +add_subdirectory(renderers) add_subdirectory(serializers) add_subdirectory(viewsets) add_subdirectory(adapters) diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py index 34e61699ca764827017e63df32a2c6298520292b..86200b5ed56655c90b52f9807093e045578c73f1 100644 --- a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py @@ -22,6 +22,7 @@ from lofar.parameterset import parameterset from lofar.common.datetimeutils import formatDatetime from lofar.common.json_utils import add_defaults_to_json_object_for_schema from lofar.sas.tmss.tmss.exceptions import * +from datetime import datetime def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parameterset: # make sure 
the spec is complete (including all non-filled in properties with default) @@ -45,8 +46,8 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize() parset["Observation.processSubtype"] = "Beam Observation" # TODO: where to derive the processSubtype from? parset["Observation.Campaign.name"] = "TMSS_test" #toDo: replace by project name - parset["Observation.startTime"] = formatDatetime(subtask.start_time) - parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) + parset["Observation.startTime"] = formatDatetime(subtask.start_time) if isinstance(subtask.start_time, datetime) else subtask.start_time + parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) if isinstance(subtask.stop_time, datetime) else subtask.stop_time parset["Observation.VirtualInstrument.minimalNrStations"] = 1 # maybe not mandatory? parset["Observation.VirtualInstrument.stationSet"] = "Custom" # maybe not mandatory? parset["Observation.VirtualInstrument.stationList"] = "[%s]" % ','.join(s for s in spec["stations"]["station_list"]) @@ -103,6 +104,7 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset["Observation.DataProducts.Output_Correlated.locations"] = [] # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id)) + # TODO don't we have to append to dataproducts here and then fill in the combined list in the end? for subtask_output in subtask_outputs: dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) parset["Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ','.join(dp.filename for dp in dataproducts) @@ -136,8 +138,171 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset = parameterset(parset) return parset + +def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> parameterset: + # see https://support.astron.nl/confluence/pages/viewpage.action?spaceKey=TMSS&title=UC1+JSON + + # make sure the spec is complete (including all non-filled in properties with default) + spec = add_defaults_to_json_object_for_schema(subtask.specifications_doc, subtask.specifications_template.schema) + + # ----------------------------------------------------------------------------------------------- + # Historic rationale: in TMSS-183 we made MAC run an actual observation from a TMSS specification. + # With the help of Auke and Jan-David I could generate the parset as defined below. + # MAC turned out to be very sensitive about having specific keys with very specific prefixes etc. + # As a result, the generated parset contains many "duplicate" (nested) keys. + # We all agree that this is ugly, and we should not want this, but hey... it works. + # We decided to keep it like this, and maybe do more tuning/pruning later in the TMSS project. + # Or, we can just get rid of this to-parset-adapter when MAC has been rewritten to the new station API. + # ----------------------------------------------------------------------------------------------- + + parset = dict() # parameterset has no proper assignment operators, so take detour via dict...
+ + # General + parset["ObsSW.Observation.processType"] = "Pipeline" + parset["ObsSW.Observation.ObservationControl.PythonControl.pythonProgram"] = "preprocessing_pipeline.py" + parset["ObsSW.Observation.ObservationControl.PythonControl.softwareVersion"] = "" + parset["ObsSW.Observation.Campaign.name"] = "<project_name>" # todo, but how? + parset["ObsSW.Observation.Scheduler.taskName"] = subtask.task_blueprint.name + parset["ObsSW.Observation.Scheduler.predecessors"] = [] + parset["ObsSW.Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name + parset["ObsSW.Observation.Cluster.ProcessingCluster.clusterPartition"] = 'cpu' + + # DPPP steps + dppp_steps = [] + if "preflagger0" in spec: + dppp_steps.append('preflagger[0]') + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].chan"] = "[%s]" % spec["preflagger0"]["channels"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].abstime"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].azimuth"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].baseline"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].blrange"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].corrtype"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].count.path"] = "-" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].count.save"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].elevation"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].expr"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].freqrange"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].lst"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].reltime"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].timeofday"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].timeslot"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].type"] = "preflagger" + + if 'preflagger1' in spec: + dppp_steps.append('preflagger[1]') + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].corrtype"] = spec["preflagger1"]["corrtype"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].abstime"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].azimuth"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].baseline"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].blrange"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].chan"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].count.path"] = "-" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].count.save"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].elevation"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].expr"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].freqrange"] = "[]" + 
parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].lst"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].reltime"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].timeofday"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].timeslot"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].type"] = "preflagger" + + if 'aoflagger' in spec: + dppp_steps.append('aoflagger') + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.strategy"] = spec["aoflagger"]["strategy"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.autocorr"] = "F" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.count.path"] = "-" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.count.save"] = "FALSE" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.keepstatistics"] = "T" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.memorymax"] = "10" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.memoryperc"] = "0" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.overlapmax"] = "0" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.overlapperc"] = "0" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.pedantic"] = "F" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.pulsar"] = "F" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.timewindow"] = "0" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.type"] = "aoflagger" + + if "demixer" in spec: + dppp_steps.append('demixer') + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.baseline"] = spec["demixer"]["baselines"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.demixfreqstep"] = spec["demixer"]["demix_frequency_steps"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.demixtimestep"] = spec["demixer"]["demix_time_steps"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep"] = spec["demixer"]["frequency_steps"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.ignoretarget"] = spec["demixer"]["ignore_target"] + parset["ObsSW.Observation.ObservationControl.PythonControl.PreProcessing.demix_always"] = spec["demixer"]["demix_always"] + parset["ObsSW.Observation.ObservationControl.PythonControl.PreProcessing.demix_if_needed"] = spec["demixer"]["demix_if_needed"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.blrange"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.corrtype"] = "cross" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.elevationcutoff"] = "0.0deg" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.instrumentmodel"] = "instrument" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.modelsources"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.ntimechunk"] = "0" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.othersources"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.skymodel"] = "sky" + 
parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.subtractsources"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.targetsource"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.type"] = "demixer" + + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.steps"] = "[%s]" % ",".join(dppp_steps) + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.storagemanager.name"] = spec["storagemanager"] # todo: needs to be emptystring when standard/basic/non-dysco? + + # Dataproducts + parset["ObsSW.Observation.DataProducts.Input_Correlated.enabled"] = "true" + + in_dataproducts = [] + for subtask_input in subtask.inputs.all(): + in_dataproducts = subtask_input.dataproducts.all() + parset["ObsSW.Observation.DataProducts.Input_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in in_dataproducts]) + parset["ObsSW.Observation.DataProducts.Input_Correlated.locations"] = "[%s]" % ",".join([dp.directory for dp in in_dataproducts]) + parset["ObsSW.Observation.DataProducts.Input_Correlated.skip"] = "[%s]" % ",".join(['0']*len(in_dataproducts)) + + # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work + subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id)) + out_dataproducts = [] + for subtask_output in subtask_outputs: + out_dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) + parset["ObsSW.Observation.DataProducts.Output_Correlated.enabled"] = "true" + parset["ObsSW.Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in out_dataproducts]) + parset["ObsSW.Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ",".join([dp.directory for dp in out_dataproducts]) + parset["ObsSW.Observation.DataProducts.Output_Correlated.skip"] = "[%s]" % ",".join(['0']*len(out_dataproducts)) + + # Other + parset["ObsSW.Observation.ObservationControl.PythonControl.PreProcessing.SkyModel"] = "Ateam_LBA_CC" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.checkparset"] = "-1" + + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.autoweight"] = "true" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.band"] = "-1" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.baseline"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.blrange"] = "[]" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.corrtype"] = "" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.datacolumn"] = "DATA" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.forceautoweight"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.missingdata"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.nchan"] = "nchan" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.orderms"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.sort"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.startchan"] = "0" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.useflag"] = "true" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.overwrite"] = "false" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.tilenchan"] = "8" + 
parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.tilesize"] = "4096" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.vdsdir"] = "A" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.writefullresflag"] = "true" + + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.showprogress"] = "F" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.showtimings"] = "F" + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.uselogger"] = "T" + + # convert dict to real parameterset, and return it + parset = parameterset(parset) + return parset + + # dict to store conversion methods based on subtask.specifications_template.name -_convertors = {'observationcontrol schema': _convert_to_parset_for_observationcontrol_schema } +_convertors = {'observationcontrol schema': _convert_to_parset_for_observationcontrol_schema, + 'pipelinecontrol schema': _convert_to_parset_for_pipelinecontrol_schema } + def convert_to_parset(subtask: models.Subtask) -> parameterset: ''' @@ -146,7 +311,8 @@ def convert_to_parset(subtask: models.Subtask) -> parameterset: ''' try: convertor = _convertors[subtask.specifications_template.name] - return convertor(subtask) except KeyError: raise ConversionException("Cannot convert subtask id=%d to parset. No conversion routine available for specifications_template='%s'" % ( subtask.id, subtask.specifications_template.name)) + + return convertor(subtask) diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index eacc7a839abe6e06dcff9f37faeab3285a44ede8..59f8844375a6d721e5a569c6ef3bd919ed86d94d 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.10 on 2020-04-17 08:37 +# Generated by Django 2.2.12 on 2020-05-27 09:15 from django.conf import settings import django.contrib.postgres.fields @@ -26,6 +26,22 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='AntennaSet', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)), + ('inputs', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='Cluster', fields=[ @@ -94,7 +110,21 @@ class Migration(migrations.Migration): ('expected_size', models.BigIntegerField(help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).', null=True)), ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. 
NULL if size is (yet) unknown (NULLable).', null=True)), ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')), - ('dataformat', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductArchiveInfo', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)), + ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).', null=True)), + ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).', null=True)), ], options={ 'abstract': False, @@ -112,6 +142,16 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], + ), + migrations.CreateModel( + name='DataproductHash', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('hash', models.CharField(help_text='Hash value.', max_length=128)), + ], options={ 'abstract': False, }, @@ -128,6 +168,16 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], + ), + migrations.CreateModel( + name='DataproductTransform', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('identity', models.BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. 
Allows for efficient reasoning about data duplication.')), + ], options={ 'abstract': False, }, @@ -141,6 +191,99 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='DefaultDataproductSpecificationsTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultGeneratorTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultSchedulingUnitTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultSubtaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultTaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + 
name='DefaultWorkRelationSelectionTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Filesystem', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('capacity', models.BigIntegerField(help_text='Capacity in bytes')), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='GeneratorTemplate', fields=[ @@ -154,9 +297,6 @@ class Migration(migrations.Migration): ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ('create_function', models.CharField(help_text='Python function to call to execute the generator.', max_length=128)), ], - options={ - 'abstract': False, - }, ), migrations.CreateModel( name='Project', @@ -171,7 +311,26 @@ class Migration(migrations.Migration): ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. 
Sensitive data is not made public.')), ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')), ('filler', models.BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')), - ('cycle', models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ProjectQuota', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('value', models.FloatField(help_text='Resource Quota value')), + ], + ), + migrations.CreateModel( + name='ResourceType', + fields=[ + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), ], options={ 'abstract': False, @@ -250,8 +409,6 @@ class Migration(migrations.Migration): ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this run.')), ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)), - ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.SchedulingUnitDraft')), - ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')), ], options={ 'abstract': False, @@ -269,9 +426,6 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], - options={ - 'abstract': False, - }, ), migrations.CreateModel( name='StationType', @@ -294,24 +448,19 @@ class Migration(migrations.Migration): ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')), ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)), ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')), - ('scheduler_input_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Partial specifications, as input for the scheduler.')), - ('cluster', models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')), - ('created_or_updated_by_user', 
models.ForeignKey(editable=False, help_text='The user who created / updated the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), - ('schedule_method', models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='SubtaskConnector', + name='SubtaskInput', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')), - ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')), + ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')), ], options={ 'abstract': False, @@ -329,6 +478,15 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], + ), + migrations.CreateModel( + name='SubtaskOutput', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ], options={ 'abstract': False, }, @@ -342,6 +500,34 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='SubtaskStateLog', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('user_identifier', models.CharField(editable=False, help_text='The ID of the user who changed the state of the subtask.', max_length=128, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SubtaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', 
models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('queue', models.BooleanField(default=False)), + ('realtime', models.BooleanField(default=False)), + ], + ), migrations.CreateModel( name='SubtaskType', fields=[ @@ -376,14 +562,12 @@ class Migration(migrations.Migration): }, ), migrations.CreateModel( - name='TaskConnectors', + name='TaskConnector', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')), - ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')), ], options={ 'abstract': False, @@ -399,103 +583,179 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')), - ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.TaskDraft')), - ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')), - ('scheduling_unit_draft', models.ForeignKey(help_text='Scheduling Unit draft to which this task draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='task_drafts', to='tmssapp.SchedulingUnitDraft')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='TaskTemplate', + name='TaskRelationBlueprint', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same 
name).', max_length=128)), - ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), - ('validation_code_js', models.CharField(help_text='JavaScript code for additional (complex) validation.', max_length=128)), + ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='WorkRelationSelectionTemplate', + name='TaskRelationDraft', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), - ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='TaskRelationDraft', + name='TaskTemplate', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), - ('consumer', models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft')), - ('dataformat', models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), - ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnectors')), - ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnectors')), - ('producer', models.ForeignKey(help_text='Task Draft that has the output connector. 
NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft')), - ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('validation_code_js', models.CharField(help_text='JavaScript code for additional (complex) validation.', max_length=128)), ], - options={ - 'abstract': False, - }, ), migrations.CreateModel( - name='TaskRelationBlueprint', + name='WorkRelationSelectionTemplate', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), - ('consumer', models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint')), - ('dataformat', models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), - ('draft', models.ForeignKey(help_text='Task Relation Draft which this work request instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_relation_blueprint', to='tmssapp.TaskRelationDraft')), - ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnectors')), - ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnectors')), - ('producer', models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint')), - ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the 
configurable parameters needed to use this template.')), ], - options={ - 'abstract': False, - }, ), - migrations.AddField( - model_name='taskdraft', - name='specifications_template', - field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), + migrations.AddConstraint( + model_name='workrelationselectiontemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='WorkRelationSelectionTemplate_unique_name_version'), + ), + migrations.AddConstraint( + model_name='tasktemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='TaskTemplate_unique_name_version'), ), migrations.AddField( - model_name='taskconnectors', - name='input_of', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'), + model_name='taskrelationdraft', + name='consumer', + field=models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft'), ), migrations.AddField( - model_name='taskconnectors', - name='output_of', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskTemplate'), + model_name='taskrelationdraft', + name='dataformat', + field=models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), ), migrations.AddField( - model_name='taskconnectors', + model_name='taskrelationdraft', + name='input', + field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationdraft', + name='output', + field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationdraft', + name='producer', + field=models.ForeignKey(help_text='Task Draft that has the output connector. 
NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft'), + ), + migrations.AddField( + model_name='taskrelationdraft', + name='selection_template', + field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='consumer', + field=models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='dataformat', + field=models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='draft', + field=models.ForeignKey(help_text='Task Relation Draft which this work request instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_relation_blueprint', to='tmssapp.TaskRelationDraft'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='input', + field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='output', + field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='producer', + field=models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='selection_template', + field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate'), + ), + migrations.AddField( + model_name='taskdraft', + name='copies', + field=models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.TaskDraft'), + ), + migrations.AddField( + model_name='taskdraft', + name='copy_reason', + field=models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason'), + ), + migrations.AddField( + model_name='taskdraft', + name='scheduling_unit_draft', + field=models.ForeignKey(help_text='Scheduling Unit draft to which this task draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='task_drafts', to='tmssapp.SchedulingUnitDraft'), + ), + migrations.AddField( + model_name='taskdraft', + name='specifications_template', + field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnector', + name='dataformats', + field=models.ManyToManyField(blank=True, to='tmssapp.Dataformat'), + ), + 
migrations.AddField( + model_name='taskconnector', + name='datatype', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype'), + ), + migrations.AddField( + model_name='taskconnector', + name='input_of', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnector', + name='output_of', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnector', name='role', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'), ), @@ -514,89 +774,79 @@ class Migration(migrations.Migration): name='specifications_template', field=models.ForeignKey(help_text='Schema used for specifications_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), ), - migrations.CreateModel( - name='SubtaskTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), - ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), - ('queue', models.BooleanField(default=False)), - ('realtime', models.BooleanField(default=False)), - ('type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskType')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtasktemplate', + name='type', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskType'), ), - migrations.CreateModel( - name='SubtaskStateLog', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('user_identifier', models.CharField(editable=False, help_text='The ID of the user who changed the state of the subtask.', max_length=128, null=True)), - ('new_state', models.ForeignKey(editable=False, help_text='Subtask state after update (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='is_new_state_of', to='tmssapp.SubtaskState')), - ('old_state', models.ForeignKey(editable=False, help_text='Subtask state before update (see Subtask State Machine).', null=True, on_delete=django.db.models.deletion.PROTECT, 
related_name='is_old_state_of', to='tmssapp.SubtaskState')), - ('subtask', models.ForeignKey(editable=False, help_text='Subtask to which this state change refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), - ('user', models.ForeignKey(editable=False, help_text='The user who changed the state of the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtaskstatelog', + name='new_state', + field=models.ForeignKey(editable=False, help_text='Subtask state after update (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='is_new_state_of', to='tmssapp.SubtaskState'), ), - migrations.CreateModel( - name='SubtaskOutput', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('connector', models.ForeignKey(help_text='Which connector this Subtask Output implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')), - ('subtask', models.ForeignKey(help_text='Subtask to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtaskstatelog', + name='old_state', + field=models.ForeignKey(editable=False, help_text='Subtask state before update (see Subtask State Machine).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='is_old_state_of', to='tmssapp.SubtaskState'), ), - migrations.CreateModel( - name='SubtaskInput', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')), - ('connector', models.ForeignKey(help_text='Which connector this Task Input implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')), - ('dataproducts', models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. 
We thus store the result of this filtering directly to retain which input was specified for the task..', to='tmssapp.Dataproduct')), - ('producer', models.ForeignKey(help_text='The Subtask Output providing the input dataproducts.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput')), - ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskInputSelectionTemplate')), - ('subtask', models.ForeignKey(help_text='Subtask to which this input specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), - ('task_relation_blueprint', models.ForeignKey(help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.TaskRelationBlueprint')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtaskstatelog', + name='subtask', + field=models.ForeignKey(editable=False, help_text='Subtask to which this state change refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask'), ), migrations.AddField( - model_name='subtaskconnector', - name='input_of', - field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.SubtaskTemplate'), + model_name='subtaskstatelog', + name='user', + field=models.ForeignKey(editable=False, help_text='The user who changed the state of the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), ), migrations.AddField( - model_name='subtaskconnector', - name='output_of', - field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.SubtaskTemplate'), + model_name='subtaskoutput', + name='subtask', + field=models.ForeignKey(help_text='Subtask to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.Subtask'), + ), + migrations.AddConstraint( + model_name='subtaskinputselectiontemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='SubtaskInputSelectionTemplate_unique_name_version'), ), migrations.AddField( - model_name='subtaskconnector', - name='role', - field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'), + model_name='subtaskinput', + name='dataproducts', + field=models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. 
We thus store the result of this filtering directly to retain which input was specified for the task..', to='tmssapp.Dataproduct'), + ), + migrations.AddField( + model_name='subtaskinput', + name='producer', + field=models.ForeignKey(help_text='The SubtaskOutput producing the input dataproducts for this SubtaskInput.', on_delete=django.db.models.deletion.PROTECT, related_name='consumers', to='tmssapp.SubtaskOutput'), + ), + migrations.AddField( + model_name='subtaskinput', + name='selection_template', + field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskInputSelectionTemplate'), + ), + migrations.AddField( + model_name='subtaskinput', + name='subtask', + field=models.ForeignKey(help_text='Subtask to which this input specification refers.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.Subtask'), + ), + migrations.AddField( + model_name='subtaskinput', + name='task_relation_blueprint', + field=models.ForeignKey(help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.TaskRelationBlueprint'), + ), + migrations.AddField( + model_name='subtask', + name='cluster', + field=models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster'), + ), + migrations.AddField( + model_name='subtask', + name='created_or_updated_by_user', + field=models.ForeignKey(editable=False, help_text='The user who created / updated the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='subtask', + name='schedule_method', + field=models.ForeignKey(help_text='Which method to use for scheduling this Subtask. 
One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod'), ), migrations.AddField( model_name='subtask', @@ -613,6 +863,20 @@ class Migration(migrations.Migration): name='task_blueprint', field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'), ), + migrations.AddConstraint( + model_name='schedulingunittemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='SchedulingUnitTemplate_unique_name_version'), + ), + migrations.AddField( + model_name='schedulingunitdraft', + name='copies', + field=models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.SchedulingUnitDraft'), + ), + migrations.AddField( + model_name='schedulingunitdraft', + name='copy_reason', + field=models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason'), + ), migrations.AddField( model_name='schedulingunitdraft', name='requirements_template', @@ -648,174 +912,102 @@ class Migration(migrations.Migration): name='project', field=models.ForeignKey(help_text='Project to which this scheduling set belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_sets', to='tmssapp.Project'), ), - migrations.CreateModel( - name='ResourceType', - fields=[ - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), - ('resource_unit', models.ForeignKey(help_text='Unit of current resource.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_types', to='tmssapp.ResourceUnit')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='resourcetype', + name='resource_unit', + field=models.ForeignKey(help_text='Unit of current resource.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_types', to='tmssapp.ResourceUnit'), ), - migrations.CreateModel( - name='ProjectQuota', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('value', models.FloatField(help_text='Resource Quota value')), - ('project', models.ForeignKey(help_text='Project to wich this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.Project')), - ('resource_type', models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_type', to='tmssapp.ResourceType')), - ], + migrations.AddField( + model_name='projectquota', + name='project', + field=models.ForeignKey(help_text='Project to wich this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.Project'), ), - migrations.CreateModel( - name='Filesystem', - fields=[ - 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('capacity', models.BigIntegerField(help_text='Capacity in bytes')), - ('cluster', models.ForeignKey(help_text='Cluster hosting this filesystem.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='projectquota', + name='resource_type', + field=models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_type', to='tmssapp.ResourceType'), ), - migrations.CreateModel( - name='DefaultWorkRelationSelectionTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.WorkRelationSelectionTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='project', + name='cycle', + field=models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle'), ), - migrations.CreateModel( - name='DefaultTaskTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.TaskTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddConstraint( + model_name='generatortemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='GeneratorTemplate_unique_name_version'), ), - migrations.CreateModel( - name='DefaultSubtaskTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', 
models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='filesystem', + name='cluster', + field=models.ForeignKey(help_text='Cluster hosting this filesystem.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster'), ), - migrations.CreateModel( - name='DefaultSchedulingUnitTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultworkrelationselectiontemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.WorkRelationSelectionTemplate'), ), - migrations.CreateModel( - name='DefaultGeneratorTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.GeneratorTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaulttasktemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.TaskTemplate'), ), - migrations.CreateModel( - name='DefaultDataproductSpecificationsTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductSpecificationsTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultsubtasktemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate'), ), - 
migrations.CreateModel( - name='DataproductTransform', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('identity', models.BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. Allows for efficient reasoning about data duplication.')), - ('input', models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.Dataproduct')), - ('output', models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.Dataproduct')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultschedulingunittemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitTemplate'), ), - migrations.CreateModel( - name='DataproductHash', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('hash', models.CharField(help_text='Hash value.', max_length=128)), - ('algorithm', models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm')), - ('dataproduct', models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultgeneratortemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.GeneratorTemplate'), ), - migrations.CreateModel( - name='DataproductArchiveInfo', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)), - ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).', null=True)), - ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL 
if dataproduct is not known to be corrupt (NULLable).', null=True)), - ('dataproduct', models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultdataproductspecificationstemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductSpecificationsTemplate'), + ), + migrations.AddField( + model_name='dataproducttransform', + name='input', + field=models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.Dataproduct'), + ), + migrations.AddField( + model_name='dataproducttransform', + name='output', + field=models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.Dataproduct'), + ), + migrations.AddConstraint( + model_name='dataproductspecificationstemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='DataproductSpecificationsTemplate_unique_name_version'), + ), + migrations.AddField( + model_name='dataproducthash', + name='algorithm', + field=models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm'), + ), + migrations.AddField( + model_name='dataproducthash', + name='dataproduct', + field=models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct'), + ), + migrations.AddConstraint( + model_name='dataproductfeedbacktemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='DataproductFeedbackTemplate_unique_name_version'), + ), + migrations.AddField( + model_name='dataproductarchiveinfo', + name='dataproduct', + field=models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct'), + ), + migrations.AddField( + model_name='dataproduct', + name='dataformat', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), ), migrations.AddField( model_name='dataproduct', @@ -825,29 +1017,17 @@ class Migration(migrations.Migration): migrations.AddField( model_name='dataproduct', name='producer', - field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput'), + field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='dataproducts', to='tmssapp.SubtaskOutput'), ), migrations.AddField( model_name='dataproduct', name='specifications_template', field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.DataproductSpecificationsTemplate'), ), - migrations.CreateModel( - name='AntennaSet', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - 
('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)), - ('inputs', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)), - ('station_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.StationType')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='antennaset', + name='station_type', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.StationType'), ), migrations.AddIndex( model_name='taskrelationdraft', @@ -858,8 +1038,12 @@ class Migration(migrations.Migration): index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_256437_gin'), ), migrations.AddIndex( - model_name='taskconnectors', - index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_0ebd6d_gin'), + model_name='taskconnector', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_a12728_gin'), + ), + migrations.AddConstraint( + model_name='subtasktemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='SubtaskTemplate_unique_name_version'), ), migrations.AddIndex( model_name='subtaskstatelog', @@ -873,10 +1057,6 @@ class Migration(migrations.Migration): model_name='subtaskinput', index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_fb9960_gin'), ), - migrations.AddIndex( - model_name='subtaskconnector', - index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_60e299_gin'), - ), migrations.AddIndex( model_name='subtask', index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_d2fc43_gin'), diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py index 7a25dab177badfe0cab6eeaecee51d3d1fdfee19..e33461a92950831b9b30d7cec2b13a718689035e 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py @@ -17,6 +17,5 @@ class Migration(migrations.Migration): # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'), migrations.RunPython(populate_choices), - migrations.RunPython(populate_resources), migrations.RunPython(populate_misc), migrations.RunPython(populate_lofar_json_schemas) ] diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index 36cdc05269d5276798ec6163448b3f889ed24eac..48d9ca29ff7bd86466326ddeb51c47ec36c9f4e3 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -2,33 +2,29 @@ This file contains the database models """ +import os +import logging +logger = logging.getLogger(__name__) + from django.db.models import ForeignKey, CharField, DateTimeField, BooleanField, IntegerField, BigIntegerField, \ - ManyToManyField, CASCADE, SET_NULL, PROTECT + ManyToManyField, CASCADE, SET_NULL, PROTECT, UniqueConstraint, QuerySet from django.contrib.postgres.fields import 
ArrayField, JSONField
 from django.contrib.auth.models import User
 from .specification import AbstractChoice, BasicCommon, Template, NamedCommon # , <TaskBlueprint
 from enum import Enum
 from rest_framework.serializers import HyperlinkedRelatedField
 from django.dispatch import receiver
+from django.db.models.expressions import RawSQL
 
 from lofar.sas.tmss.tmss.tmssapp.validation import validate_json_against_schema
+from lofar.messaging.messagebus import ToBus, DEFAULT_BROKER, DEFAULT_BUSNAME
+from lofar.messaging.messages import EventMessage
+from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX
 
 #
 # I/O
 #
 
-class SubtaskConnector(BasicCommon):
-    """
-    Represents the relation between input and output of the Subtasks. Some of these relations implement the Task
-    Relations. An input is tied to an output of another Subtask, and allows a filter to be specified.
-    """
-    role = ForeignKey('Role', null=False, on_delete=PROTECT)
-    datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT)
-    dataformats = ManyToManyField('Dataformat', blank=True)
-    output_of = ForeignKey('SubtaskTemplate', related_name='inputs', blank=True, on_delete=PROTECT)
-    input_of = ForeignKey('SubtaskTemplate', related_name='outputs', blank=True, on_delete=PROTECT)
-
-
 #
 # Choices
 #
@@ -60,6 +56,8 @@ class SubtaskType(AbstractChoice):
         PIPELINE = "pipeline"
         COPY = "copy"
         INSPECTION = "inspection"
+        QA_FILES = "qa_files"  # task which creates "adder" QA h5 file(s) from a MeasurementSet of beamformed data
+        QA_PLOTS = "qa_plots"  # task which creates "adder" QA plots from an "adder" QA h5 file
         DELETION = "deletion"
         MANUAL = 'manual'
         OTHER = 'other'
@@ -103,6 +101,10 @@ class SubtaskTemplate(Template):
     queue = BooleanField(default=False)
     realtime = BooleanField(default=False)
 
+    class Meta:
+        # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%(class)s_unique_name_version')
+        constraints = [UniqueConstraint(fields=['name', 'version'], name='SubtaskTemplate_unique_name_version')]
+
 
 class DefaultSubtaskTemplate(BasicCommon):
     name = CharField(max_length=128, unique=True)
@@ -110,7 +112,9 @@ class DefaultSubtaskTemplate(BasicCommon):
 
 
 class DataproductSpecificationsTemplate(Template):
-    pass
+    class Meta:
+        # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%(class)s_unique_name_version')
+        constraints = [UniqueConstraint(fields=['name', 'version'], name='DataproductSpecificationsTemplate_unique_name_version')]
 
 
 class DefaultDataproductSpecificationsTemplate(BasicCommon):
@@ -119,13 +123,17 @@ class DefaultDataproductSpecificationsTemplate(BasicCommon):
 
 
 class SubtaskInputSelectionTemplate(Template):
-    pass
+    class Meta:
+        # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%(class)s_unique_name_version')
+        constraints = [UniqueConstraint(fields=['name', 'version'], name='SubtaskInputSelectionTemplate_unique_name_version')]
 
 
 # todo: so we need to specify a default?
 class DataproductFeedbackTemplate(Template):
-    pass
+    class Meta:
+        # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%(class)s_unique_name_version')
+        constraints = [UniqueConstraint(fields=['name', 'version'], name='DataproductFeedbackTemplate_unique_name_version')]
 
 
 # todo: do we need to specify a default?
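[Editor's note - not part of the patch] The four Meta blocks added above repeat the same (name, version) uniqueness rule per subclass because a constraint name must be unique and Django 2.x does not interpolate names declared on an abstract base class. Below is a minimal sketch of the consolidation the TODO comments point at, assuming a later move to Django >= 3.0, where '%(class)s' interpolation in constraint names is supported; the Template base shown here is a simplified stand-in for the real one:

    from django.db.models import Model, CharField, UniqueConstraint

    class Template(Model):
        # simplified: the real Template base also carries description, schema, tags, ...
        name = CharField(max_length=128)
        version = CharField(max_length=128)

        class Meta:
            abstract = True
            # expands per concrete subclass, e.g. 'SubtaskTemplate_unique_name_version'
            constraints = [UniqueConstraint(fields=['name', 'version'],
                                            name='%(class)s_unique_name_version')]

    class SubtaskTemplate(Template):
        pass  # real fields (queue, realtime, type, ...) omitted; the constraint is inherited with its own name

With that in place, the per-subclass Meta blocks (and their duplicated TODO comments) could be dropped.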
@@ -148,7 +156,6 @@ class Subtask(BasicCommon): priority = IntegerField(help_text='Absolute priority of this subtask (higher value means more important).') schedule_method = ForeignKey('ScheduleMethod', null=False, on_delete=PROTECT, help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).') cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).') - scheduler_input_doc = JSONField(help_text='Partial specifications, as input for the scheduler.') # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work? created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.') @@ -158,6 +165,36 @@ class Subtask(BasicCommon): # keep original state for logging self.__original_state = self.state + @staticmethod + def _send_state_change_event_message(subtask_id:int, old_state: str, new_state: str): + with ToBus(exchange=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), + broker=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)) as tobus: #TODO: do we want to connect to the bus for each new message, or have some global tobus? + msg = EventMessage(subject="%s.%s" % (DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX, new_state.capitalize()), + content={'subtask_id': subtask_id, 'old_state': old_state, 'new_state': new_state}) + tobus.send(msg) + + @property + def successors(self) -> QuerySet: + '''return the connect successor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) + If you want the result, add .all() like so: my_subtask.successors.all() + ''' + # JS, 20200528: I couldn't make django do a "self-reference" query from the subtask table to the subtask table (via input, output), so I used plain SQL. + return Subtask.objects.filter(id__in=RawSQL("SELECT successor_st.id FROM tmssapp_subtask as successor_st\n" + "INNER JOIN tmssapp_subtaskinput as st_input on st_input.subtask_id = successor_st.id\n" + "INNER JOIN tmssapp_subtaskoutput as st_output on st_output.id = st_input.producer_id\n" + "WHERE st_output.subtask_id = %s", params=[self.id])) + + @property + def predecessors(self) -> QuerySet: + '''return the connect predecessor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) + If you want the result, add .all() like so: my_subtask.predecessors.all() + ''' + # JS, 20200528: I couldn't make django do a "self-reference" query from the subtask table to the subtask table (via input, output), so I used plain SQL. 
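+        # Editor's note (added comment, not in the original patch): the raw query below selects the
+        # subtasks owning the SubtaskOutput rows that act as producer for this subtask's SubtaskInput
+        # rows, i.e. it walks self.inputs -> producer (SubtaskOutput) -> owning Subtask.
+        # The successors property above is the mirror image: subtasks whose inputs consume one of self's outputs.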
+ return Subtask.objects.filter(id__in=RawSQL("SELECT predecessor_st.id FROM tmssapp_subtask as predecessor_st\n" + "INNER JOIN tmssapp_subtaskoutput as st_output on st_output.subtask_id = predecessor_st.id\n" + "INNER JOIN tmssapp_subtaskinput as st_input on st_input.producer_id = st_output.id\n" + "WHERE st_input.subtask_id = %s", params=[self.id])) + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): creating = self._state.adding # True on create, False on update @@ -167,7 +204,7 @@ class Subtask(BasicCommon): super().save(force_insert, force_update, using, update_fields) # log if either state update or new entry: - if self.state != self.__original_state or creating is True: + if self.state != self.__original_state or creating == True: if self.created_or_updated_by_user is None: identifier = None else: @@ -176,7 +213,13 @@ class Subtask(BasicCommon): user=self.created_or_updated_by_user, user_identifier=identifier) log_entry.save() + try: + self._send_state_change_event_message(self.id, log_entry.old_state.value, log_entry.new_state.value) + except Exception as e: + logger.error("Could not send state change to messagebus: %s", e) + # update the previous state value + self.__original_state = self.state class SubtaskStateLog(BasicCommon): """ @@ -196,10 +239,9 @@ class SubtaskStateLog(BasicCommon): class SubtaskInput(BasicCommon): - subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, help_text='Subtask to which this input specification refers.') + subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='inputs', help_text='Subtask to which this input specification refers.') task_relation_blueprint = ForeignKey('TaskRelationBlueprint', null=True, on_delete=SET_NULL, help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).') - connector = ForeignKey('SubtaskConnector', null=True, on_delete=SET_NULL, help_text='Which connector this Task Input implements.') - producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, help_text='The Subtask Output providing the input dataproducts.') + producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name='consumers', help_text='The SubtaskOutput producing the input dataproducts for this SubtaskInput.') dataproducts = ManyToManyField('Dataproduct', help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. 
We thus store the result of this filtering directly to retain which input was specified for the task..') selection_doc = JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.') selection_template = ForeignKey('SubtaskInputSelectionTemplate', on_delete=PROTECT, help_text='Schema used for selection_doc.') @@ -212,8 +254,7 @@ class SubtaskInput(BasicCommon): class SubtaskOutput(BasicCommon): - subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, help_text='Subtask to which this output specification refers.') - connector = ForeignKey('SubtaskConnector', null=True, on_delete=SET_NULL, help_text='Which connector this Subtask Output implements.') + subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='outputs', help_text='Subtask to which this output specification refers.') class Dataproduct(BasicCommon): @@ -230,7 +271,7 @@ class Dataproduct(BasicCommon): pinned_since = DateTimeField(null=True, help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).') specifications_doc = JSONField(help_text='Dataproduct properties (f.e. beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.') specifications_template = ForeignKey('DataproductSpecificationsTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.') - producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, help_text='Subtask Output which generates this dataproduct.') + producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name="dataproducts", help_text='Subtask Output which generates this dataproduct.') do_cancel = DateTimeField(null=True, help_text='When this dataproduct was cancelled (NULLable). Cancelling a dataproduct triggers cleanup if necessary.') expected_size = BigIntegerField(null=True, help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).') size = BigIntegerField(null=True, help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).') diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 6dd29ad59246cb26fc7b44fa87baed01897ffc4c..9dca008d148704b32906959cf5115b13fb8c679b 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -2,7 +2,7 @@ This file contains the database models """ -from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField +from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField, UniqueConstraint from django.contrib.postgres.fields import ArrayField, JSONField from django.contrib.postgres.indexes import GinIndex from enum import Enum @@ -89,7 +89,7 @@ class AbstractChoice(Model): class Role(AbstractChoice): - """Defines the model and predefined list of possible Role's for TaskConnectors. + """Defines the model and predefined list of possible Role's for TaskConnector. 
The items in the Choises class below are automagically populated into the database via a data migration.""" class Choices(Enum): CORRELATOR = "correlator" @@ -102,7 +102,7 @@ class Role(AbstractChoice): class Datatype(AbstractChoice): - """Defines the model and predefined list of possible Datatype's for TaskConnectors. + """Defines the model and predefined list of possible Datatype's for TaskConnector. The items in the Choises class below are automagically populated into the database via a data migration.""" class Choices(Enum): VISIBILITIES = "visibilities" @@ -118,6 +118,8 @@ class Dataformat(AbstractChoice): class Choices(Enum): MEASUREMENTSET = "MeasurementSet" BEAMFORMED = "Beamformed" + QA_HDF5 = "QA_HDF5" + QA_PLOTS = "QA_Plots" class CopyReason(AbstractChoice): @@ -130,7 +132,7 @@ class CopyReason(AbstractChoice): # concrete models -class TaskConnectors(BasicCommon): +class TaskConnector(BasicCommon): role = ForeignKey('Role', null=False, on_delete=PROTECT) datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT) dataformats = ManyToManyField('Dataformat', blank=True) @@ -150,12 +152,18 @@ class Template(NamedCommon): class Meta: abstract = True + # TODO: remove all <class>_unique_name_version UniqueConstraint's from the subclasses and replace by this line below when we start using django 3.0 + # constraints = [UniqueConstraint(fields=['name', 'version'], name='%(class)s_unique_name_version')] # concrete models class GeneratorTemplate(Template): create_function = CharField(max_length=128, help_text='Python function to call to execute the generator.') + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='GeneratorTemplate_unique_name_version')] + class DefaultGeneratorTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -163,7 +171,9 @@ class DefaultGeneratorTemplate(BasicCommon): class SchedulingUnitTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='SchedulingUnitTemplate_unique_name_version')] class DefaultSchedulingUnitTemplate(BasicCommon): @@ -174,6 +184,9 @@ class DefaultSchedulingUnitTemplate(BasicCommon): class TaskTemplate(Template): validation_code_js = CharField(max_length=128, help_text='JavaScript code for additional (complex) validation.') + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='TaskTemplate_unique_name_version')] class DefaultTaskTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -181,7 +194,9 @@ class DefaultTaskTemplate(BasicCommon): class WorkRelationSelectionTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... 
name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='WorkRelationSelectionTemplate_unique_name_version')] class DefaultWorkRelationSelectionTemplate(BasicCommon): @@ -252,6 +267,7 @@ class SchedulingUnitDraft(NamedCommon): super().save(force_insert, force_update, using, update_fields) + class SchedulingUnitBlueprint(NamedCommon): requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).') do_cancel = BooleanField() @@ -264,6 +280,7 @@ class SchedulingUnitBlueprint(NamedCommon): super().save(force_insert, force_update, using, update_fields) + class TaskDraft(NamedCommon): specifications_doc = JSONField(help_text='Specifications for this task.') copies = ForeignKey('TaskDraft', related_name="copied_from", on_delete=SET_NULL, null=True, help_text='Source reference, if we are a copy (NULLable).') @@ -297,8 +314,8 @@ class TaskRelationDraft(BasicCommon): dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use. One of (MS, HDF5).') producer = ForeignKey('TaskDraft', related_name='produced_by', on_delete=CASCADE, help_text='Task Draft that has the output connector. NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.') consumer = ForeignKey('TaskDraft', related_name='consumed_by', on_delete=CASCADE, help_text='Task Draft that has the input connector.') - input = ForeignKey('TaskConnectors', related_name='inputs_task_relation_draft', on_delete=CASCADE, help_text='Input connector of consumer.') - output = ForeignKey('TaskConnectors', related_name='outputs_task_relation_draft', on_delete=CASCADE, help_text='Output connector of producer.') + input = ForeignKey('TaskConnector', related_name='inputs_task_relation_draft', on_delete=CASCADE, help_text='Input connector of consumer.') + output = ForeignKey('TaskConnector', related_name='outputs_task_relation_draft', on_delete=CASCADE, help_text='Output connector of producer.') selection_template = ForeignKey('WorkRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'? 
def save(self, force_insert=False, force_update=False, using=None, update_fields=None): @@ -313,8 +330,8 @@ class TaskRelationBlueprint(BasicCommon): dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use.') producer = ForeignKey('TaskBlueprint', related_name='produced_by', on_delete=CASCADE, help_text='Task Blueprint that has the output connector.') consumer = ForeignKey('TaskBlueprint', related_name='consumed_by', on_delete=CASCADE, help_text='Task Blueprint that has the input connector.') - input = ForeignKey('TaskConnectors', related_name='inputs_task_relation_blueprint', on_delete=CASCADE, help_text='Input connector of consumer.') - output = ForeignKey('TaskConnectors', related_name='outputs_task_relation_blueprint', on_delete=CASCADE, help_text='Output connector of producer.') + input = ForeignKey('TaskConnector', related_name='inputs_task_relation_blueprint', on_delete=CASCADE, help_text='Input connector of consumer.') + output = ForeignKey('TaskConnector', related_name='outputs_task_relation_blueprint', on_delete=CASCADE, help_text='Output connector of producer.') draft = ForeignKey('TaskRelationDraft', on_delete=CASCADE, related_name='related_task_relation_blueprint', help_text='Task Relation Draft which this work request instantiates.') selection_template = ForeignKey('WorkRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'? diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index b11731904350d2dafffc8fc26f6253756dbc5547..7e316fc110770115dcbc8ad9db0532e8fb7405e6 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -14,11 +14,16 @@ class Migration(migrations.Migration): """ +import logging +logger = logging.getLogger(__name__) + import json -from lofar.sas.tmss.tmss.tmssapp.models.specification import Role, Datatype, Dataformat, CopyReason, TaskTemplate, ResourceType, ResourceUnit -from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, \ - StationType, Algorithm, ScheduleMethod, Cluster, Filesystem +from lofar.sas.tmss.tmss.tmssapp.subtasks import * +from lofar.sas.tmss.tmss.tmssapp.models.specification import * +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * from lofar.common.json_utils import * +from lofar.common import isTestEnvironment, isDevelopmentEnvironment + def populate_choices(apps, schema_editor): ''' @@ -32,11 +37,44 @@ def populate_choices(apps, schema_editor): def populate_lofar_json_schemas(apps, schema_editor): + _populate_subtask_input_selection_templates() + _populate_dataproduct_specifications_templates() + _populate_dataproduct_feedback_templates() _populate_correlator_calibrator_schema() _populate_obscontrol_schema() _populate_stations_schema() + _populate_pipelinecontrol_schema() + _populate_preprocessing_schema() + + _populate_qa_files_subtask_template() + _populate_qa_plots_subtask_template() + + # Chain of Blueprint Task and subtasks should be instantiated with an API call + # so only create a Test Observation Draft Task + _populate_task_draft_example() + + +def _populate_task_draft_example(): + """ + Create a Task Draft 'Target Observation' + :return: + """ + try: + from datetime import datetime + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data + + if isTestEnvironment() or isDevelopmentEnvironment(): + 
task_template = models.TaskTemplate.objects.get(name='correlator schema') + task_draft_data = TaskDraft_test_data(name="Test Target Observation", specifications_template=task_template) + models.TaskDraft.objects.create(**task_draft_data) + + task_template = models.TaskTemplate.objects.get(name='preprocessing schema') + task_draft_data = TaskDraft_test_data(name="Test PreProcessingPipeline Task", specifications_template=task_template) + models.TaskDraft.objects.create(**task_draft_data) + except ImportError: + pass - _populate_example_data() def populate_resources(apps, schema_editor): ru_bytes = ResourceUnit.objects.create(name="bytes", description="Bytes") @@ -165,6 +203,70 @@ def _populate_correlator_calibrator_schema(): "CEP4", "DragNet" ] + }, + "QA": { + "type": "object", + "title": "Quality Assurance", + "default": {}, + "description": "Specify Quality Assurance steps for this observation", + "properties": { + "file_conversion": { + "type": "object", + "title": "File Conversion", + "default": {}, + "description": "Create a QA file for the observation", + "properties": { + "enabled": { + "type": "boolean", + "title": "enabled", + "default": true, + "description": "Do/Don't create a QA file for the observation" + }, + "nr_of_subbands": { + "type": "integer", + "title": "#subbands", + "default": -1, + "description": "Keep this number of subbands from the observation in the QA file, or all if -1" + }, + "nr_of_timestamps": { + "type": "integer", + "title": "#timestamps", + "default": 256, + "minimum": 1, + "description": "Extract this number of timestamps from the observation in the QA file (equidistantanly sampled, no averaging/interpolation)" + } + }, + "additionalProperties": false + }, + "plots": { + "type": "object", + "title": "Plots", + "default": {}, + "description": "Create dynamic spectrum plots", + "properties": { + "enabled": { + "type": "boolean", + "title": "enabled", + "default": true, + "description": "Do/Don't create plots from the QA file from the observation" + }, + "autocorrelation": { + "type": "boolean", + "title": "autocorrelation", + "default": true, + "description": "Create autocorrelation plots for all stations" + }, + "crosscorrelation": { + "type": "boolean", + "title": "crosscorrelation", + "default": true, + "description": "Create crosscorrelation plots for all baselines" + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false } } }'''), "tags": []} @@ -172,52 +274,6 @@ def _populate_correlator_calibrator_schema(): TaskTemplate.objects.create(**task_template_data) -def _populate_example_data(): - try: - from datetime import datetime - from lofar.sas.tmss.tmss.tmssapp import models - from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data, TaskBlueprint_test_data, SubtaskOutput_test_data, Dataproduct_test_data, Subtask_test_data - - cluster = Cluster.objects.get(name="CEP4") - - for i in range(10): - task_template = models.TaskTemplate.objects.get(name='correlator schema') - task_draft_data = TaskDraft_test_data(name="my test obs", specifications_template=task_template) - task_draft = models.TaskDraft.objects.create(**task_draft_data) - - task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft) - task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data) - - subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema') - specifications_doc = { - "stations": {"station_list": ["RS106","RS205"], - "antenna_set": "HBA_DUAL_INNER", - "filter": "HBA_110_190", - 
"analog_pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085 }, - "digital_pointings": [{"name": "3C48", - "pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085 }, - "subbands": list(range(0, 244)) - }] - } - } - - specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema) - subtask_data = Subtask_test_data(task_blueprint=task_blueprint, subtask_template=subtask_template, - specifications_doc=specifications_doc, cluster=cluster) - subtask = models.Subtask.objects.create(**subtask_data) - - subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask)) - for sb_nr in specifications_doc['stations']['digital_pointings'][0]['subbands']: - models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output, - directory="CEP4:/data/test-projects/TMSS_test/L%d/uv/" % (subtask.id,), - filename="L%d_SB%03d_uv.MS"%(subtask.id, sb_nr))) - except ImportError: - pass - def _populate_obscontrol_schema(): subtask_template_data = {"type": SubtaskType.objects.get(value='observation'), "name": "observationcontrol schema", @@ -548,10 +604,10 @@ def _populate_obscontrol_schema(): def _populate_stations_schema(): - task_template_data = { "name": "stations schema", - "description": 'Generic station settings and selection', - "version": '0.1', - "schema": json.loads(''' + task_template_data = {"name": "stations schema", + "description": 'Generic station settings and selection', + "version": '0.1', + "schema": json.loads(''' { "$id": "http://example.com/example.json", "type": "object", @@ -790,6 +846,471 @@ def _populate_stations_schema(): } } }'''), + "tags": []} + + TaskTemplate.objects.create(**task_template_data) + + +def _populate_subtask_input_selection_templates(): + selection_template_data = { "name": "All", + "description": 'Select all, apply no filtering.', + "version": '1', + "schema": json.loads('''{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": {}, + "additionalProperties": false, + "properties": {} }'''), "tags": []} + SubtaskInputSelectionTemplate.objects.create(**selection_template_data) + + +def _populate_dataproduct_specifications_templates(): + template_data = { "name": "Empty", + "description": 'Empty DataproductSpecificationsTemplate with an empty schema', + "version": '1', + "schema": json.loads('''{ +"$id": "http://example.com/example.json", +"type": "object", +"$schema": "http://json-schema.org/draft-06/schema#", +"definitions": {}, +"additionalProperties": false, +"properties": {} }'''), +"tags": []} + + DataproductSpecificationsTemplate.objects.create(**template_data) + + +def _populate_dataproduct_feedback_templates(): + template_data = { "name": "Empty", + "description": 'Empty DataproductFeedbackTemplate with an empty schema', + "version": '1', + "schema": json.loads('''{ +"$id": "http://example.com/example.json", +"type": "object", +"$schema": "http://json-schema.org/draft-06/schema#", +"definitions": {}, +"additionalProperties": false, +"properties": {} }'''), +"tags": []} + + DataproductFeedbackTemplate.objects.create(**template_data) + + +def _populate_qa_files_subtask_template(): + subtask_template_data = {"type": SubtaskType.objects.get(value=SubtaskType.Choices.QA_FILES.value), + "name": "QA file conversion", + "description": 'QA file conversion subtask template', + "version": '0.1', + "schema": 
json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": {}, + "additionalProperties": false, + "properties": { + "nr_of_subbands": { + "type": "integer", + "title": "#subbands", + "default": -1, + "description": "Keep this number of subbands from the observation in the QA file, or all if -1" + }, + "nr_of_timestamps": { + "type": "integer", + "title": "#timestamps", + "default": 256, + "minimum": 1, + "description": "Extract this number of timestamps from the observation in the QA file (equidistantanly sampled, no averaging/interpolation)" + } + } +}'''), + "realtime": False, + "queue": True, + "tags": []} + + SubtaskTemplate.objects.create(**subtask_template_data) + +def _populate_qa_plots_subtask_template(): + subtask_template_data = {"type": SubtaskType.objects.get(value=SubtaskType.Choices.QA_PLOTS.value), + "name": "QA plots", + "description": 'QA plots subtask template', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": {}, + "additionalProperties": false, + "properties": { + "autocorrelation": { + "type": "boolean", + "title": "autocorrelation", + "default": true, + "description": "Create autocorrelation plots for all stations" + }, + "crosscorrelation": { + "type": "boolean", + "title": "crosscorrelation", + "default": true, + "description": "Create crosscorrelation plots for all baselines" + } + } +}'''), + "realtime": False, + "queue": True, + "tags": []} + + SubtaskTemplate.objects.create(**subtask_template_data) + +def _populate_preprocessing_schema(): + task_template_data = {"name": "preprocessing schema", + "description": 'preprocessing settings', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "additionalProperties": false, + "definitions": { + "demix_strategy": { + "type": "string", + "default": "auto", + "enum": [ + "auto", + "yes", + "no" + ] + } + }, + "properties": { + "flag": { + "title": "Flagging", + "type": "object", + "additionalProperties": false, + "properties": { + "outerchannels": { + "type": "boolean", + "title": "Flag outer channels", + "default": true + }, + "autocorrelations": { + "type": "boolean", + "title": "Flag auto correlations", + "default": true + }, + "rfi_strategy": { + "type": "string", + "title": "RFI flagging strategy", + "default": "auto", + "enum": [ + "none", + "auto", + "HBAdefault", + "LBAdefault" + ] + } + }, + "required": [ + "outerchannels", + "autocorrelations", + "rfi_strategy" + ], + "default": {} + }, + "average": { + "title": "Averaging", + "type": "object", + "additionalProperties": false, + "properties": { + "frequency_steps": { + "type": "integer", + "title": "Frequency steps", + "default": 4, + "minimum": 1 + }, + "time_steps": { + "type": "integer", + "title": "Time steps", + "default": 1, + "minimum": 1 + } + }, + "required": [ + "frequency_steps", + "time_steps" + ], + "default": {} + }, + "demix": { + "title": "Demixing", + "type": "object", + "additionalProperties": false, + "properties": { + "frequency_steps": { + "type": "integer", + "title": "Frequency steps", + "description": "Must be a multiple of the averaging frequency steps", + "default": 64, + "minimum": 1 + }, + "time_steps": { + "type": "integer", + "title": "Time steps", + "description": "Must be a multiple 
of the averaging time steps", + "default": 10, + "minimum": 1 + }, + "ignore_target": { + "type": "boolean", + "title": "Ignore target", + "default": false + }, + "sources": { + "title": "Sources", + "type": "object", + "additionalProperties": false, + "properties": { + "CasA": { + "title": "CasA", + "$ref": "#/definitions/demix_strategy" + }, + "CygA": { + "title": "CygA", + "$ref": "#/definitions/demix_strategy" + }, + "HerA": { + "title": "HerA", + "$ref": "#/definitions/demix_strategy" + }, + "HydraA": { + "title": "HyrdraA", + "$ref": "#/definitions/demix_strategy" + }, + "TauA": { + "title": "TauA", + "$ref": "#/definitions/demix_strategy" + }, + "VirA": { + "title": "VirA", + "$ref": "#/definitions/demix_strategy" + } + }, + "default": {} + } + }, + "required": [ + "frequency_steps", + "time_steps", + "ignore_target", + "sources" + ], + "options": { + "dependencies": { + "demix": true + } + }, + "default": {} + }, + "storagemanager": { + "type": "string", + "title": "Storage Manager", + "default": "dysco", + "enum": [ + "basic", + "dysco" + ] + } + }, + "required": [ + "storagemanager" + ] +}'''), + "tags": []} + TaskTemplate.objects.create(**task_template_data) + + +def _populate_pipelinecontrol_schema(): + subtask_template_data = {"type": SubtaskType.objects.get(value='pipeline'), + "name": "pipelinecontrol schema", + "description": 'pipelinecontrol schema for pipeline subtask', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "additionalProperties": false, + "properties": { + "preflagger0": { + "title": "Preflagger0", + "description": "Flag channels", + "type": "object", + "additionalProperties": false, + "properties": { + "channels": { + "title": "Channels", + "type": "string", + "default": "0..nchan/32-1,31*nchan/32..nchan-1" + } + }, + "required": [ + "channels" + ], + "default": {} + }, + "preflagger1": { + "title": "Preflagger1", + "description": "Flag correlations", + "type": "object", + "additionalProperties": false, + "properties": { + "corrtype": { + "title": "Correlations", + "type": "string", + "default": "auto", + "enum": [ + "", + "auto", + "cross" + ] + } + }, + "required": [ + "corrtype" + ], + "default": {} + }, + "aoflagger": { + "title": "AOFlagger", + "description": "Flag RFI", + "type": "object", + "additionalProperties": false, + "properties": { + "strategy": { + "title": "Strategy", + "type": "string", + "default": "HBAdefault", + "enum": [ + "HBAdefault", + "LBAdefault" + ] + } + }, + "required": [ + "strategy" + ], + "default": {} + }, + "demixer": { + "title": "Demixer & Averager", + "description": "Demix sources & average data", + "type": "object", + "additionalProperties": false, + "properties": { + "baselines": { + "title": "Baselines", + "type": "string", + "default": "CS*,RS*&" + }, + "frequency_steps": { + "type": "integer", + "title": "Frequency steps (average)", + "default": 4, + "minimum": 1 + }, + "time_steps": { + "type": "integer", + "title": "Time steps (average)", + "default": 1, + "minimum": 1 + }, + "demix_frequency_steps": { + "type": "integer", + "title": "Frequency steps (demix)", + "default": 4, + "minimum": 1 + }, + "demix_time_steps": { + "type": "integer", + "title": "Time steps (demix)", + "default": 1, + "minimum": 1 + }, + "ignore_target": { + "type": "boolean", + "title": "Ignore target", + "default": false + }, + "demix_always": { + "type": "array", + "title": "Demix always", + "default": [], + 
"uniqueItems": true, + "items": { + "type": "string", + "enum": [ + "CasA", + "CygA", + "HerA", + "HydraA", + "TauA", + "VirA" + ] + } + }, + "demix_if_needed": { + "type": "array", + "title": "Demix if needed", + "default": [], + "uniqueItems": true, + "items": { + "type": "string", + "enum": [ + "CasA", + "CygA", + "HerA", + "HydraA", + "TauA", + "VirA" + ] + } + } + }, + "required": [ + "baselines", + "frequency_steps", + "time_steps", + "demix_frequency_steps", + "demix_time_steps", + "ignore_target", + "demix_always", + "demix_if_needed" + ], + "default": {} + }, + "storagemanager": { + "type": "string", + "title": "Storage Manager", + "default": "dysco", + "enum": [ + "standard", + "dysco" + ] + } + }, + "required": [ + "storagemanager" + ] +} +'''), + "realtime": True, + "queue": False, + "tags": []} + + SubtaskTemplate.objects.create(**subtask_template_data) diff --git a/SAS/TMSS/src/tmss/tmssapp/renderers/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/renderers/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..b13b5f0f17a78c51205135ff04929bd3fb096b15 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/renderers/CMakeLists.txt @@ -0,0 +1,10 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + PlainTextRenderer.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/tmssapp/renderers) diff --git a/SAS/TMSS/src/tmss/tmssapp/renderers/PlainTextRenderer.py b/SAS/TMSS/src/tmss/tmssapp/renderers/PlainTextRenderer.py new file mode 100644 index 0000000000000000000000000000000000000000..72e521c4e67838c7d6b9a81efcf2d7995edf4566 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/renderers/PlainTextRenderer.py @@ -0,0 +1,9 @@ +from rest_framework import renderers + + +class PlainTextRenderer(renderers.BaseRenderer): + media_type = 'text/plain' + format = 'text' + + def render(self, data, media_type=None, renderer_context=None): + return str(renderers.JSONRenderer().render(data, media_type, renderer_context)).encode(self.charset) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/renderers/__init__.py b/SAS/TMSS/src/tmss/tmssapp/renderers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b904c2d500a9747448051fce699fb447a743b341 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/renderers/__init__.py @@ -0,0 +1 @@ +from .PlainTextRenderer import * diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/serializers/CMakeLists.txt index bb0cdfd3ead59ce7ccff5f6d345a2f5199a4f137..bf90ee06dc627f8dbf4909c786387a2e219e2521 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/CMakeLists.txt @@ -5,6 +5,7 @@ set(_py_files __init__.py specification.py scheduling.py + widgets.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py index 7d29e4e4ffaa70e67e6194235897eeef5906d4de..4baf35cddd3651872cbbc4765ad107df0e93ad11 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py @@ -8,13 +8,7 @@ logger = logging.getLogger(__name__) from rest_framework import serializers from .. 
import models from .specification import RelationalHyperlinkedModelSerializer - - -class SubtaskConnectorSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = models.SubtaskConnector - fields = '__all__' - +from .widgets import JSONEditorField class SubtaskStateSerializer(serializers.ModelSerializer): class Meta: @@ -95,18 +89,42 @@ class SubtaskSerializer(serializers.HyperlinkedModelSerializer): class SubtaskInputSerializer(serializers.HyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['selection_doc'] = JSONEditorField(self.instance.selection_template.schema) + except Exception as e: + print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) + # todo: Shall we use the schema for one of the default templates in this case instead? + class Meta: model = models.SubtaskInput fields = '__all__' -class SubtaskOutputSerializer(serializers.HyperlinkedModelSerializer): +class SubtaskOutputSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskOutput fields = '__all__' + #extra_fields = ['dataproducts', 'consumers'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? class DataproductSerializer(serializers.HyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) + self.fields['feedback_doc'] = JSONEditorField(self.instance.feedback_template.schema) + except Exception as e: + print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) + # todo: Shall we use the schema for one of the default templates in this case instead? + class Meta: model = models.Dataproduct fields = '__all__' @@ -149,27 +167,16 @@ class DataproductHashSerializer(serializers.HyperlinkedModelSerializer): class SubtaskSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): + # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. - # - # Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution tham - # ...intercepting the init process to determine the schema (or template uri or so) for the style attribute. - # ...Hoewever, I did not manage to simply pass the value(!) of e.g. the template field as a style attribute - # ...of the JSONField via a SerializerMethodField or similar, although I feel that should be possible. 
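The serializer changes above (and the analogous ones below) all follow one pattern: in __init__ the plain JSONField is swapped for a schema-aware JSONEditorField when self.instance is a single object whose template can be read, while for list views self.instance is a queryset, the attribute lookup raises, and the default text input stays in place. A condensed sketch of that pattern with an illustrative serializer name, assuming the JSONEditorField introduced later in this change:

from rest_framework import serializers
from .widgets import JSONEditorField
from .. import models

class ExampleDocSerializer(serializers.HyperlinkedModelSerializer):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        try:
            # detail view: a single instance, so its template schema is available
            self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema)
        except Exception:
            # list view or missing template: keep the plain JSONField
            pass

    class Meta:
        model = models.Subtask
        fields = '__all__'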
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - - import json - try: - if isinstance(self.instance, models.Subtask): - schema = self.instance.specifications_template.schema - self.fields['specifications_doc'] = serializers.JSONField( - style={'template': 'josdejong_jsoneditor_widget.html', - 'schema': json.dumps(schema)}) + self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) except Exception as e: - # todo: Shall we use one of the default templates for the init? - logger.warning('Could not determine schema, hence no fancy JSON form. Expected for list view.') + print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) + # todo: Shall we use the schema for one of the default templates in this case instead? def create(self, validated_data): validated_data['created_or_updated_by_user'] = self.context.get('request').user @@ -179,22 +186,8 @@ class SubtaskSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): validated_data['created_or_updated_by_user'] = self.context.get('request').user return super().update(instance, validated_data) - # todo: remove the following if we are happy to log on the model level - # Intercept updates to also create a log entry - # def update(self, instance, validated_data): - # if instance.state is not None \ - # and validated_data.get('state') is not None \ - # and instance.state != validated_data.get('state'): - # user = self.context.get('request').user - # log_entry = models.SubtaskStateLog(user=user, - # user_identifier=user.email, - # subtask=instance, - # old_state=instance.state, - # new_state=validated_data.get('state')) - # log_entry.save() - # return super().update(instance, validated_data) - class Meta: model = models.Subtask #fields = '__all__' + # extra_fields = ['inputs', 'outputs'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? exclude = ('created_or_updated_by_user',) diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index e241bb9d02ffd4c9230fa78a454fe842f84fe264..577c00b2b292a0fba731956687cc902ab43efe77 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ -4,8 +4,11 @@ This file contains the serializers (for the elsewhere defined data models) from rest_framework import serializers from .. 
import models +from .widgets import JSONEditorField from django.contrib.auth.models import User from rest_framework import decorators +import json + class RelationalHyperlinkedModelSerializer(serializers.HyperlinkedModelSerializer): @@ -103,11 +106,10 @@ class CopyReasonSerializer(serializers.ModelSerializer): model = models.CopyReason fields = '__all__' - -class TaskConnectorsSerializer(serializers.HyperlinkedModelSerializer): +class TaskConnectorSerializer(serializers.HyperlinkedModelSerializer): class Meta: - model = models.TaskConnectors + model = models.TaskConnector fields = '__all__' @@ -124,25 +126,39 @@ class ProjectSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' extra_fields = ['name','project_quota'] + class ProjectQuotaSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.ProjectQuota fields = '__all__' extra_fields = ['resource_type'] + class ResourceUnitSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.ResourceUnit fields = '__all__' extra_fields = ['name'] + class ResourceTypeSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.ResourceType fields = '__all__' extra_fields = ['name'] + class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['generator_doc'] = JSONEditorField(self.instance.generator_template.schema) + except Exception as e: + pass + class Meta: model = models.SchedulingSet fields = '__all__' @@ -150,6 +166,16 @@ class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer): class SchedulingUnitDraftSerializer(RelationalHyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['requirements_doc'] = JSONEditorField(self.instance.requirements_template.schema) + except Exception as e: + pass + class Meta: model = models.SchedulingUnitDraft fields = '__all__' @@ -157,12 +183,32 @@ class SchedulingUnitDraftSerializer(RelationalHyperlinkedModelSerializer): class SchedulingUnitBlueprintSerializer(serializers.HyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['requirements_doc'] = JSONEditorField(self.instance.requirements_template.schema) + except Exception as e: + pass + class Meta: model = models.SchedulingUnitBlueprint fields = '__all__' class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. 
+ def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) + except Exception as e: + pass + class Meta: model = models.TaskDraft fields = '__all__' @@ -170,6 +216,16 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) + except Exception as e: + pass + class Meta: model = models.TaskBlueprint fields = '__all__' @@ -177,77 +233,36 @@ class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): class TaskRelationDraftSerializer(RelationalHyperlinkedModelSerializer): - class Meta: - model = models.TaskRelationDraft - fields = '__all__' - extra_fields = ['related_task_relation_blueprint'] - - -class TaskRelationBlueprintSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = models.TaskRelationBlueprint - fields = '__all__' - - -# ----- JSON - -class TaskBlueprintSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this - # blueprint refers to. If that fails, the JSONField remains a standard text input. - # - # Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution tham - # ...intercepting the init process to determine the schema (or template uri or so) for the style attribute. - # ...Hoewever, I did not manage to simply pass the value(!) of e.g. the template field as a style attribute - # ...of the JSONField via a SerializerMethodField or similar, although I feel that should be possible. + # draft refers to. If that fails, the JSONField remains a standard text input. def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - - import json - try: - schema = self.instance.specifications_template.schema - self.fields['specifications_doc'] = serializers.JSONField( - style={'template': 'josdejong_jsoneditor_widget.html', - 'schema': json.dumps(schema)}) - - except: - # todo: Shall we use one of the default templates for the init? - print('Could not determine schema, hence no fancy JSON form. Expected for list view.') + self.fields['selection_doc'] = JSONEditorField(self.instance.selection_template.schema) + except Exception as e: + pass class Meta: - model = models.TaskBlueprint + model = models.TaskRelationDraft fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by'] + extra_fields = ['related_task_relation_blueprint'] -class TaskDraftSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): +class TaskRelationBlueprintSerializer(serializers.HyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. - # - # Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution tham - # ...intercepting the init process to determine the schema (or template uri or so) for the style attribute. 
- # ...Hoewever, I did not manage to simply pass the value(!) of e.g. the template field as a style attribute - # ...of the JSONField via a SerializerMethodField or similar, although I feel that should be possible. def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - - import json - try: - schema = self.instance.specifications_template.schema - self.fields['specifications_doc'] = serializers.JSONField( - style={'template': 'josdejong_jsoneditor_widget.html', - 'schema': json.dumps(schema)}) - - except: - # todo: Shall we use one of the default templates for the init? - print('Could not determine schema, hence no fancy JSON form. Expected for list view.') + self.fields['selection_doc'] = JSONEditorField(self.instance.selection_template.schema) + except Exception as e: + pass class Meta: - model = models.TaskDraft + model = models.TaskRelationBlueprint fields = '__all__' - extra_fields = ['related_task_blueprint', 'produced_by', 'consumed_by'] + diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py b/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py new file mode 100644 index 0000000000000000000000000000000000000000..3f621dc899332041702a3a0e1320f8c61f033e45 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py @@ -0,0 +1,14 @@ +""" +This file contains customized UI elements for use in the viewsets (based on the elsewhere defined data models and serializers) +""" +from rest_framework import serializers +import json + + +class JSONEditorField(serializers.JSONField): + """ + An enhanced JSONfield that provides a nice editor widget with validation against the provided schema. + """ + def __init__(self, schema, *args, **kwargs): + kwargs['style'] = {'template': 'josdejong_jsoneditor_widget.html', 'schema': json.dumps(schema)} + super().__init__(*args, **kwargs) diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py new file mode 100644 index 0000000000000000000000000000000000000000..249f2d5a3cbb6c0f642f43633fbf5233bdeda69a --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -0,0 +1,397 @@ +import logging +logger = logging.getLogger(__name__) + +from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema + +from lofar.sas.tmss.tmss.exceptions import SubtaskSchedulingException + +from lofar.sas.tmss.tmss.tmssapp.models.specification import * +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * + +from datetime import datetime, timedelta +from lofar.common.datetimeutils import parseDatetime +from lofar.common.json_utils import add_defaults_to_json_object_for_schema + +from lofar.sas.tmss.tmss.tmssapp.models.specification import * +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * + +def create_observation_to_qafile_subtask(observation_subtask: Subtask): + ''' Create a subtask to convert the observation output to a QA h5 file. 
+ This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + # step 0: check pre-requisites + if observation_subtask.specifications_template.type.value != SubtaskType.Choices.OBSERVATION.value: + raise ValueError("Cannot create %s subtask for subtask id=%d type=%s because it is not an %s" % ( + SubtaskType.Choices.QA_FILES.value, observation_subtask.pk, + observation_subtask.specifications_template.type, SubtaskType.Choices.OBSERVATION.value)) + + if observation_subtask.state.value == SubtaskState.Choices.DEFINING.value: + raise ValueError("Cannot create %s subtask for subtask id=%d because it is not DEFINED" % ( + SubtaskType.Choices.QA_FILES.value, observation_subtask.pk)) + + obs_task_spec = observation_subtask.task_blueprint.specifications_doc + obs_task_qafile_spec = obs_task_spec.get("QA", {}).get("file_conversion", {}) + + if not obs_task_qafile_spec.get("enabled", False): + logger.debug("Skipping creation of qafile_subtask because QA.file_conversion is not enabled") + return None + + # step 1: create subtask in defining state, with filled-in subtask_template + qafile_subtask_template = SubtaskTemplate.objects.get(name="QA file conversion") + qafile_subtask_spec = add_defaults_to_json_object_for_schema({}, qafile_subtask_template.schema) + qafile_subtask_spec['nr_of_subbands'] = obs_task_qafile_spec.get("nr_of_subbands") + qafile_subtask_spec['nr_of_timestamps'] = obs_task_qafile_spec.get("nr_of_timestamps") + validate_json_against_schema(qafile_subtask_spec, qafile_subtask_template.schema) + + qafile_subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "task_blueprint": observation_subtask.task_blueprint, + "specifications_template": qafile_subtask_template, + "specifications_doc": qafile_subtask_spec, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": observation_subtask.cluster} + qafile_subtask = Subtask.objects.create(**qafile_subtask_data) + + # step 2: create and link subtask input/output + qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask, + producer=observation_subtask.outputs.first(), # TODO: determine proper producer based on spec in task_relation_blueprint + selection_doc="{}", + selection_template=SubtaskInputSelectionTemplate.objects.get(name="All")) + qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask) + + # step 3: set state to DEFINED + qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + qafile_subtask.save() + + # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qa_file_subtask + return qafile_subtask + +def schedule_qafile_subtask(qafile_subtask: Subtask): + ''' Schedule the given qafile_subtask (which converts the observation output to a QA h5 file) + This method should typically be called upon the event of the observation_subtask being finished. 
+ This method implements "Scheduling subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + + # step 0: check pre-requisites + check_prerequities_for_scheduling(qafile_subtask) + + if qafile_subtask.specifications_template.type.value != SubtaskType.Choices.QA_FILES.value: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s but type should be %s" % (qafile_subtask.pk, + qafile_subtask.specifications_template.type, SubtaskType.Choices.QA_FILES.value)) + + + # step 1: set state to SCHEDULING + qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) + qafile_subtask.save() + + # step 2: link input dataproducts + for input in qafile_subtask.inputs.all(): + input.dataproducts.set(input.producer.dataproducts.all()) + + # step 3: resource assigner + # is a no-op for QA + + # step 4: create output dataproducts, and link these to the output + # TODO: Should the output and/or dataproduct be determined by the specification in task_relation_blueprint? + if qafile_subtask.outputs.first(): + qafile_subtask_dataproduct = Dataproduct.objects.create(filename="L%d_QA.h5" % (qafile_subtask.id,), + directory="/data/qa/qa_files", + dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_HDF5.value), + producer=qafile_subtask.outputs.first(), + specifications_doc="", + specifications_template=DataproductSpecificationsTemplate.objects.first(), # ????? + feedback_doc="", + feedback_template=DataproductFeedbackTemplate.objects.first() # ????? + ) + + # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) + qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) + qafile_subtask.save() + + return qafile_subtask + +def create_qafile_to_qaplots_subtask(qafile_subtask: Subtask): + ''' Create a subtask to create inspection plots from the QA h5 file. + This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + # step 0: check pre-requisites + if qafile_subtask.specifications_template.type.value != SubtaskType.Choices.QA_FILES.value: + raise ValueError("Cannot create %s subtask for subtask id=%d type=%s because it is not an %s" % ( + SubtaskType.Choices.QA_PLOTS.value, qafile_subtask.pk, + qafile_subtask.specifications_template.type, SubtaskType.Choices.QA_FILES.value)) + + if qafile_subtask.state.value == SubtaskState.Choices.DEFINING.value: + raise ValueError("Cannot create %s subtask for subtask id=%d because it is not DEFINED. 
Current state=%s" % ( + SubtaskType.Choices.QA_PLOTS.value, qafile_subtask.pk, qafile_subtask.state.value)) + + obs_task_spec = qafile_subtask.task_blueprint.specifications_doc + obs_task_qaplots_spec = obs_task_spec.get("QA", {}).get("plots", {}) + + if not obs_task_qaplots_spec.get("enabled", False): + logger.debug("Skipping creation of qaplots_subtask because QA.plots is not enabled") + return None + + # step 1: create subtask in defining state, with filled-in subtask_template + qaplots_subtask_template = SubtaskTemplate.objects.get(name="QA plots") + qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template.schema) + qaplots_subtask_spec_doc['autocorrelation'] = obs_task_qaplots_spec.get("autocorrelation") + qaplots_subtask_spec_doc['crosscorrelation'] = obs_task_qaplots_spec.get("crosscorrelation") + validate_json_against_schema(qaplots_subtask_spec_doc, qaplots_subtask_template.schema) + + qaplots_subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "task_blueprint": qafile_subtask.task_blueprint, + "specifications_template": qaplots_subtask_template, + "specifications_doc": qaplots_subtask_spec_doc, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": qafile_subtask.cluster} + qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data) + + # step 2: create and link subtask input/output + qaplots_subtask_input = SubtaskInput.objects.create(subtask=qaplots_subtask, + producer=qafile_subtask.outputs.first(), + selection_doc="{}", + selection_template=SubtaskInputSelectionTemplate.objects.get(name="All")) + qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask) + + # step 3: set state to DEFINED + qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + qaplots_subtask.save() + + # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qaplots_subtask + return qaplots_subtask + +def schedule_qaplots_subtask(qaplots_subtask: Subtask): + ''' Schedule the given qaplots_subtask (which creates inspection plots from a QA h5 file) + This method should typically be called upon the event of the qafile_subtask being finished. + This method implements "Scheduling subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + + # step 0: check pre-requisites + check_prerequities_for_scheduling(qaplots_subtask) + + if qaplots_subtask.specifications_template.type.value != SubtaskType.Choices.QA_PLOTS.value: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s but type should be %s" % (qaplots_subtask.pk, + qaplots_subtask.specifications_template.type, + SubtaskType.Choices.QA_PLOTS.value)) + + # step 1: set state to SCHEDULING + qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) + qaplots_subtask.save() + + # step 2: link input dataproducts + # this should typically be a single input with a single dataproduct (the qa h5 file) + for input in qaplots_subtask.inputs.all(): + input.dataproducts.set(input.producer.dataproducts.all()) + + # step 3: resource assigner + # is a no-op for QA + + # step 4: create output dataproducts, and link these to the output + # TODO: Should the output and/or dataproduct be determined by the specification in task_relation_blueprint? 
+ qaplots_subtask_dataproduct = Dataproduct.objects.create(directory="/data/qa/plots/L%d" % (qaplots_subtask.id,), + dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_PLOTS.value), + producer=qaplots_subtask.outputs.first(), + specifications_doc="", + specifications_template=DataproductSpecificationsTemplate.objects.first(), # ????? + feedback_doc="", + feedback_template=DataproductFeedbackTemplate.objects.first() # ????? + ) + + # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) + qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) + qaplots_subtask.save() + + return qaplots_subtask + + +def connect_observation_subtask_to_preprocessing_subtask(observation_subtask: Subtask, pipeline_subtask: Subtask): + if observation_subtask.specifications_template.type.value != SubtaskType.Choices.OBSERVATION.value or \ + pipeline_subtask.specifications_template.type.value != SubtaskType.Choices.PIPELINE.value: + raise ValueError("Cannot connect subtask %s type=%s to subtask id=%d type=%s. Expecting types %s and %s." % ( + observation_subtask.pk, observation_subtask.specifications_template.type, + pipeline_subtask.pk, pipeline_subtask.specifications_template.type, + SubtaskType.Choices.OBSERVATION.value, SubtaskType.Choices.PIPELINE.value)) + + logging.info("Connecting subtask %s type=%s to subtask id=%d type=%s" % ( + observation_subtask.pk, observation_subtask.specifications_template.type, + pipeline_subtask.pk, pipeline_subtask.specifications_template.type)) + + if observation_subtask.stop_time and isinstance(observation_subtask.stop_time, datetime): + pipeline_subtask.start_time = max(datetime.utcnow(), observation_subtask.stop_time) + + # todo: use existing and reasonable selection and specification templates when we have those, for now, use "All" and "Empty" + subtask_input_selection_template = SubtaskInputSelectionTemplate.objects.get(name="All") + dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="Empty") + dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="Empty") + + if len(observation_subtask.outputs.all()) == 0: + raise ValueError('Observation has no outputs!') + + # use observation output dataproducts as pipeline input + pipeline_subtask_input = SubtaskInput.objects.create(subtask=pipeline_subtask, + producer=observation_subtask.outputs.first(), + selection_doc={}, + selection_template=subtask_input_selection_template) + pipeline_subtask_input.dataproducts.set(observation_subtask.outputs.first().dataproducts.all()) + + # specify pipeline output (map input dataproducts 1:1, but with pipeline subtask ID) + pipeline_subtask_output = SubtaskOutput.objects.create(subtask=pipeline_subtask) + output_dps = [] + for input_dp in pipeline_subtask_input.dataproducts.all(): + if '_' in input_dp.filename and input_dp.filename.startswith('L'): + filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename.split('_', 1)[1]) + else: + filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename) + + output_dp = Dataproduct.objects.create(filename=filename, + directory=input_dp.directory.replace(str(observation_subtask.pk), str(pipeline_subtask.pk)), + dataformat=Dataformat.objects.get(value="MeasurementSet"), + producer=pipeline_subtask_output, + specifications_doc={}, + specifications_template=dataproduct_specifications_template, + feedback_doc="", + feedback_template=dataproduct_feedback_template) + 
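A quick worked example of the filename mapping a few lines above, with hypothetical subtask ids, to make the split('_', 1) branch concrete:

# assuming observation_subtask.pk == 123 and pipeline_subtask.pk == 456:
#   "L123_SB000_uv.MS"  starts with 'L' and contains '_'  ->  "L456_SB000_uv.MS"
#   "rawdata.MS"        falls through to the else branch  ->  "L456_rawdata.MS"
# the directory gets the same id swap via input_dp.directory.replace("123", "456")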
DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False) + output_dps.append(output_dp) + pipeline_subtask_output.dataproducts.set(output_dps) + + # todo: specify a SubtaskConnector? TaskRelation + + +def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint): + generator_mapping = {'preprocessing schema': _create_subtasks_from_preprocessing_task_blueprint } + template_name = task_blueprint.specifications_template.name + if template_name in generator_mapping: + generator = generator_mapping[template_name] + return generator(task_blueprint) + else: + raise ValueError('Cannot create subtasks for task id=%s since no generator exists for its schema name=%s' % (task_blueprint.pk, template_name)) + + +def _create_subtasks_from_preprocessing_task_blueprint(task_blueprint: TaskBlueprint): + # todo: check whether already created to avoid duplication? + + subtask_template = SubtaskTemplate.objects.get(name='pipelinecontrol schema') + default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema) + subtasks = [] + subtask_specs = _generate_subtask_specs_from_preprocessing_task_specs(task_blueprint.specifications_doc, + default_subtask_specs) + subtask = create_subtask(subtask_template, subtask_specs) + subtask.task_blueprint = task_blueprint + subtask.cluster = Cluster.objects.get(name="CEP4") # todo: probably should not be hardcoded? Can be optional in parset? + subtask.save() + subtasks.append(subtask) + SubtaskTemplate.objects.get(name='pipelinecontrol schema') + + return {'subtasks_created': [s.pk for s in subtasks]} + + # todo: determine observation subtask, then call connect_observation_subtask_to_preprocessing_subtask to create inputs (not sure where exactly this should happen) + + +def _generate_subtask_specs_from_preprocessing_task_specs(preprocessing_task_specs, default_subtask_specs): + # preprocessing task default spec: { + # "storagemanager": "dysco", + # "flag": {"outerchannels": true, "autocorrelations": true, "rfi_strategy": "auto"}, + # "demix": {"frequency_steps": 64, "time_steps": 10, "ignore_target": false, "sources": {}}, + # "average": {"frequency_steps": 4, "time_steps": 1}} + # pipelinecontrol subtask default spec: { + # "storagemanager": "dysco", + # "demixer": {"baselines": "CS*,RS*&", "frequency_steps": 4, "time_steps": 1, "demix_frequency_steps": 4, + # "demix_time_steps": 1, "ignore_target": false, "demix_always": [], "demix_if_needed": []}, + # "aoflagger": {"strategy": "HBAdefault"}, + # "preflagger0": {"channels": "0..nchan/32-1,31*nchan/32..nchan-1"}, + # "preflagger1": {"corrtype": "auto"}} + + # todo: check that this is actually how these need to be translated + # todo: especially check when defaults are NOT supposed to be set because the task implies to not include them + + # todo: translate task "sources": {} - I guess this is demix_always/demix_if_needed? + # todo: set subtask demixer properties "baselines": "CS*,RS*&", "demix_always": [], "demix_if_needed": [] + + subtask_specs = {} + subtask_specs['storagemanager'] = preprocessing_task_specs.get('storagemanager', + default_subtask_specs.get('storagemanager')) + + # todo: we depend on valid json here with knowledge about required properties. To generalize, we need to expect things to not be there. + if 'demix' or 'average' in preprocessing_task_specs: + # todo: should we exclude defaults in subtask.demixer if only one of these is defined on the task? 
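One note on the condition a couple of lines up: because the non-empty string literal is truthy, the expression 'demix' or 'average' in preprocessing_task_specs always evaluates to True, so the demixer defaults are copied unconditionally. A sketch of the presumably intended membership test (not part of this change):

if 'demix' in preprocessing_task_specs or 'average' in preprocessing_task_specs:
    subtask_specs['demixer'] = default_subtask_specs['demixer']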
+ subtask_specs['demixer'] = default_subtask_specs['demixer'] + if 'demix' in preprocessing_task_specs: + subtask_specs['demixer'].update({ + "demix_frequency_steps": preprocessing_task_specs['demix']['frequency_steps'], + "demix_time_steps": preprocessing_task_specs['demix']['time_steps'], + "ignore_target": preprocessing_task_specs['demix']['ignore_target'] + }), + if 'average' in preprocessing_task_specs: + subtask_specs['demixer'].update({ + "demix_frequency_steps": preprocessing_task_specs['demix']['frequency_steps'], + "frequency_steps": preprocessing_task_specs['average']['frequency_steps'], + "demix_time_steps": preprocessing_task_specs['demix']['time_steps'], + "time_steps": preprocessing_task_specs['average']['time_steps'], + "ignore_target": preprocessing_task_specs['demix']['ignore_target'] + }), + if 'flag' in preprocessing_task_specs: + if preprocessing_task_specs["flag"]["rfi_strategy"] != 'none': + subtask_specs.update({"aoflagger": {"strategy": preprocessing_task_specs["flag"]["rfi_strategy"]}}) + + if preprocessing_task_specs["flag"]["rfi_strategy"] == 'auto': + # todo: handle 'auto' properly: we need to determine input dataproduct type and set LBA or HBA accordingly + # either here or allow 'auto' in subtask json and translate it when we connect obs to pipe subtask + default_strategy = default_subtask_specs['aoflagger']['strategy'] + subtask_specs.update({"aoflagger": {"strategy": default_strategy}}) + logger.warning('Translating aoflagger "auto" strategy to "%s" without knowing whether that makes sense!' % default_strategy) + + if preprocessing_task_specs["flag"]["outerchannels"]: + subtask_specs.update({"preflagger0": {"channels": "0..nchan/32-1,31*nchan/32..nchan-1"}}) + + if preprocessing_task_specs["flag"]["autocorrelations"]: + subtask_specs.update({"preflagger1": {"corrtype": "auto"}}) + + return subtask_specs + + +def create_subtask(subtask_template: SubtaskTemplate, subtask_specifications): + subtask_data = { + "tags": [], + "specifications_doc": subtask_specifications, + "do_cancel": None, + "priority": 0, + "state": SubtaskState.objects.get(value="defining"), + "task_blueprint": None, + "specifications_template": subtask_template, + "schedule_method": ScheduleMethod.objects.get(value="dynamic"), + "cluster": None + } + + return Subtask.objects.create(**subtask_data) + +def schedule_subtask(subtask: Subtask) -> Subtask: + '''Generic scheduling method for subtasks. Calls the appropiate scheduling method based on the subtask's type.''' + check_prerequities_for_scheduling(subtask) + + if subtask.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value: + return schedule_qafile_subtask(subtask) + + if subtask.specifications_template.type.value == SubtaskType.Choices.QA_PLOTS.value: + return schedule_qaplots_subtask(subtask) + + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." % (subtask.pk, subtask.specifications_template.type.value)) + +def check_prerequities_for_scheduling(subtask: Subtask) -> bool: + if subtask.state.value != SubtaskState.Choices.DEFINED.value: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because it is not DEFINED. 
Current state=%s" % (subtask.pk, subtask.state.value)) + + for predecessor in subtask.predecessors.all(): + if predecessor.state.value != SubtaskState.Choices.FINISHED.value: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s in not FINISHED but state=%s" % (subtask.pk, predecessor.pk, predecessor.state.value)) + + return True \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py new file mode 100644 index 0000000000000000000000000000000000000000..843fce85273c8a9c8acbd862b77a49c5e386af39 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py @@ -0,0 +1,183 @@ +import datetime +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.common.datetimeutils import formatDatetime +from lofar.common.json_utils import * +from lofar.sas.tmss.tmss.exceptions import * +from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskDraft, TaskBlueprint, TaskRelationBlueprint, \ + TaskRelationDraft, SchedulingUnitBlueprint +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask, SubtaskType, SubtaskInput, SubtaskOutput, \ + SubtaskTemplate, SubtaskState, ScheduleMethod, SubtaskInputSelectionTemplate +from lofar.sas.tmss.tmss.tmssapp.subtasks import connect_observation_subtask_to_preprocessing_subtask, \ + create_qafile_to_qaplots_subtask, create_observation_to_qafile_subtask + +import logging +logger = logging.getLogger(__name__) + + +def create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft: models.TaskDraft): + """ + Create a task_blueprint from the task_draft + For every subtask specified in task blueprint: + - create subtask and set to DEFINING + - create subtask input and outputs and link + - link subtask inputs to predecessor outputs + - set subtask to DEFINED + """ + logger.debug("create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft.id=%s)...", task_draft.pk) + task_blueprint = create_task_blueprint_from_task_draft(task_draft) + + obs_subtask = create_subtask_observation_control(task_blueprint) + pipe_subtask = create_subtask_pipeline_control(task_blueprint) + connect_observation_subtask_to_preprocessing_subtask(obs_subtask, pipe_subtask) + + if task_blueprint.specifications_doc.get("QA",{}).get("file_conversion",{}).get("enabled", False): + qa_file_subtask = create_observation_to_qafile_subtask(obs_subtask) + + if qa_file_subtask is not None and task_blueprint.specifications_doc.get("QA", {}).get("plots", {}).get("enabled", False): + qa_plots_subtask = create_qafile_to_qaplots_subtask(qa_file_subtask) + + return task_blueprint + + +def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft): + """ + Create a task_blueprint from the task_draft + :raises Exception if instantiate fails. + """ + logger.debug("create_task_blueprint_from_task_draft(task_draft.id=%s)", task_draft.pk) + + # Get scheduling unit blueprint from scheduling unit draft, but that is a multi object relation + # so which one is related to this task_draft? 
+ # Therefore I (RGOE) do NOT care about the relation with the Scheduling Unit BluePrint, that should be solved later + # with another Story/Task + scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create( + name="Temporary Dummy Scheduling Unit Blueprint", + description="", + requirements_doc={}, + do_cancel=False, + draft=task_draft.scheduling_unit_draft, + requirements_template=models.SchedulingUnitTemplate.objects.first() # because we have multiple now with the same name, and I dont care for now + ) + + description_str = "Task Blueprint " + task_draft.description + name_str = "Task Blueprint of " + task_draft.name + task_blueprint = TaskBlueprint.objects.create( + description=description_str, + name=name_str, + do_cancel=False, + draft=task_draft, + scheduling_unit_blueprint=scheduling_unit_blueprint, + specifications_doc=task_draft.specifications_doc, + specifications_template=task_draft.specifications_template + ) + + logger.info("create_task_blueprint_from_task_draft(task_draft.id=%s) created task_blueprint: %s", task_draft.pk, task_blueprint.pk) + return task_blueprint + + +def create_subtask_observation_control(task_blueprint: models.TaskBlueprint): + """ + Create a subtask observation control. + This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + """ + # step 0: check pre-requisites + if task_blueprint.do_cancel: + raise ValueError("Cancel create subtasks from blueprint task id=%d, because its explicit set to cancel" % + task_blueprint.id) + + # step 1: create subtask in defining state + subtask_template = SubtaskTemplate.objects.get(name='observationcontrol schema') + # This is some 'extra' specification to add to subtask ... where should it comes from, + # currently not defined in task ? + extra_specifications_doc = { + "stations": {"station_list": ["RS106", "RS205"], + "antenna_set": "HBA_DUAL_INNER", + "filter": "HBA_110_190", + "analog_pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, + "angle2": 0.5787463318245085}, + "digital_pointings": [{"name": "3C48", + "pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, + "angle2": 0.5787463318245085}, + "subbands": list(range(0, 8)) + }] + } + } + specifications_doc = add_defaults_to_json_object_for_schema(extra_specifications_doc, subtask_template.schema) + cancel = datetime.datetime.utcnow().isoformat() # I dont understand why this should be a dateformat and not a boolean ? 
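For readers unfamiliar with lofar.common.json_utils: judging from its use here and in populate.py, add_defaults_to_json_object_for_schema appears to fill missing properties from the schema's default values, so the sparse extra_specifications_doc above comes back as a complete specification document. A hedged illustration, using the preprocessing defaults quoted in subtasks.py (preprocessing_template is a hypothetical variable holding that TaskTemplate):

partial_spec = {"storagemanager": "basic"}   # only override the storage manager
full_spec = add_defaults_to_json_object_for_schema(partial_spec, preprocessing_template.schema)
# expected to yield roughly:
# {"storagemanager": "basic",
#  "flag": {"outerchannels": True, "autocorrelations": True, "rfi_strategy": "auto"},
#  "average": {"frequency_steps": 4, "time_steps": 1},
#  "demix": {"frequency_steps": 64, "time_steps": 10, "ignore_target": False, "sources": {}}}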
+ cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") + subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "specifications_doc": specifications_doc, + "task_blueprint": task_blueprint, + "specifications_template": subtask_template, + "tags": [], + "do_cancel": cancel, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": models.Cluster.objects.get(name=cluster_name) + } + subtask = Subtask.objects.create(**subtask_data) + + # step 2: create and link subtask input/output + # an observation has no input, it just produces output data + subtask_output = SubtaskOutput.objects.create(subtask=subtask) + + # step 3: set state to DEFINED + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() + return subtask + + +def create_subtask_pipeline_control(task_blueprint: models.TaskBlueprint): + """ + Create a subtask preprocessing pipeline control. + This method implements the "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + + This function is almost the same as create_subtask_observation_control and + will be refactored later. + """ + # step 0: check pre-requisites + + # step 1: create subtask in defining state + subtask_template = SubtaskTemplate.objects.get(name='pipelinecontrol schema') + specifications_doc = add_defaults_to_json_object_for_schema({}, subtask_template.schema) + cancel = datetime.datetime.utcnow().isoformat() # I don't understand why this should be a datetime and not a boolean? + cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") + + subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "specifications_doc": specifications_doc, + "task_blueprint": task_blueprint, + "specifications_template": subtask_template, + "tags": [], + "do_cancel": cancel, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": models.Cluster.objects.get(name=cluster_name) + } + subtask = models.Subtask.objects.create(**subtask_data) + + # step 2: create and link subtask input/output + # subtask_input = SubtaskInput.objects.create(subtask=subtask, + # producer=task_blueprint.produced_by, + # selection_doc="{}", + # selection_template=SubtaskInputSelectionTemplate.objects.get(name="All")) + # subtask_output = SubtaskOutput.objects.create(subtask=subtask) + + # step 3: set state to DEFINED + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() + return subtask + + + + + + + diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index 8a48f6a2120d79d5fef6dcbe0e4da3c4ff727028..bfb670fd87bbbd28b9addf622ef51b2a6b8c5385 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -6,16 +6,19 @@ from lofar.sas.tmss.tmss.tmssapp import models from lofar.common.json_utils import get_default_json_object_for_schema from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset + def subtask_template_default_specification(request, subtask_template_pk:int): subtask_template = get_object_or_404(models.SubtaskTemplate, pk=subtask_template_pk) spec = get_default_json_object_for_schema(subtask_template.schema) return
JsonResponse(spec) + def task_template_default_specification(request, task_template_pk:int): task_template = get_object_or_404(models.TaskTemplate, pk=task_template_pk) spec = get_default_json_object_for_schema(task_template.schema) return JsonResponse(spec) + def subtask_parset(request, subtask_pk:int): subtask = get_object_or_404(models.Subtask, pk=subtask_pk) parset = convert_to_parset(subtask) @@ -24,3 +27,7 @@ def subtask_parset(request, subtask_pk:int): def index(request): return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','frontend_poc/build/index.html')) #return render(request, "../../../frontend/frontend_poc/build/index.html") + +def task_specify_observation(request, pk=None): + task = get_object_or_404(models.TaskDraft, pk=pk) + return HttpResponse("response", content_type='text/plain') diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/lofar_viewset.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/lofar_viewset.py index d8da38f7ac126cb8644f22d336dc6c4c5c54d5ec..017d8e13c90b3b6f7e4dce84d8136e1553a38393 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/lofar_viewset.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/lofar_viewset.py @@ -7,6 +7,7 @@ Adds the following functionality: from rest_framework import viewsets from drf_yasg.utils import swagger_auto_schema +from rest_framework import mixins class LOFARViewSet(viewsets.ModelViewSet): """ @@ -38,3 +39,15 @@ class LOFARViewSet(viewsets.ModelViewSet): def destroy(self, request, pk=None, **kwargs): return super(LOFARViewSet, self).destroy(request, pk, **kwargs) +class LOFARNestedViewSet(mixins.CreateModelMixin, + mixins.ListModelMixin, + #mixins.RetrieveModelMixin, + viewsets.GenericViewSet): + + @swagger_auto_schema(responses={403: 'forbidden'}) + def list(self, request, **kwargs): + return super(LOFARNestedViewSet, self).list(request, **kwargs) + + @swagger_auto_schema(responses={400: 'invalid specification', 403: 'forbidden'}) + def create(self, request, **kwargs): + return super(LOFARNestedViewSet, self).create(request, **kwargs) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index edc9ab43d276c032739d4764e663f84d1eb0f264..48afa70f17b0622e3c2a740b4c97b201c3d0f554 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -4,28 +4,20 @@ This file contains the viewsets (based on the elsewhere defined data models and from django.shortcuts import get_object_or_404 from rest_framework import viewsets -from .lofar_viewset import LOFARViewSet +from .lofar_viewset import LOFARViewSet, LOFARNestedViewSet from .. import models from .. import serializers from django_filters import rest_framework as filters from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask # Don't use Ordering class from the django_filters but use rest_framework instead !! 
from rest_framework.filters import OrderingFilter - - -class subTaskFilter(filters.FilterSet): - class Meta: - model = Subtask - fields = { - 'state__value': ['exact'], - 'start_time': ['lt', 'gt'], - 'stop_time': ['lt', 'gt'], - 'cluster__name': ['exact', 'icontains'], - } +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema +from drf_yasg.inspectors import SwaggerAutoSchema from rest_framework.decorators import action from django.http import HttpResponse, JsonResponse -from drf_yasg.utils import swagger_auto_schema +from rest_framework.response import Response as RestResponse from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet from lofar.sas.tmss.tmss.tmssapp import models @@ -35,11 +27,17 @@ from datetime import datetime from lofar.common.json_utils import get_default_json_object_for_schema from lofar.common.datetimeutils import formatDatetime from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset +from drf_yasg.renderers import _SpecRenderer + + +from lofar.sas.tmss.tmss.tmssapp.renderers import PlainTextRenderer +from rest_framework.views import APIView +from rest_framework.decorators import api_view, renderer_classes -class SubtaskConnectorViewSet(LOFARViewSet): - queryset = models.SubtaskConnector.objects.all() - serializer_class = serializers.SubtaskConnectorSerializer +class TextPlainAutoSchema(SwaggerAutoSchema): + def get_produces(self): + return ["text/plain"] class SubtaskStateViewSet(LOFARViewSet): @@ -52,8 +50,8 @@ class SubtaskStateLogViewSet(LOFARViewSet): serializer_class = serializers.SubtaskStateLogSerializer def get_queryset(self): - if 'subtask_pk' in self.kwargs: - subtask = get_object_or_404(models.Subtask, pk=self.kwargs['subtask_pk']) + if 'subtask_id' in self.kwargs: + subtask = get_object_or_404(models.Subtask, pk=self.kwargs['subtask_id']) return subtask.subtaskstatelog_set.all() queryset = models.SubtaskStateLog.objects.all() @@ -85,9 +83,18 @@ class ScheduleMethodViewSet(LOFARViewSet): serializer_class = serializers.ScheduleMethodSerializer +class SubtaskTemplateFilter(filters.FilterSet): + class Meta: + model = models.SubtaskTemplate + fields = { + 'name': ['exact'], + 'version': ['lt', 'gt', 'exact'] + } + class SubtaskTemplateViewSet(LOFARViewSet): queryset = models.SubtaskTemplate.objects.all() serializer_class = serializers.SubtaskTemplateSerializer + filter_class = SubtaskTemplateFilter def get_queryset(self): queryset = models.SubtaskTemplate.objects.all() @@ -99,6 +106,14 @@ class SubtaskTemplateViewSet(LOFARViewSet): return queryset + @swagger_auto_schema(responses={200: 'The schema as a JSON object', + 403: 'forbidden'}, + operation_description="Get the schema as a JSON object.") + @action(methods=['get'], detail=True) + def schema(self, request, pk=None): + subtask_template = get_object_or_404(models.SubtaskTemplate, pk=pk) + return JsonResponse(subtask_template.schema) + @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in', 403: 'forbidden'}, operation_description="Get a JSON object with all the defaults from the schema filled in.") @@ -134,27 +149,86 @@ class DataproductFeedbackTemplateViewSet(LOFARViewSet): serializer_class = serializers.DataproductFeedbackTemplateSerializer +class SubTaskFilter(filters.FilterSet): + class Meta: + model = Subtask + fields = { + 'state__value': ['exact'], + 'start_time': ['lt', 'gt'], + 'stop_time': ['lt', 'gt'], + 'cluster__name': ['exact', 'icontains'], + } + + class SubtaskViewSet(LOFARViewSet): + 
queryset = models.Subtask.objects.all() + serializer_class = serializers.SubtaskSerializer + filter_backends = (filters.DjangoFilterBackend, OrderingFilter,) + filter_class = SubTaskFilter + ordering = ('start_time',) + + @swagger_auto_schema(auto_schema=TextPlainAutoSchema, + responses={200: 'A LOFAR parset for this subtask (as plain text)', + 403: 'forbidden', + 404: 'Not found'}, + produces='text/plain', + operation_description="Get a LOFAR parset for the specifications of this subtask") + @action(methods=['get'], detail=True, renderer_classes=[PlainTextRenderer]) + def parset(self, request, pk=None): + subtask = get_object_or_404(models.Subtask, pk=pk) + parset = convert_to_parset(subtask) + + header = "# THIS PARSET WAS GENERATED BY TMSS FROM THE SPECIFICATION OF SUBTASK ID=%d ON %s\n" % (subtask.pk, formatDatetime(datetime.utcnow())) + parset_str = header + str(parset) + return HttpResponse(parset_str, content_type='text/plain') + + + @swagger_auto_schema(responses={200: 'The predecessor subtasks of this subtask', + 403: 'forbidden'}, + operation_description="Get the predecessor subtasks of this subtask.") + @action(methods=['get'], detail=True, url_name="predecessors") + def predecessors(self, request, pk=None): + subtask = get_object_or_404(models.Subtask, pk=pk) + predecessors = self.filter_queryset(subtask.predecessors) + serializer = self.get_serializer(predecessors, many=True) + return RestResponse(serializer.data) + + + @swagger_auto_schema(responses={200: 'The successor subtasks of this subtask', + 403: 'forbidden'}, + operation_description="Get the successor subtasks of this subtask.") + @action(methods=['get'], detail=True, url_name="successors") + def successors(self, request, pk=None): + subtask = get_object_or_404(models.Subtask, pk=pk) + successors = self.filter_queryset(subtask.successors) + serializer = self.get_serializer(successors, many=True) + return RestResponse(serializer.data) + + + @swagger_auto_schema(responses={200: 'The scheduled version of this subtask', + 403: 'forbidden', + 500: 'The subtask could not be scheduled'}, + operation_description="Try to schedule this subtask.") + @action(methods=['get'], detail=True, url_name="schedule") + def schedule(self, request, pk=None): + subtask = get_object_or_404(models.Subtask, pk=pk) + from lofar.sas.tmss.tmss.tmssapp.subtasks import schedule_subtask + scheduled_subtask = schedule_subtask(subtask) + serializer = self.get_serializer(scheduled_subtask) + return RestResponse(serializer.data) + +class SubtaskNestedViewSet(LOFARNestedViewSet): queryset = models.Subtask.objects.all() serializer_class = serializers.SubtaskSerializer filter_backends = (filters.DjangoFilterBackend,) - filter_class = subTaskFilter + filter_class = SubTaskFilter + ordering = ('start_time',) def get_queryset(self): - if 'task_blueprint_pk' in self.kwargs: - task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_pk']) + if 'task_blueprint_id' in self.kwargs: + task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id']) return task_blueprint.subtasks.all() - else: - return models.Subtask.objects.all() - @swagger_auto_schema(responses={200: 'A LOFAR parset for this subtask', - 403: 'forbidden'}, - operation_description="Get a a LOFAR parset for the specifications of this subtask") - @action(methods=['get'], detail=True) - def default_specification(self, request, pk=None): - subtask = get_object_or_404(models.Subtask, pk=pk) - parset = convert_to_parset(subtask) - return
HttpResponse(str(parset), content_type='text/plain') class SubtaskInputViewSet(LOFARViewSet): queryset = models.SubtaskInput.objects.all() @@ -201,40 +275,3 @@ class DataproductHashViewSet(LOFARViewSet): serializer_class = serializers.DataproductHashSerializer -# --- JSON - -class SubtaskViewSetJSONeditorOnline(LOFARViewSet): - queryset = models.Subtask.objects.all() - serializer_class = serializers.SubtaskSerializerJSONeditorOnline - filter_backends = (filters.DjangoFilterBackend, OrderingFilter,) - filter_class = subTaskFilter - ordering = ('start_time',) - - def get_view_name(self): # override name because DRF auto-naming dot_tmssapp_scheduling_djangoes not produce something usable here - name = "Subtask" - if self.suffix: - name += ' ' + self.suffix - return name - - def get_queryset(self): - if 'task_blueprint_pk' in self.kwargs: - task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_pk']) - return task_blueprint.subtasks.all() - else: - return models.Subtask.objects.all() - - @swagger_auto_schema(responses={200: 'A LOFAR parset for this subtask (as plain text, not json)', - 403: 'forbidden', - 404: 'Not found'}, - operation_description="Get a LOFAR parset for the specifications of this subtask") - @action(methods=['get'], detail=True) - def parset(self, request, pk=None): - subtask = get_object_or_404(models.Subtask, pk=pk) - parset = convert_to_parset(subtask) - parset_str = "# THIS PARSET WAS GENERATED BY TMSS FROM THE SPECICATION OF SUBTASK ID=%d ON %s url: %s\n%s" % ( - subtask.pk, - formatDatetime(datetime.utcnow()), - request._request.get_raw_uri(), - parset,) - return HttpResponse(parset_str, content_type='text/plain') - diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 16f25c43c2a58715bfba2ed1fc87ac401db3a410..b873d55f99e92645deab3414c693c91de4c7a830 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -6,6 +6,8 @@ from django.shortcuts import get_object_or_404 from django.http import JsonResponse from django.contrib.auth.models import User from rest_framework.viewsets import ReadOnlyModelViewSet +from rest_framework import status +from rest_framework.response import Response from rest_framework.decorators import permission_classes from rest_framework.permissions import IsAuthenticatedOrReadOnly, DjangoModelPermissions @@ -13,11 +15,17 @@ from rest_framework.decorators import action from drf_yasg.utils import swagger_auto_schema -from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet +from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet, LOFARNestedViewSet from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp import serializers +from datetime import datetime from lofar.common.json_utils import get_default_json_object_for_schema +from lofar.common.datetimeutils import formatDatetime +from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template +from lofar.sas.tmss.tmss.tmssapp.subtasks import create_subtasks_from_task_blueprint + + # This is required for keeping a user reference as ForeignKey in other models @@ -56,6 +64,14 @@ class TaskTemplateViewSet(LOFARViewSet): queryset = models.TaskTemplate.objects.all() serializer_class = serializers.TaskTemplateSerializer + @swagger_auto_schema(responses={200: 'The schema as a JSON object', + 403: 'forbidden'}, + 
operation_description="Get the schema as a JSON object.") + @action(methods=['get'], detail=True) + def schema(self, request, pk=None): + template = get_object_or_404(models.TaskTemplate, pk=pk) + return JsonResponse(template.schema) + @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in', 403: 'forbidden'}, operation_description="Get a JSON object with all the defaults from the schema filled in.") @@ -104,9 +120,9 @@ class ResourceUnitViewSet(LOFARViewSet): queryset = models.ResourceUnit.objects.all() serializer_class = serializers.ResourceUnitSerializer -class TaskConnectorsViewSet(LOFARViewSet): - queryset = models.TaskConnectors.objects.all() - serializer_class = serializers.TaskConnectorsSerializer +class TaskConnectorViewSet(LOFARViewSet): + queryset = models.TaskConnector.objects.all() + serializer_class = serializers.TaskConnectorSerializer @permission_classes((DjangoModelPermissions,)) # example override of default permissions per viewset | todo: review for production @@ -119,9 +135,14 @@ class ProjectViewSet(LOFARViewSet): queryset = models.Project.objects.all() serializer_class = serializers.ProjectSerializer + +class ProjectNestedViewSet(LOFARNestedViewSet): + queryset = models.Project.objects.all() + serializer_class = serializers.ProjectSerializer + def get_queryset(self): - if 'cycle_pk' in self.kwargs: - cycle = get_object_or_404(models.Cycle, pk=self.kwargs['cycle_pk']) + if 'cycle_id' in self.kwargs: + cycle = get_object_or_404(models.Cycle, pk=self.kwargs['cycle_id']) return cycle.projects.all() else: return models.Project.objects.all() @@ -145,9 +166,14 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): queryset = models.SchedulingUnitDraft.objects.all() serializer_class = serializers.SchedulingUnitDraftSerializer + +class SchedulingUnitDraftNestedViewSet(LOFARNestedViewSet): + queryset = models.SchedulingUnitDraft.objects.all() + serializer_class = serializers.SchedulingUnitDraftSerializer + def get_queryset(self): - if 'scheduling_set_pk' in self.kwargs: - scheduling_set = get_object_or_404(models.SchedulingSet, pk=self.kwargs['scheduling_set_pk']) + if 'scheduling_set_id' in self.kwargs: + scheduling_set = get_object_or_404(models.SchedulingSet, pk=self.kwargs['scheduling_set_id']) return scheduling_set.scheduling_unit_drafts.all() else: return models.SchedulingUnitDraft.objects.all() @@ -157,21 +183,48 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): queryset = models.SchedulingUnitBlueprint.objects.all() serializer_class = serializers.SchedulingUnitBlueprintSerializer + +class SchedulingUnitBlueprintNestedViewSet(LOFARNestedViewSet): + queryset = models.SchedulingUnitBlueprint.objects.all() + serializer_class = serializers.SchedulingUnitBlueprintSerializer + def get_queryset(self): - if 'scheduling_unit_draft_pk' in self.kwargs: - scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_pk']) + if 'scheduling_unit_draft_id' in self.kwargs: + scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_id']) return scheduling_unit_draft.related_scheduling_unit_blueprint.all() else: return models.SchedulingUnitBlueprint.objects.all() class TaskDraftViewSet(LOFARViewSet): + queryset = models.TaskDraft.objects.all() + serializer_class = serializers.TaskDraftSerializer + + @swagger_auto_schema(responses={201: 'Created task blueprint, see Location in Response header', + 403: 'forbidden'}, + operation_description="Carve this 
draft task specification in stone, and make an (uneditable) blueprint out of it.") + @action(methods=['get'], detail=True, url_name="create_task_blueprint") + def create_task_blueprint(self, request, pk=None): + task_draft = get_object_or_404(models.TaskDraft, pk=pk) + task_blueprint = create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft) + + # url path magic to construct the new task_blueprint_path url + task_draft_path = request._request.path + base_path = task_draft_path[:task_draft_path.find('/task_draft')] + task_blueprint_path = '%s/task_blueprint/%s/' % (base_path, task_blueprint.id,) + + # return a response with the new serialized TaskBlueprint, and a Location to the new instance in the header + return Response(serializers.TaskBlueprintSerializer(task_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED, + headers={'Location': task_blueprint_path}) + +class TaskDraftNestedViewSet(LOFARNestedViewSet): queryset = models.TaskDraft.objects.all() serializer_class = serializers.TaskDraftSerializer def get_queryset(self): - if 'scheduling_unit_draft_pk' in self.kwargs: - scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_pk']) + if 'scheduling_unit_draft_id' in self.kwargs: + scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_id']) return scheduling_unit_draft.task_drafts.all() else: return models.TaskDraft.objects.all() @@ -181,9 +234,23 @@ class TaskBlueprintViewSet(LOFARViewSet): queryset = models.TaskBlueprint.objects.all() serializer_class = serializers.TaskBlueprintSerializer + @swagger_auto_schema(responses={200: 'Create subtasks from this task blueprint', + 403: 'forbidden'}, + operation_description="Create subtasks from this task blueprint") + @action(methods=['get'], detail=True) + def create_subtasks(self, request, pk=None): + task_blueprint = get_object_or_404(models.TaskBlueprint, pk=pk) + subtasks = create_subtasks_from_task_blueprint(task_blueprint) + return JsonResponse(subtasks) + + +class TaskBlueprintNestedViewSet(LOFARNestedViewSet): + queryset = models.TaskBlueprint.objects.all() + serializer_class = serializers.TaskBlueprintSerializer + def get_queryset(self): - if 'task_draft_pk' in self.kwargs: - task_draft = get_object_or_404(models.TaskDraft, pk=self.kwargs['task_draft_pk']) + if 'task_draft_id' in self.kwargs: + task_draft = get_object_or_404(models.TaskDraft, pk=self.kwargs['task_draft_id']) return task_draft.related_task_blueprint.all() else: return models.TaskBlueprint.objects.all() @@ -193,9 +260,14 @@ class TaskRelationDraftViewSet(LOFARViewSet): queryset = models.TaskRelationDraft.objects.all() serializer_class = serializers.TaskRelationDraftSerializer + +class TaskRelationDraftNestedViewSet(LOFARNestedViewSet): + queryset = models.TaskRelationDraft.objects.all() + serializer_class = serializers.TaskRelationDraftSerializer + def get_queryset(self): - if 'task_draft_pk' in self.kwargs: - task_draft = get_object_or_404(models.TaskDraft, pk=self.kwargs['task_draft_pk']) + if 'task_draft_id' in self.kwargs: + task_draft = get_object_or_404(models.TaskDraft, pk=self.kwargs['task_draft_id']) return task_draft.produced_by.all() | task_draft.consumed_by.all() else: return models.TaskRelationDraft.objects.all() @@ -206,42 +278,19 @@ class TaskRelationBlueprintViewSet(LOFARViewSet): serializer_class = serializers.TaskRelationBlueprintSerializer + +class 
TaskRelationBlueprintNestedViewSet(LOFARNestedViewSet): + queryset = models.TaskRelationBlueprint.objects.all() + serializer_class = serializers.TaskRelationBlueprintSerializer + + def get_queryset(self): - if 'task_blueprint_pk' in self.kwargs: - task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_pk']) + if 'task_blueprint_id' in self.kwargs: + task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id']) return task_blueprint.produced_by.all() | task_blueprint.consumed_by.all() - elif 'task_relation_draft_pk' in self.kwargs: - task_relation_draft = get_object_or_404(models.TaskRelationDraft, pk=self.kwargs['task_relation_draft_pk']) + elif 'task_relation_draft_id' in self.kwargs: + task_relation_draft = get_object_or_404(models.TaskRelationDraft, pk=self.kwargs['task_relation_draft_id']) return task_relation_draft.related_task_relation_blueprint.all() else: return models.TaskRelationBlueprint.objects.all() - -# --- JSON - -class TaskBlueprintViewSetJSONeditorOnline(LOFARViewSet): - queryset = models.TaskBlueprint.objects.all() - serializer_class = serializers.TaskBlueprintSerializerJSONeditorOnline - - def get_view_name(self): # override name because DRF auto-naming does not produce something usable here - name = "Task Blueprint" - if self.suffix: - name += ' ' + self.suffix - return name - -class TaskDraftViewSetJSONeditorOnline(LOFARViewSet): - queryset = models.TaskDraft.objects.all() - serializer_class = serializers.TaskDraftSerializerJSONeditorOnline - - def get_view_name(self): # override name because DRF auto-naming does not produce something usable here - name = "Task Draft" - if self.suffix: - name += ' ' + self.suffix - return name - - def get_queryset(self): - if 'scheduling_unit_draft_pk' in self.kwargs: - scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_pk']) - return scheduling_unit_draft.task_drafts.all() - else: - return models.TaskDraft.objects.all() \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 2b00d8f195f5b58848ce26af2d3008214f352f24..a71c43412b993858bb4095df787ebcc4595e53a0 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -27,7 +27,6 @@ from rest_framework.documentation import include_docs_urls from drf_yasg.views import get_schema_view from drf_yasg import openapi - # # Django style patterns # @@ -43,8 +42,9 @@ swagger_schema_view = get_schema_view( license=openapi.License(name="GPL License v3"), ), validators=['flex', 'ssv'], - public=True, - permission_classes=(permissions.AllowAny,), + public=False, + # public=True, + # permission_classes=(permissions.AllowAny,), ) urlpatterns = [ @@ -77,7 +77,7 @@ router.register(r'generator_template', viewsets.GeneratorTemplateViewSet) router.register(r'scheduling_unit_template', viewsets.SchedulingUnitTemplateViewSet) router.register(r'task_template', viewsets.TaskTemplateViewSet) router.register(r'work_relation_selection_template', viewsets.WorkRelationSelectionTemplateViewSet) -router.register(r'task_connectors', viewsets.TaskConnectorsViewSet) +router.register(r'task_connector', viewsets.TaskConnectorViewSet) router.register(r'default_generator_template', viewsets.DefaultGeneratorTemplateViewSet) router.register(r'default_scheduling_unit_template', viewsets.DefaultSchedulingUnitTemplateViewSet) router.register(r'default_task_template', viewsets.DefaultTaskTemplateViewSet) @@ -93,22 +93,22 @@ 
router.register(r'project_quota', viewsets.ProjectQuotaViewSet) router.register(r'scheduling_set', viewsets.SchedulingSetViewSet) router.register(r'scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) router.register(r'scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintViewSet) -#router.register(r'task_draft', viewsets.TaskDraftViewSet) # todo: default view, re-activate or remove the JSON editor one in bottom +router.register(r'task_draft', viewsets.TaskDraftViewSet) router.register(r'task_blueprint', viewsets.TaskBlueprintViewSet) router.register(r'task_relation_draft', viewsets.TaskRelationDraftViewSet) router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) # nested -router.register(r'cycle/(?P<cycle_pk>[\w\-]+)/project', viewsets.ProjectViewSet) -router.register(r'scheduling_set/(?P<scheduling_set_pk>\d+)/scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) -router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintViewSet) -#router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSet) # todo: default view, re-activate or remove the JSON editor one in bottom -router.register(r'task_draft/(?P<task_draft_pk>\d+)/task_blueprint', viewsets.TaskBlueprintViewSet) -router.register(r'task_draft/(?P<task_draft_pk>\d+)/task_relation_draft', viewsets.TaskRelationDraftViewSet) -router.register(r'task_relation_draft/(?P<task_relation_draft_pk>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) -router.register(r'task_blueprint/(?P<task_blueprint_pk>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) -router.register(r'task_blueprint/(?P<task_blueprint_pk>\d+)/subtask', viewsets.SubtaskViewSet) -router.register(r'subtask/(?P<subtask_pk>[\w\-]+)/state_log', viewsets.SubtaskStateLogViewSet) +router.register(r'cycle/(?P<cycle_id>[\w\-]+)/project', viewsets.ProjectNestedViewSet) +router.register(r'scheduling_set/(?P<scheduling_set_id>\d+)/scheduling_unit_draft', viewsets.SchedulingUnitDraftNestedViewSet) +router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_id>\d+)/scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintNestedViewSet) +router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_id>\d+)/task_draft', viewsets.TaskDraftNestedViewSet) +router.register(r'task_draft/(?P<task_draft_id>\d+)/task_blueprint', viewsets.TaskBlueprintNestedViewSet) +router.register(r'task_draft/(?P<task_draft_id>\d+)/task_relation_draft', viewsets.TaskRelationDraftNestedViewSet) +router.register(r'task_relation_draft/(?P<task_relation_draft_id>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintNestedViewSet) +router.register(r'task_blueprint/(?P<task_blueprint_id>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintNestedViewSet) +router.register(r'task_blueprint/(?P<task_blueprint_id>\d+)/subtask', viewsets.SubtaskNestedViewSet) +#router.register(r'subtask/(?P<subtask_id>[\w\-]+)/state_log', viewsets.SubtaskStateLogViewSet) # SCHEDULING @@ -120,7 +120,6 @@ router.register(r'algorithm', viewsets.AlgorithmViewSet) router.register(r'schedule_method', viewsets.ScheduleMethodViewSet) # templates -router.register(r'subtask_connector', viewsets.SubtaskConnectorViewSet) router.register(r'subtask_template', viewsets.SubtaskTemplateViewSet) router.register(r'dataproduct_specifications_template', viewsets.DataproductSpecificationsTemplateViewSet) 
router.register(r'default_subtask_template', viewsets.DefaultSubtaskTemplateViewSet) @@ -129,7 +128,7 @@ router.register(r'subtask_input_selection_template', viewsets.SubtaskInputSelect router.register(r'dataproduct_feedback_template', viewsets.DataproductFeedbackTemplateViewSet) # instances -#router.register(r'subtask', viewsets.SubtaskViewSet) # todo: default view, re-activate or remove the JSON editor one in bottom +router.register(r'subtask', viewsets.SubtaskViewSet) router.register(r'dataproduct', viewsets.DataproductViewSet) router.register(r'subtask_input', viewsets.SubtaskInputViewSet) router.register(r'subtask_output', viewsets.SubtaskOutputViewSet) @@ -139,17 +138,11 @@ router.register(r'filesystem', viewsets.FilesystemViewSet) router.register(r'cluster', viewsets.ClusterViewSet) router.register(r'dataproduct_archive_info', viewsets.DataproductArchiveInfoViewSet) router.register(r'dataproduct_hash', viewsets.DataproductHashViewSet) -router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) router.register(r'subtask_state_log', viewsets.SubtaskStateLogViewSet) router.register(r'user', viewsets.UserViewSet) # --- -# JSON -router.register(r'task_draft', viewsets.TaskDraftViewSetJSONeditorOnline) -router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSetJSONeditorOnline) -router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline) - urlpatterns.extend(router.urls) diff --git a/SAS/TMSS/src/util.py b/SAS/TMSS/src/util.py deleted file mode 100644 index 12f8b3d6ab1e812e2c19fc51722996a5c71379a2..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/util.py +++ /dev/null @@ -1,49 +0,0 @@ -import logging -import requests - -logger = logging.getLogger(__file__) - -# usage example: -# -# with TMSSsession('paulus', 'pauluspass') as session: -# response = session.get(url='http://localhost:8008/api/task_draft/') -# print(response) - - -class TMSSsession(object): - - def __init__(self, username, password, host): - self.session = requests.session() - self.username = username - self.password = password - self.host = host - - def __enter__(self): - self.session.__enter__() - self.session.verify = False - - # get authentication page of OIDC through TMSS redirect - response = self.session.get(self.host + '/oidc/authenticate/', allow_redirects=True) - csrftoken = self.session.cookies['csrftoken'] - - # post user credentials to login page, also pass csrf token - data = {'username': self.username, 'password': self.password, 'csrfmiddlewaretoken': csrftoken} - response = self.session.post(url=response.url, data=data, allow_redirects=True) - - # raise when sth went wrong - if "The username and/or password you specified are not correct" in response.content.decode('utf8'): - raise ValueError("The username and/or password you specified are not correct") - if response.status_code != 200: - raise ConnectionError(response.content.decode('utf8')) - - # return the authenticated session as user context - return self.session - - def __exit__(self, type, value, traceback): - try: - # logout user - self.session.get(self.host + '/api/logout/', allow_redirects=True) - self.session.__exit__(self, type, value, traceback) - except: - pass - diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index 19041224c4f4e355b3eaa0ba112e3c17bed1c56e..f3ff1838d84942847f77ed41e89bfb3ac6f7160a 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -27,6 +27,9 @@ if(BUILD_TESTING) 
lofar_add_test(t_subtask_validation) lofar_add_test(t_tmssapp_specification_permissions) lofar_add_test(t_tmss_session_auth) + lofar_add_test(t_subtasks) + lofar_add_test(t_parset_adapter) + lofar_add_test(t_specify_observation) set_tests_properties(t_tmssapp_scheduling_REST_API PROPERTIES TIMEOUT 300) set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 300) diff --git a/SAS/TMSS/test/ldap_test_service.py b/SAS/TMSS/test/ldap_test_service.py index 59eb0b60c041495333804830b90075ed0d472baa..6cb6921e83745cedcff267a5e139b6669963a7a7 100644 --- a/SAS/TMSS/test/ldap_test_service.py +++ b/SAS/TMSS/test/ldap_test_service.py @@ -5,6 +5,9 @@ logger = logging.getLogger(__name__) logging_already_configured = len(logging.root.handlers)>0 from ldap_test import LdapServer +from ldap_test.server import DEFAULT_GATEWAY_PORT, DEFAULT_PYTHON_PROXY_PORT +from py4j.java_gateway import Py4JNetworkError +from datetime import datetime, timedelta if not logging_already_configured: # the 3rd party ldap_test module erroneously does a logging.basicConfig upon module import... @@ -18,11 +21,13 @@ from optparse import OptionParser from lofar.common.util import waitForInterrupt, find_free_port from lofar.common.testing.dbcredentials import TemporaryCredentials +from lofar.common.locking import NamedAtomicLock class TestLDAPServer(): ''' A helper class which instantiates a running LDAP server (not interfering with any other test/production LDAP servers) Best used in a 'with'-context so the server is stoped automagically. ''' + _named_lock = NamedAtomicLock('TestLDAPServer') def __init__(self, user: str = 'test', password: str = 'test') -> None: self._tmp_creds = TemporaryCredentials(user=user, password=password) @@ -55,61 +60,71 @@ class TestLDAPServer(): '''instantiate the isolated postgres server''' logger.info('creating test-LDAP instance...') - self._tmp_creds.dbcreds.type = 'LDAP' - self._tmp_creds.dbcreds.host = '127.0.0.1' - self._tmp_creds.dbcreds.port = find_free_port() - self._tmp_creds.create() - - logger.info("Using dbcreds '%s' to start and configure LDAP server: %s", - self.dbcreds_id, self.dbcreds.stringWithHiddenPassword()) - - self._server = LdapServer({'port': self.dbcreds.port, - 'base': {'objectclass': ['domain'], - 'dn': 'o=lofar,c=eu', - 'attributes': {'o': 'lofar'}}, - 'entries': [ - {'objectclass': 'organizationUnit', - 'dn': 'ou=Users,o=lofar,c=eu', - 'attributes': {'ou': 'Users'}}, - {'objectclass': 'lofarPerson', - 'dn': 'cn=paulus,ou=users,o=lofar,c=eu', - 'attributes': {'cn': 'paulus', - 'userPassword': 'pauluspass', - 'mail': 'paulus@boskabouter.nl', - 'givenName': 'Paulus', - 'sn': 'Boskabouter', - 'lofarPersonSystemrole': 'cn=support,ou=Roles,o=lofar,c=eu'}}, - {'objectclass': 'lofarPerson', - 'dn': 'cn=paula,ou=users,o=lofar,c=eu', - 'attributes': {'cn': 'paula', - 'userPassword': 'paulapass', - 'mail': 'paulus@boskabouter.nl', - 'givenName': 'Paulus', - 'sn': 'Boskabouter', - 'lofarPersonSystemrole': 'cn=user,ou=Roles,o=lofar,c=eu'}}, - {'objectclass': 'lofarPerson', - 'dn': 'cn=%s,ou=users,o=lofar,c=eu' % self.dbcreds.user, - 'attributes': {'cn': self.dbcreds.user, - 'userPassword': self.dbcreds.password, - 'mail': '%s@lofar.test' % self.dbcreds.user, - 'givenName': self.dbcreds.user, - 'sn': 'lofar_test'}}, - {'objectclass': 'organizationUnit', - 'dn': 'ou=Roles,o=lofar,c=eu', - 'attributes': {'ou': 'Roles'}}, - {'objectclass': 'lofarSystemrole', - 'dn': 'cn=user,ou=roles,o=lofar,c=eu', - 'attributes': {'cn': 'user'}}, - {'objectclass': 'lofarSystemrole', - 'dn': 
'cn=support,ou=roles,o=lofar,c=eu', - 'attributes': {'cn': 'support'}}, - ] - }) - - self._server.start() - os.environ["TMSS_LDAPCREDENTIALS"] = self.dbcreds_id - logger.info('LDAP server running and listening on port %s...', self.dbcreds.port) - logger.info('LDAP test user/pass: %s %s...', self.dbcreds.user, self.dbcreds.password) + with self._named_lock: + self._tmp_creds.dbcreds.type = 'LDAP' + self._tmp_creds.dbcreds.host = '127.0.0.1' + self._tmp_creds.dbcreds.port = find_free_port() + self._tmp_creds.create() + + logger.info("Using dbcreds '%s' to start and configure LDAP server: %s", + self.dbcreds_id, self.dbcreds.stringWithHiddenPassword()) + + start_time = datetime.utcnow() + while datetime.utcnow()-start_time < timedelta(minutes=1): + try: + self._server = LdapServer(java_gateway_port=find_free_port(DEFAULT_GATEWAY_PORT), + python_proxy_port=find_free_port(DEFAULT_PYTHON_PROXY_PORT), + config={'port': self.dbcreds.port, + 'base': {'objectclass': ['domain'], + 'dn': 'o=lofar,c=eu', + 'attributes': {'o': 'lofar'}}, + 'entries': [ + {'objectclass': 'organizationUnit', + 'dn': 'ou=Users,o=lofar,c=eu', + 'attributes': {'ou': 'Users'}}, + {'objectclass': 'lofarPerson', + 'dn': 'cn=paulus,ou=users,o=lofar,c=eu', + 'attributes': {'cn': 'paulus', + 'userPassword': 'pauluspass', + 'mail': 'paulus@boskabouter.nl', + 'givenName': 'Paulus', + 'sn': 'Boskabouter', + 'lofarPersonSystemrole': 'cn=support,ou=Roles,o=lofar,c=eu'}}, + {'objectclass': 'lofarPerson', + 'dn': 'cn=paula,ou=users,o=lofar,c=eu', + 'attributes': {'cn': 'paula', + 'userPassword': 'paulapass', + 'mail': 'paulus@boskabouter.nl', + 'givenName': 'Paulus', + 'sn': 'Boskabouter', + 'lofarPersonSystemrole': 'cn=user,ou=Roles,o=lofar,c=eu'}}, + {'objectclass': 'lofarPerson', + 'dn': 'cn=%s,ou=users,o=lofar,c=eu' % self.dbcreds.user, + 'attributes': {'cn': self.dbcreds.user, + 'userPassword': self.dbcreds.password, + 'mail': '%s@lofar.test' % self.dbcreds.user, + 'givenName': self.dbcreds.user, + 'sn': 'lofar_test'}}, + {'objectclass': 'organizationUnit', + 'dn': 'ou=Roles,o=lofar,c=eu', + 'attributes': {'ou': 'Roles'}}, + {'objectclass': 'lofarSystemrole', + 'dn': 'cn=user,ou=roles,o=lofar,c=eu', + 'attributes': {'cn': 'user'}}, + {'objectclass': 'lofarSystemrole', + 'dn': 'cn=support,ou=roles,o=lofar,c=eu', + 'attributes': {'cn': 'support'}}, + ] + }) + + self._server.start() + os.environ["TMSS_LDAPCREDENTIALS"] = self.dbcreds_id + logger.info('LDAP server running and listening on port %s...', self.dbcreds.port) + logger.info('LDAP test user/pass: %s %s...', self.dbcreds.user, self.dbcreds.password) + return + except Py4JNetworkError as e: + logger.warning("TestLDAPServer could not be started, retrying with next free port. Error: %s", e) + raise TimeoutError("%s could not be started within 60 seconds. bailing out..." 
% self.__class__.__name__) def stop(self): '''stop the running postgres server''' diff --git a/SAS/TMSS/test/t_parset_adapter.py b/SAS/TMSS/test/t_parset_adapter.py index d68d9668b2760c835854e6ae6da981ad979a7657..5d1c905240233b742f8cfab22fe7488c9e09d750 100755 --- a/SAS/TMSS/test/t_parset_adapter.py +++ b/SAS/TMSS/test/t_parset_adapter.py @@ -44,11 +44,11 @@ from lofar.common.json_utils import get_default_json_object_for_schema class ParsetAdapterTest(unittest.TestCase): def test_01(self): - subtask_template = models.SubtaskTemplate.objects.get(name='obscontrol schema') + subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema') specifications_doc = get_default_json_object_for_schema(subtask_template.schema) for dp in specifications_doc['stations']['digital_pointings']: dp['subbands'] = list(range(8)) - subtask_data = Subtask_test_data(subtask_template, specifications_doc) + subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc) subtask:models.Subtask = models.Subtask.objects.create(**subtask_data) subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask)) dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output)) diff --git a/SAS/TMSS/test/t_specify_observation.py b/SAS/TMSS/test/t_specify_observation.py new file mode 100755 index 0000000000000000000000000000000000000000..f80b138af737d0b6d9ef852e24c4342470aeeaa1 --- /dev/null +++ b/SAS/TMSS/test/t_specify_observation.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id: $ + +import os +import unittest +import requests + +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +# Do Mandatory setup step: +# use setup/teardown magic for tmss test database, ldap server and django server +# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) +from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * + +from lofar.sas.tmss.test.tmss_test_data_django_models import * + +# import and setup rest test data creator +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator +rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template + + +class SpecifyObservationFromTaskDraftTest(unittest.TestCase): + def test_create_task_blueprint(self): + """ + Use the 'default' task draft (ID=1) to specify an observation + Check that the task draft name is equal to the task draft name specified in the created task blueprint + Check with a REST call that 4 subtasks are created and that these subtasks have state value 'defined' + """ + task_draft = models.TaskDraft.objects.get(id=1) + res_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/1/', 200) + task_blueprint = create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft) + self.assertEqual(task_draft.name, task_blueprint.draft.name) + res_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/1/', 200) + self.assertEqual(len(res_task_blueprint['subtasks']), 4) + self.assertEqual(res_task_blueprint['specifications_template'], res_task_draft['specifications_template']) + for subtask_url in res_task_blueprint['subtasks']: + res_subtask = GET_and_assert_equal_expected_code(self, subtask_url, 200) + state_value = GET_and_assert_equal_expected_code(self, res_subtask['state'], 200)['value'] + self.assertEqual(state_value, "defined") + + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_specify_observation.run b/SAS/TMSS/test/t_specify_observation.run new file mode 100755 index 0000000000000000000000000000000000000000..d563b37623a3f667cb891d7872bd230ed2d88f6e --- /dev/null +++ b/SAS/TMSS/test/t_specify_observation.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_specify_observation.py + diff --git a/SAS/TMSS/test/t_specify_observation.sh b/SAS/TMSS/test/t_specify_observation.sh new file mode 100755 index 0000000000000000000000000000000000000000..dd467716958fac3d617aca0642fd6dff0daee501 --- /dev/null +++ b/SAS/TMSS/test/t_specify_observation.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_specify_observation \ No newline at end of file diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py new file mode 100755 index 0000000000000000000000000000000000000000..2983af55885bbc0c862da3ab094c1cc7c81a613d --- /dev/null +++ b/SAS/TMSS/test/t_subtasks.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id: $ + +import os +import unittest + +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +# Do Mandatory setup step: +# use setup/teardown magic for tmss test database, ldap server and django server +# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) +from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * + + +from lofar.sas.tmss.test.tmss_test_data_django_models import * + +# import and setup rest test data creator +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + +from lofar.sas.tmss.tmss.tmssapp import models + +from lofar.sas.tmss.tmss.tmssapp.subtasks import connect_observation_subtask_to_preprocessing_subtask + +class SubtasksTest(unittest.TestCase): + + @staticmethod + def create_subtask_template(template_type: object): + subtask_template_data = SubtaskTemplate_test_data() + subtask_template_data['type'] = template_type + return models.SubtaskTemplate.objects.create(**subtask_template_data) + + @staticmethod + def create_subtask(template_type: object): + subtask_template = SubtasksTest.create_subtask_template(template_type) + subtask_data = Subtask_test_data(subtask_template=subtask_template) + return models.Subtask.objects.create(**subtask_data) + + def test_connect_observation_to_preprocessing_fails_on_wrong_subtask_type(self): + subtask_1 = self.create_subtask(models.SubtaskType.objects.get(value='observation')) + subtask_2 = self.create_subtask(models.SubtaskType.objects.get(value='observation')) + with self.assertRaises(ValueError): + connect_observation_subtask_to_preprocessing_subtask(subtask_1, subtask_2) + + def test_connect_observation_to_preprocessing_succeeds_on_correct_subtask_type(self): + subtask_1 = self.create_subtask(models.SubtaskType.objects.get(value='observation')) + subtask_2 = self.create_subtask(models.SubtaskType.objects.get(value='pipeline')) + subtaskoutput = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask_1)) + models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtaskoutput)) + connect_observation_subtask_to_preprocessing_subtask(subtask_1, subtask_2) + + def test_connect_observation_to_preprocessing_produces_correct_dataproducts(self): + subtask_1 = self.create_subtask(models.SubtaskType.objects.get(value='observation')) + subtask_2 = self.create_subtask(models.SubtaskType.objects.get(value='pipeline')) + subtaskoutput = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask_1)) + dataproducts = [] + for f_in in ['whatever.ms', 'L1234_SB001.ms', 'L1234__SB002_XYZ.ms']: + dataproducts.append(models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtaskoutput, + filename=f_in))) + 
connect_observation_subtask_to_preprocessing_subtask(subtask_1, subtask_2) + + # check that observation output dataproducts are input to pipeline + for dp in dataproducts: + self.assertTrue(dp in subtask_2.inputs.first().dataproducts.all()) + + # check that pipeline output dataproducts have appropriate names + out_filenames = [dataproduct.filename for dataproduct in subtask_2.outputs.first().dataproducts.all()] + for f_out in ['L%s_whatever.ms' % subtask_2.pk, 'L%s_SB001.ms' % subtask_2.pk, 'L%s__SB002_XYZ.ms' % subtask_2.pk]: + self.assertTrue(f_out in out_filenames) + + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_subtasks.run b/SAS/TMSS/test/t_subtasks.run new file mode 100755 index 0000000000000000000000000000000000000000..a93185cbf4ee42f7f02d8d39709bd41c437ab679 --- /dev/null +++ b/SAS/TMSS/test/t_subtasks.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_subtasks.py + diff --git a/SAS/TMSS/test/t_subtasks.sh b/SAS/TMSS/test/t_subtasks.sh new file mode 100755 index 0000000000000000000000000000000000000000..1afbe68fd0c6a7b6acdea0dedf2ff20bdec8baf4 --- /dev/null +++ b/SAS/TMSS/test/t_subtasks.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_subtasks \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmss_session_auth.py b/SAS/TMSS/test/t_tmss_session_auth.py index 644f517c05dfa8426dfcaa7eccbf2b8b000bb541..423a1cdf048041efa12c6ecdd886fd1d6f0134dd 100755 --- a/SAS/TMSS/test/t_tmss_session_auth.py +++ b/SAS/TMSS/test/t_tmss_session_auth.py @@ -38,7 +38,7 @@ from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) -from lofar.sas.tmss.util import TMSSsession +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession from lofar.common.test_utils import integration_test @@ -97,7 +97,7 @@ class OIDCSession(unittest.TestCase): @integration_test def test_success_using_correct_credentials(self): - with TMSSsession(AUTH.username, AUTH.password, BASE_URL.replace('/api', '')) as session: + with TMSSsession(AUTH.username, AUTH.password, BASE_URL.replace('/api', '')).session as session: r = session.get(BASE_URL + '/task_draft/?format=api') self.assertEqual(r.status_code, 200) self.assertTrue("Task Draft List" in r.content.decode('utf8')) diff --git a/SAS/TMSS/test/t_tmss_test_database.py b/SAS/TMSS/test/t_tmss_test_database.py index 3e99b742d72f12c0f4456f6191aff3f05b7153df..708dcd2f4724181214093099fbc1cf96a3f883b5 100755 --- a/SAS/TMSS/test/t_tmss_test_database.py +++ b/SAS/TMSS/test/t_tmss_test_database.py @@ -24,7 +24,7 @@ import unittest import logging from datetime import datetime -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) from lofar.common.postgres import PostgresDatabaseConnection, FETCH_ONE from lofar.sas.tmss.test.test_utils import TMSSPostgresTestMixin diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py index 1b7495c610e311eeea6f23b243bd6cf8688c5494..611b2ea8abcb98dc07d2092deb91db1dbcdf99c2 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py @@ -33,6 +33,10 @@ import logging logger = logging.getLogger(__name__) logging.basicConfig(format='%(asctime)s 
%(levelname)s %(message)s', level=logging.INFO) +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + # Do Mandatory setup step: # use setup/teardown magic for tmss test database, ldap server and django server # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) @@ -288,6 +292,14 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase): class SubtaskTestCase(unittest.TestCase): @classmethod def setUpClass(cls) -> None: + # we should not depend on "previous" data + models.SubtaskInput.objects.all().delete() + models.DataproductHash.objects.all().delete() + models.DataproductArchiveInfo.objects.all().delete() + models.DataproductTransform.objects.all().delete() + models.Dataproduct.objects.all().delete() + models.Subtask.objects.all().delete() + cls.cluster_url = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/') cls.task_blueprint_data = test_data_creator.TaskBlueprint() cls.task_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_blueprint_data, '/task_blueprint/') @@ -436,26 +448,14 @@ class SubtaskTestCase(unittest.TestCase): # setup test_data_1 = Subtask_test_data() - test_data_2 = Subtask_test_data() tbt_test_data_1 = TaskBlueprint_test_data("task blue print one") - tbt_test_data_2 = TaskBlueprint_test_data("task blue print two") task_blueprint_1 = models.TaskBlueprint.objects.create(**tbt_test_data_1) - task_blueprint_2 = models.TaskBlueprint.objects.create(**tbt_test_data_2) test_data_1 = dict(test_data_1) test_data_1['task_blueprint'] = task_blueprint_1 subtask_1 = models.Subtask.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['task_blueprint'] = task_blueprint_2 - subtask_2 = models.Subtask.objects.create(**test_data_2) # assert the returned list contains related items, a list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/subtask/' % task_blueprint_2.id, test_data_2, 1) - # assert an existing related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/subtask/%s/' % - (task_blueprint_2.id, subtask_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, - BASE_URL + '/task_blueprint/%s/subtask/%s/' % (task_blueprint_2.id, subtask_1.id), 404) + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/subtask/' % task_blueprint_1.id, test_data_1, 1) def test_subtask_state_log_records(self): st_test_data = test_data_creator.Subtask() @@ -606,145 +606,6 @@ class DataproductTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct/%s/' % id2, test_data_2) -class SubtaskConnectorTestCase(unittest.TestCase): - def test_subtask_connector_list_apiformat(self): - r = requests.get(BASE_URL + '/subtask_connector/?format=api', auth=AUTH) - self.assertEqual(r.status_code, 200) - self.assertTrue("Subtask Connector List" in r.content.decode('utf8')) - - def test_subtask_connector_GET_nonexistant_raises_error(self): - GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_connector/1234321/', 404) - - def test_subtask_connector_POST_and_GET(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 
201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - def test_subtask_connector_PUT_invalid_raises_error(self): - stc_test_data = test_data_creator.SubtaskConnector() - - PUT_and_assert_expected_response(self, BASE_URL + '/subtask_connector/9876789876/', stc_test_data, 404, {}) - - def test_subtask_connector_PUT(self): - stc_test_data = test_data_creator.SubtaskConnector() - stc_test_data2 = test_data_creator.SubtaskConnector() - - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - # PUT new values, verify - PUT_and_assert_expected_response(self, url, stc_test_data2, 200, stc_test_data2) - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data2) - - def test_subtask_connector_PATCH(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - test_patch = {"role": BASE_URL + '/role/calibrator/', - "datatype": BASE_URL + '/datatype/quality/', } - - # PATCH item and verify - PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) - expected_data = dict(stc_test_data) - expected_data.update(test_patch) - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - - def test_subtask_connector_DELETE(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - # DELETE and check it's gone - DELETE_and_assert_gone(self, url) - - def test_subtask_connector_PROTECT_behavior_on_role_deleted(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # create dependency that is safe to delete (enums are not populated / re-established between tests) - role_data = {'value': 'kickme'} - POST_and_assert_expected_response(self, BASE_URL + '/role/', role_data, 201, role_data) - role_url = BASE_URL + '/role/kickme/' - - - # POST new item and verify - test_data = dict(stc_test_data) - test_data['role'] = role_url - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', test_data, 201, test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data) - - # Try to DELETE dependency, verify that was not successful - # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
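Aside: the PROTECT-behaviour tests in this file (both the SubtaskConnector ones being removed here and the role/datatype ones that remain) all follow the same recipe: DELETE a referenced row over the REST API, expect Django to refuse with a ProtectedError that currently surfaces as a 500 debug page, and verify the row is still there. A minimal sketch of that recipe, assuming a requests-based helper; the helper name and signature below are illustrative only and are not part of the TMSS test utilities:

    import requests

    def assert_delete_is_protected(test, dependency_url, auth):
        # Django reports the ProtectedError as a 500 debug page rather than clean JSON,
        # which is exactly what the assertions in these tests look for.
        response = requests.delete(dependency_url, auth=auth)
        test.assertEqual(500, response.status_code)
        test.assertIn("ProtectedError", response.content.decode("utf8", errors="replace"))
        # the protected dependency must still be retrievable afterwards
        test.assertEqual(200, requests.get(dependency_url, auth=auth).status_code)
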
- response = requests.delete(role_url, auth=AUTH) - self.assertEqual(500, response.status_code) - self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, role_url, role_data) - - def test_subtask_connector_PROTECT_behavior_on_datatype_deleted(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # create new dependency that is safe to delete (enums are not populated / re-established between tests) - datatype_data = {'value': 'kickme'} - POST_and_assert_expected_response(self, BASE_URL + '/datatype/', datatype_data, 201, datatype_data) - datatype_url = BASE_URL + '/datatype/kickme/' - - # POST new item and verify - test_data = dict(stc_test_data) - test_data['datatype'] = datatype_url - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', test_data, 201, test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data) - - # Try to DELETE dependency, verify that was not successful - # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... - response = requests.delete(datatype_url, auth=AUTH) - self.assertEqual(500, response.status_code) - self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, datatype_url, datatype_data) - - def test_GET_SubtaskConnector_list_view_shows_entry(self): - - test_data_1 = SubtaskConnector_test_data() - models.SubtaskConnector.objects.create(**test_data_1) - nbr_results = models.SubtaskConnector.objects.count() - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask_connector/', test_data_1, nbr_results) - - def test_GET_SubtaskConnector_view_returns_correct_entry(self): - - # setup - test_data_1 = SubtaskConnector_test_data() - test_data_2 = SubtaskConnector_test_data() - id1 = models.SubtaskConnector.objects.create(**test_data_1).id - id2 = models.SubtaskConnector.objects.create(**test_data_2).id - # assert - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s/' % id1, test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s/' % id2, test_data_2) - - def test_SubtaskConnector_allows_setting_dataformats(self): - """ - Other then through the API view, we cannot assign ManyToMany on creation, but have to set it later - """ - test_data_1 = dict(SubtaskConnector_test_data()) - test_data_1['inputs'] = None - test_data_2 = SubtaskConnector_test_data() - tior = models.SubtaskConnector.objects.create(**test_data_2) - tior.dataformats.set([models.Dataformat.objects.get(value='Beamformed'), - models.Dataformat.objects.get(value='MeasurementSet')]) - tior.save() - # assert - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s' % tior.id, test_data_2) - - class SubtaskInputTestCase(unittest.TestCase): @classmethod def setUpClass(cls) -> None: @@ -753,7 +614,6 @@ class SubtaskInputTestCase(unittest.TestCase): cls.task_relation_blueprint_data = test_data_creator.TaskRelationBlueprint() cls.task_relation_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_relation_blueprint_data, '/task_relation_blueprint/') cls.dataproduct_urls = [test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/'), test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')] - cls.subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/') 
cls.subtask_output_data = test_data_creator.SubtaskOutput() cls.subtask_output_url = test_data_creator.post_data_and_get_url(cls.subtask_output_data, '/subtask_output/') cls.subtask_input_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/') @@ -767,7 +627,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_input/1234321/', 404) def test_subtask_input_POST_and_GET(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -775,12 +635,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) def test_subtask_input_PUT_invalid_raises_error(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) PUT_and_assert_expected_response(self, BASE_URL + '/subtask_input/9876789876/', sti_test_data, 404, {}) def test_subtask_input_PUT(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -788,12 +648,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) # PUT new values, verify - sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, 
subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) PUT_and_assert_expected_response(self, url, sti_test_data2, 200, sti_test_data2) GET_OK_and_assert_equal_expected_response(self, url, sti_test_data2) def test_subtask_input_PATCH(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -816,7 +676,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_subtask_input_DELETE(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -832,7 +692,7 @@ class SubtaskInputTestCase(unittest.TestCase): task_blueprint_url=self.subtask_data['task_blueprint'], specifications_template_url=self.subtask_data['specifications_template'], specifications_doc=self.subtask_data['specifications_doc']), '/subtask/') - sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -844,28 +704,12 @@ class SubtaskInputTestCase(unittest.TestCase): # assert item gone 
GET_and_assert_equal_expected_code(self, url, 404) - def test_subtask_input_SET_NULL_behavior_on_connector_deleted(self): - subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/') - sti_test_data = test_data_creator.SubtaskInput(subtask_connector_url=subtask_connector_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) - - # POST new item, verify - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) - - # DELETE dependency and check it's gone - DELETE_and_assert_gone(self, subtask_connector_url) - - # assert item reference is set null - expected_data = dict(sti_test_data) - expected_data['connector'] = None - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self): # make new task_relation_blueprint instance, but reuse related data for speed task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(draft_url=self.task_relation_blueprint_data['draft'], template_url=self.task_relation_blueprint_data['selection_template'], input_url=self.task_relation_blueprint_data['input'], output_url=self.task_relation_blueprint_data['output'], consumer_url=self.task_relation_blueprint_data['consumer'], producer_url=self.task_relation_blueprint_data['producer']), '/task_relation_blueprint/') - sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -881,8 +725,8 @@ class SubtaskInputTestCase(unittest.TestCase): def test_subtask_input_PROTECT_behavior_on_producer_deleted(self): # make new subtask_output_url instance, but reuse related data for speed - subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=self.subtask_output_data['subtask'], subtask_connector_url=self.subtask_output_data['connector']), '/subtask_output/') - sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=self.subtask_output_data['subtask']), '/subtask_output/') + sti_test_data = 
test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -901,7 +745,6 @@ class SubtaskInputTestCase(unittest.TestCase): subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, - subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url) # POST with dependency @@ -952,8 +795,6 @@ class SubtaskOutputTestCase(unittest.TestCase): def setUpClass(cls) -> None: cls.subtask_data = test_data_creator.Subtask() cls.subtask_url = test_data_creator.post_data_and_get_url(cls.subtask_data, '/subtask/') - cls.subtask_connector_data = test_data_creator.SubtaskConnector() - cls.subtask_connector_url = test_data_creator.post_data_and_get_url(cls.subtask_connector_data, '/subtask_connector/') def test_subtask_output_list_apiformat(self): r = requests.get(BASE_URL + '/subtask_output/?format=api', auth=AUTH) @@ -964,7 +805,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_output/1234321/', 404) def test_subtask_output_POST_and_GET(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -973,12 +814,12 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sto_test_data) def test_subtask_output_PUT_invalid_raises_error(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) PUT_and_assert_expected_response(self, BASE_URL + '/subtask_output/9876789876/', sto_test_data, 404, {}) def test_subtask_output_PUT(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) - sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) + sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,sto_test_data) @@ -990,8 +831,8 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sto_test_data2) def test_subtask_output_PATCH(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) - sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) + sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST new item, 
verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -1009,7 +850,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_subtask_output_DELETE(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -1023,7 +864,7 @@ class SubtaskOutputTestCase(unittest.TestCase): def test_subtask_output_CASCADE_behavior_on_subtask_deleted(self): # make new subtask_url instance, but reuse related data for speed subtask_url = test_data_creator.post_data_and_get_url(self.subtask_data, '/subtask/') - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)['url'] @@ -1035,23 +876,6 @@ class SubtaskOutputTestCase(unittest.TestCase): # assert item gone GET_and_assert_equal_expected_code(self, url, 404) - def test_subtask_output_SET_NULL_behavior_on_connector_deleted(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) - - # POST new item, verify - url = \ - POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)[ - 'url'] - GET_OK_and_assert_equal_expected_response(self, url, sto_test_data) - - # DELETE dependency and check it's gone - DELETE_and_assert_gone(self, sto_test_data['connector']) - - # assert item reference is set null - expected_data = dict(sto_test_data) - expected_data['connector'] = None - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - def test_GET_SubtaskOutput_list_view_shows_entry(self): test_data_1 = SubtaskOutput_test_data() @@ -1718,7 +1542,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_archive_info/%s/' % id2, test_data_2) -class SubtaskQuery(unittest.TestCase): +class SubtaskQueryTestCase(unittest.TestCase): """ Test queries on the subtask REST api: - query cluster only @@ -1758,7 +1582,7 @@ class SubtaskQuery(unittest.TestCase): Create multiple subtasks for a given number of days with start_time 2 hours from now and stop_time 4 hours from now """ - cluster = SubtaskQuery.create_cluster(cluster_name) + cluster = SubtaskQueryTestCase.create_cluster(cluster_name) for day_idx in range(0, total_number): start_time = datetime.now() + timedelta(hours=2, days=day_idx) stop_time = datetime.now() + timedelta(hours=4, days=day_idx) @@ -1777,11 +1601,19 @@ class SubtaskQuery(unittest.TestCase): clusterB 50 subtasks with start 2hr and stop time 4hr from now, recurring 'every day' clusterC 30 subtasks with start 2hr and stop time 4hr from now, recurring 'every day' """ - cluster = SubtaskQuery.create_cluster("clusterA") + # we're counting (filtered) subtasks, so we should not depend on "previous" data + models.SubtaskInput.objects.all().delete() + models.DataproductHash.objects.all().delete() + models.DataproductArchiveInfo.objects.all().delete() + 
models.DataproductTransform.objects.all().delete() + models.Dataproduct.objects.all().delete() + models.Subtask.objects.all().delete() + + cluster = SubtaskQueryTestCase.create_cluster("clusterA") subtask_data = Subtask_test_data(cluster=cluster) models.Subtask.objects.create(**subtask_data) - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): - SubtaskQuery.create_multiple_subtask_object(period_length_in_days, cluster_name) + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): + SubtaskQueryTestCase.create_multiple_subtask_object(period_length_in_days, cluster_name) def test_query_cluster_only(self): @@ -1792,7 +1624,7 @@ class SubtaskQuery(unittest.TestCase): response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterA', auth=AUTH) self.check_response_OK_and_result_count(response, 1) - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): logger.info("Check query on %s" % cluster_name) response = requests.get(BASE_URL + '/subtask/?cluster__name=%s' % cluster_name, auth=AUTH) self.check_response_OK_and_result_count(response, period_length_in_days) @@ -1802,7 +1634,7 @@ class SubtaskQuery(unittest.TestCase): Check if I can query on the start and stop time and cluster name (B and C) over a period Check status code and response length """ - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): start_time = datetime.now() stop_time = start_time + timedelta(days=period_length_in_days) expected_count = period_length_in_days @@ -1857,7 +1689,7 @@ class SubtaskQuery(unittest.TestCase): Check if I can query on the start time and cluster name (B and C) over a period Check status code and response length """ - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): start_time = datetime.now() expected_count = period_length_in_days logger.info("Check query greater than start_time (%s) for %s " % @@ -1880,7 +1712,7 @@ class SubtaskQuery(unittest.TestCase): Check if I can query on the stop time and cluster name (B and C) over a period Check status code and response length """ - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): stop_time = datetime.now() + timedelta(days=period_length_in_days) logger.info("Check query less than stop_time (%s) for %s " % (formatDatetime(stop_time), cluster_name)) @@ -1910,7 +1742,7 @@ class SubtaskQuery(unittest.TestCase): self.check_response_OK_and_result_count(response, 0) # Check how many is 'ALL' - total_subtasks = SubtaskQuery.get_total_number_of_subtasks() + total_subtasks = SubtaskQueryTestCase.get_total_number_of_subtasks() response = requests.get(BASE_URL + '/subtask/?cluster__error_in_query=clusterA', auth=AUTH) self.check_response_OK_and_result_count(response, total_subtasks) @@ -1934,11 +1766,12 @@ class SubtaskQuery(unittest.TestCase): def test_query_state_only(self): """ Check the 
query on state value. Check status code and response length - All states are scheduling, None are defined + All states are defining (by default), None are defined """ - logger.info("Check query on state scheduling") - response = requests.get(BASE_URL + '/subtask/?state__value=scheduling', auth=AUTH) - self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + logger.info("Check query on state defining") + total_number_of_subtasks = SubtaskQueryTestCase.get_total_number_of_subtasks() + response = requests.get(BASE_URL + '/subtask/?state__value=defining', auth=AUTH) + self.check_response_OK_and_result_count(response, total_number_of_subtasks) response = requests.get(BASE_URL + '/subtask/?state__value=defined', auth=AUTH) self.check_response_OK_and_result_count(response, 0) @@ -1953,7 +1786,7 @@ class SubtaskQuery(unittest.TestCase): """ logger.info("Check query on ordering ascending start time") response = requests.get(BASE_URL + '/subtask/?ordering=start_time', auth=AUTH) - self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + self.check_response_OK_and_result_count(response, SubtaskQueryTestCase.get_total_number_of_subtasks()) previous_start_time = "2000-01-01T00:00:00" for item in response.json().get('results'): start_time = item['start_time'] @@ -1963,7 +1796,7 @@ class SubtaskQuery(unittest.TestCase): logger.info("Check query on ordering descending start time") response = requests.get(BASE_URL + '/subtask/?ordering=-start_time', auth=AUTH) - self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + self.check_response_OK_and_result_count(response, SubtaskQueryTestCase.get_total_number_of_subtasks()) previous_start_time = "2100-01-01T00:00:00" for item in response.json().get('results'): start_time = item['start_time'] diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py index 9fa9a987f1e380d231159f80a43897d0c6435be9..6260ec3fbbb7aee89671a9990c2a48db2cfa8581 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py @@ -26,7 +26,7 @@ from datetime import datetime import logging logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) # todo: Tags? -> Decide how to deal with them first. 
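Aside: the hunk that follows adds predecessor/successor tests which build a small subtask graph (ST1 and ST2 feed ST3, ST3 feeds ST4 and ST5, ST5 feeds ST6) and assert that predecessors and successors are derived purely from the SubtaskInput-to-producer-SubtaskOutput links. Below is a plain-Python sketch of that graph semantics, deliberately detached from the Django models and intended only to mirror what the new assertions check:

    from dataclasses import dataclass, field

    @dataclass(eq=False)  # identity-based equality/hash, like distinct database rows
    class Subtask:
        name: str
        _producers: list = field(default_factory=list)   # subtasks whose outputs we consume
        _consumers: list = field(default_factory=list)   # subtasks consuming our outputs

        def consume_output_of(self, producer: "Subtask"):
            # analogous to SubtaskInput(subtask=self, producer=<a SubtaskOutput of producer>)
            self._producers.append(producer)
            producer._consumers.append(self)

        @property
        def predecessors(self) -> set:
            return set(self._producers)

        @property
        def successors(self) -> set:
            return set(self._consumers)

    # the graph from the new test: ST1,ST2 -> ST3; ST3 -> ST4,ST5; ST5 -> ST6
    st = {i: Subtask("ST%d" % i) for i in range(1, 7)}
    st[3].consume_output_of(st[1]); st[3].consume_output_of(st[2])
    st[4].consume_output_of(st[3]); st[5].consume_output_of(st[3])
    st[6].consume_output_of(st[5])
    assert st[3].predecessors == {st[1], st[2]}
    assert st[3].successors == {st[4], st[5]}
    assert st[1].predecessors == set() and st[4].successors == set()
    assert st[6].predecessors == {st[5]}
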
@@ -211,6 +211,60 @@ class SubtaskTest(unittest.TestCase): with self.assertRaises(IntegrityError): models.Subtask.objects.create(**test_data) + def test_Subtask_predecessors_and_successors_none(self): + subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + + self.assertEqual(set(), set(subtask1.predecessors.all())) + self.assertEqual(set(), set(subtask2.predecessors.all())) + self.assertEqual(set(), set(subtask1.successors.all())) + self.assertEqual(set(), set(subtask2.successors.all())) + + def test_Subtask_predecessors_and_successors_simple(self): + subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + + output1 = models.SubtaskOutput.objects.create(subtask=subtask1) + models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask2, producer=output1)) + + self.assertEqual(subtask1, subtask2.predecessors.all()[0]) + self.assertEqual(subtask2, subtask1.successors.all()[0]) + + def test_Subtask_predecessors_and_successors_complex(self): + subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask3:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask4:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask5:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + subtask6:models.Subtask = models.Subtask.objects.create(**Subtask_test_data()) + + # ST1 ---> ST3 ---> ST4 + # | | + # ST2 - -> ST5 ---> ST6 + + output1 = models.SubtaskOutput.objects.create(subtask=subtask1) + output2 = models.SubtaskOutput.objects.create(subtask=subtask2) + output3 = models.SubtaskOutput.objects.create(subtask=subtask3) + output4 = models.SubtaskOutput.objects.create(subtask=subtask4) + output5 = models.SubtaskOutput.objects.create(subtask=subtask5) + output6 = models.SubtaskOutput.objects.create(subtask=subtask6) + + models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask3, producer=output1)) + models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask3, producer=output2)) + models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask4, producer=output3)) + models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask5, producer=output3)) + models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask6, producer=output5)) + + self.assertEqual(set((subtask1, subtask2)), set(subtask3.predecessors.all())) + self.assertEqual(set((subtask4, subtask5)), set(subtask3.successors.all())) + self.assertEqual(set((subtask3,)), set(subtask4.predecessors.all())) + self.assertEqual(set((subtask3,)), set(subtask5.predecessors.all())) + self.assertEqual(set((subtask3,)), set(subtask1.successors.all())) + self.assertEqual(set((subtask3,)), set(subtask2.successors.all())) + self.assertEqual(set(), set(subtask1.predecessors.all())) + self.assertEqual(set(), set(subtask2.predecessors.all())) + self.assertEqual(set(), set(subtask4.successors.all())) + self.assertEqual(set((subtask6,)), set(subtask5.successors.all())) class DataproductTest(unittest.TestCase): def test_Dataproduct_gets_created_with_correct_creation_timestamp(self): @@ -249,32 +303,6 @@ class DataproductTest(unittest.TestCase): models.Dataproduct.objects.create(**test_data) -class 
SubtaskConnectorTest(unittest.TestCase): - def test_SubtaskConnector_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SubtaskConnector_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - class AntennaSetTest(unittest.TestCase): def test_AntennaSet_gets_created_with_correct_creation_timestamp(self): diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py index 8c51bdb7dd7f74966b898f36b02ab94f3eee33b1..a9f0b6c410b3fc3cc164d0e19be8b7f57f82f9d0 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py @@ -33,6 +33,10 @@ import logging logger = logging.getLogger(__name__) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + # Do Mandatory setup step: # use setup/teardown magic for tmss test database, ldap server and django server # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) @@ -65,30 +69,35 @@ class GeneratorTemplateTestCase(unittest.TestCase): def test_generator_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) def test_generator_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', test_data_creator.GeneratorTemplate(), 404, {}) + test_data = test_data_creator.GeneratorTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', test_data, 404, {}) def test_generator_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.GeneratorTemplate("generatortemplate2"), 200, test_data_creator.GeneratorTemplate("generatortemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, 
test_data_creator.GeneratorTemplate("generatortemplate2")) + test_data2 = test_data_creator.GeneratorTemplate("generatortemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_generator_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} @@ -102,9 +111,10 @@ class GeneratorTemplateTestCase(unittest.TestCase): def test_generator_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -131,30 +141,35 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): def test_scheduling_unit_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data) def test_scheduling_unit_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', test_data_creator.SchedulingUnitTemplate(), 404, {}) + test_data = test_data_creator.SchedulingUnitTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', test_data, 404, {}) def test_scheduling_unit_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2"), 200, 
test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) + test_data2 = test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_scheduling_unit_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} @@ -168,9 +183,10 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): def test_scheduling_unit_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -197,31 +213,33 @@ class TaskTemplateTestCase(unittest.TestCase): def test_task_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url + '?format=json', test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url + '?format=json', test_data) def test_task_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', test_data_creator.TaskTemplate(), 404, {}) + test_data = test_data_creator.TaskTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', test_data, 404, {}) def test_task_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2"), 
200, test_data_creator.TaskTemplate("tasktemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2")) + test_data2 = test_data_creator.TaskTemplate("tasktemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_task_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}, @@ -234,10 +252,10 @@ class TaskTemplateTestCase(unittest.TestCase): def test_task_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -264,30 +282,35 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): def test_work_relation_selection_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data) def test_work_relation_selection_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', test_data_creator.WorkRelationSelectionTemplate(), 404, {}) + test_data = test_data_creator.WorkRelationSelectionTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', test_data, 404, {}) def test_work_relation_selection_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) + 
GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2"), 200, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) + test_data2 = test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_work_relation_selection_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}, @@ -302,9 +325,10 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): def test_work_relation_selection_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -319,87 +343,88 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/work_relation_selection_template/' + str(id2), test_data_2) -class TaskConnectorsTestCase(unittest.TestCase): +class TaskConnectorTestCase(unittest.TestCase): @classmethod def setUpClass(cls) -> None: cls.input_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') cls.output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') - def test_task_connectors_list_apiformat(self): - r = requests.get(BASE_URL + '/task_connectors/?format=api', auth=AUTH) + def test_task_connector_list_apiformat(self): + r = requests.get(BASE_URL + '/task_connector/?format=api', auth=AUTH) self.assertEqual(r.status_code, 200) - self.assertTrue("Task Connectors List" in r.content.decode('utf8')) + self.assertTrue("Task Connector List" in r.content.decode('utf8')) - def test_task_connectors_GET_nonexistant_raises_error(self): - GET_and_assert_equal_expected_code(self, BASE_URL + '/task_connectors/1234321/', 404) + def test_task_connector_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_connector/1234321/', 404) - def 
test_task_connectors_POST_and_GET(self): - tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url) + def test_task_connector_POST_and_GET(self): + tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) - def test_task_connectors_POST_invalid_role_raises_error(self): + def test_task_connector_POST_invalid_role_raises_error(self): # POST a new item with invalid choice - test_data_invalid_role = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid_role = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid_role['role'] = BASE_URL + '/role/forbidden/' - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid_role, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid_role, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['role'])) - def test_task_connectors_POST_invalid_datatype_raises_error(self): + def test_task_connector_POST_invalid_datatype_raises_error(self): # POST a new item with invalid choice - test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['datatype'] = BASE_URL + '/datatype/forbidden/' - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['datatype'])) - def test_task_connectors_POST_invalid_dataformats_raises_error(self): + def test_task_connector_POST_invalid_dataformats_raises_error(self): # POST a new item with invalid choice - test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['dataformats'] = [BASE_URL + '/dataformat/forbidden/'] - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['dataformats'])) - def test_task_connectors_POST_nonexistant_input_of_raises_error(self): + def test_task_connector_POST_nonexistant_input_of_raises_error(self): # POST a new item with wrong reference - test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['input_of'] = BASE_URL + "/task_template/6353748/" - 
r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['input_of'])) - def test_task_connectors_POST_nonexistant_output_of_raises_error(self): + def test_task_connector_POST_nonexistant_output_of_raises_error(self): # POST a new item with wrong reference - test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['output_of'] = BASE_URL + "/task_template/6353748/" - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['output_of'])) - def test_task_connectors_POST_existing_outputs_works(self): + def test_task_connector_POST_existing_outputs_works(self): # First POST a new item to reference - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] # POST a new item with correct reference - test_data_valid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_valid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_valid['output_of'] = url - POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_valid, 201, test_data_valid) + POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_valid, 201, test_data_valid) - def test_task_connectors_PUT_nonexistant_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/task_connectors/9876789876/', test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url), 404, {}) + def test_task_connector_PUT_nonexistant_raises_error(self): + PUT_and_assert_expected_response(self, BASE_URL + '/task_connector/9876789876/', test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url), 404, {}) - def test_task_connectors_PUT(self): - tc_test_data1 = test_data_creator.TaskConnectors(role="correlator", input_of_url=self.input_of_url, output_of_url=self.output_of_url) - tc_test_data2 = test_data_creator.TaskConnectors(role="beamformer", input_of_url=self.input_of_url, output_of_url=self.output_of_url) + def test_task_connector_PUT(self): + tc_test_data1 = test_data_creator.TaskConnector(role="correlator", input_of_url=self.input_of_url, output_of_url=self.output_of_url) + tc_test_data2 = test_data_creator.TaskConnector(role="beamformer", input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data1, 201, tc_test_data1) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data1, 201, tc_test_data1) url = r_dict['url'] 
GET_OK_and_assert_equal_expected_response(self, url, tc_test_data1) @@ -407,11 +432,11 @@ class TaskConnectorsTestCase(unittest.TestCase): PUT_and_assert_expected_response(self, url, tc_test_data2, 200, tc_test_data2) GET_OK_and_assert_equal_expected_response(self, url, tc_test_data2) - def test_task_connectors_PATCH(self): - tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url) + def test_task_connector_PATCH(self): + tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) @@ -425,11 +450,11 @@ class TaskConnectorsTestCase(unittest.TestCase): expected_data.update(test_patch) GET_OK_and_assert_equal_expected_response(self, url, expected_data) - def test_task_connectors_DELETE(self): - tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url) + def test_task_connector_DELETE(self): + tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) @@ -438,9 +463,9 @@ class TaskConnectorsTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_inputs_template_deleted(self): input_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') - tc_test_data = test_data_creator.TaskConnectors(input_of_url=input_of_url, output_of_url=self.output_of_url) + tc_test_data = test_data_creator.TaskConnector(input_of_url=input_of_url, output_of_url=self.output_of_url) # POST new item - url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url'] + url = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data)['url'] # verify GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE dependency @@ -450,9 +475,9 @@ class TaskConnectorsTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_outputs_template_deleted(self): output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') - tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=output_of_url) + tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=output_of_url) # POST new item - url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url'] + url = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data)['url'] # verify GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE dependency @@ -460,21 +485,20 @@ class TaskConnectorsTestCase(unittest.TestCase): # assert GET_and_assert_equal_expected_code(self, 
url, 404) - def test_GET_task_connectors_view_returns_correct_entry(self): + def test_GET_task_connector_view_returns_correct_entry(self): - test_data_1 = TaskConnectors_test_data() - test_data_2 = TaskConnectors_test_data() - id1 = models.TaskConnectors.objects.create(**test_data_1).id - id2 = models.TaskConnectors.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connectors/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connectors/' + str(id2), test_data_2) + test_data_1 = TaskConnector_test_data() + test_data_2 = TaskConnector_test_data() + id1 = models.TaskConnector.objects.create(**test_data_1).id + id2 = models.TaskConnector.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector/' + str(id1), test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector/' + str(id2), test_data_2) class DefaultTemplates(unittest.TestCase): def test_default_generator_template_POST(self): - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', - test_data_creator.GeneratorTemplate(), 201, - test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -482,9 +506,10 @@ class DefaultTemplates(unittest.TestCase): POST_and_assert_expected_response(self, BASE_URL + '/default_generator_template/', test_data_1, 201, test_data_1) def test_default_scheduling_unit_template_POST(self): + test_data = test_data_creator.SchedulingUnitTemplate() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', - test_data_creator.SchedulingUnitTemplate(), 201, - test_data_creator.SchedulingUnitTemplate()) + test_data, 201, + test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -492,9 +517,10 @@ class DefaultTemplates(unittest.TestCase): POST_and_assert_expected_response(self, BASE_URL + '/default_scheduling_unit_template/', test_data_1, 201, test_data_1) def test_default_task_template_POST(self): + test_data = test_data_creator.TaskTemplate() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', - test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data, 201, + test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -502,9 +528,10 @@ class DefaultTemplates(unittest.TestCase): POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', test_data_1, 201, test_data_1) def test_default_work_relation_selection_template_POST(self): + test_data = test_data_creator.WorkRelationSelectionTemplate() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', - test_data_creator.WorkRelationSelectionTemplate(), 201, - test_data_creator.WorkRelationSelectionTemplate()) + test_data, 201, + test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -514,74 +541,78 @@ class DefaultTemplates(unittest.TestCase): def test_default_generator_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.GeneratorTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', - 
test_data_creator.GeneratorTemplate(), 201, - test_data_creator.GeneratorTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_generator_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) def test_default_scheduling_unit_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.SchedulingUnitTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', - test_data_creator.SchedulingUnitTemplate(), 201, - test_data_creator.SchedulingUnitTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_scheduling_unit_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) def test_default_task_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.TaskTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', - test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) def test_default_work_relation_selection_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.WorkRelationSelectionTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', - test_data_creator.WorkRelationSelectionTemplate(), 201, - test_data_creator.WorkRelationSelectionTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_work_relation_selection_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) class CycleTestCase(unittest.TestCase): @@ -778,20 +809,13 @@ class ProjectTestCase(unittest.TestCase): def test_nested_projects_are_filtered_according_to_cycle(self): cycle_1 = models.Cycle.objects.create(**Cycle_test_data()) - cycle_2 = models.Cycle.objects.create(**Cycle_test_data()) test_data_1 = dict(Project_test_data()) # uuid makes project unique test_data_1['cycle'] = cycle_1 project_1 = models.Project.objects.create(**test_data_1) - test_data_2 = dict(Project_test_data()) # uuid makes project unique - test_data_2['cycle'] = cycle_2 - project_2 = models.Project.objects.create(**test_data_2) + # assert the returned list contains related items, A list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/%s/project/' % cycle_2.name, test_data_2, 1) - # assert an existing related item is returned, name is pk - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/%s/project/%s' % (cycle_2.name, project_2.name), test_data_2) - # assert an existing unrelated item is not returned, name is pk - GET_and_assert_equal_expected_code(self, BASE_URL + '/cycle/%s/project/%s' % (cycle_2.name, project_1.name), 404) - + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/%s/project/' % cycle_1.name, test_data_1, 1) + class ResourceTypeTestCase(unittest.TestCase): def test_resource_type_list_apiformat(self): r = requests.get(BASE_URL + '/resource_type/?format=api', auth=AUTH) @@ -1148,27 +1172,16 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): def test_nested_SchedulingUnitDraft_are_filtered_according_to_SchedulingSet(self): # setup test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft two one") - test_data_2 = SchedulingUnitDraft_test_data("scheduler unit draft two two") sst_test_data_1 = SchedulingSet_test_data("scheduler set one") - sst_test_data_2 = 
SchedulingSet_test_data("scheduler set two") scheduling_set_1 = models.SchedulingSet.objects.create(**sst_test_data_1) - scheduling_set_2 = models.SchedulingSet.objects.create(**sst_test_data_2) test_data_1 = dict(test_data_1) test_data_1['scheduling_set'] = scheduling_set_1 scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['scheduling_set'] = scheduling_set_2 - scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**test_data_2) # assert the returned list contains related items, A list of length 1 is retrieved GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/' - % scheduling_set_2.id, test_data_2, 1) - # assert an existing related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/%s/' % - (scheduling_set_2.id, scheduling_unit_draft_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/%s/' % - (scheduling_set_2.id, scheduling_unit_draft_1.id), 404) + % scheduling_set_1.id, test_data_1, 1) + def test_SchedulingUnitDraft_contains_list_of_related_SchedulingUnitBlueprint(self): @@ -1347,23 +1360,13 @@ class TaskDraftTestCase(unittest.TestCase): # setup test_data_1 = TaskDraft_test_data("task draft one") - test_data_2 = TaskDraft_test_data("task draft two") sudt_test_data_1 = SchedulingUnitDraft_test_data("scheduling unit draft one") - sudt_test_data_2 = SchedulingUnitDraft_test_data("scheduling unit draft two") scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_1) - scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_2) test_data_1 = dict(test_data_1) test_data_1['scheduling_unit_draft'] = scheduling_unit_draft_1 task_draft_1 = models.TaskDraft.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['scheduling_unit_draft'] = scheduling_unit_draft_2 - task_draft_2 = models.TaskDraft.objects.create(**test_data_2) # assert the returned list contains related items, A list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/' % scheduling_unit_draft_2.id, test_data_2, 1) - # assert an existing related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/%s/' % (scheduling_unit_draft_2.id, task_draft_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/%s/' % (scheduling_unit_draft_2.id, task_draft_1.id), 404) + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/task_draft/' % scheduling_unit_draft_1.id, test_data_1, 1) def test_TaskDraft_contains_list_of_related_TaskBlueprint(self): @@ -1407,8 +1410,8 @@ class TaskRelationDraftTestCase(unittest.TestCase): cls.producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') cls.consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/') - cls.input_url = 
test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') - cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') + cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') def test_task_relation_draft_list_apiformat(self): r = requests.get(BASE_URL + '/task_relation_draft/?format=api', auth=AUTH) @@ -1521,7 +1524,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_input_deleted(self): - input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') trd_test_data = test_data_creator.TaskRelationDraft(input_url=input_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, output_url=self.output_url) # POST new item @@ -1538,7 +1541,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_output_deleted(self): - output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') trd_test_data = test_data_creator.TaskRelationDraft(output_url=output_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url) # POST new item with dependency @@ -1575,26 +1578,15 @@ class TaskRelationDraftTestCase(unittest.TestCase): # setup test_data_1 = TaskRelationDraft_test_data() - test_data_2 = TaskRelationDraft_test_data() tdt_test_data_1 = TaskDraft_test_data() - tdt_test_data_2 = TaskDraft_test_data() task_draft_1 = models.TaskDraft.objects.create(**tdt_test_data_1) - task_draft_2 = models.TaskDraft.objects.create(**tdt_test_data_2) test_data_1 = dict(test_data_1) test_data_1['producer'] = task_draft_1 task_relation_draft_1 = models.TaskRelationDraft.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['consumer'] = task_draft_2 - task_relation_draft_2 = models.TaskRelationDraft.objects.create(**test_data_2) - + # assert the returned list contains related items, A list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_relation_draft/' % task_draft_2.id, test_data_2, 1) + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_relation_draft/' % task_draft_1.id, test_data_1, 1) # assert an existing related producer is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/task_relation_draft/%s/' % (task_draft_1.id, task_relation_draft_1.id), test_data_1) - # assert an existing related consumer is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/task_relation_draft/%s/' % (task_draft_2.id, task_relation_draft_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/%s/task_relation_draft/%s/' % (task_draft_2.id, task_relation_draft_1.id), 404) class 
SchedulingUnitBlueprintTestCase(unittest.TestCase): @@ -1718,25 +1710,15 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): # setup test_data_1 = SchedulingUnitBlueprint_test_data("scheduler unit blue print three one") - test_data_2 = SchedulingUnitBlueprint_test_data("scheduler unit blue print three two") sudt_test_data_1 = SchedulingUnitDraft_test_data() - sudt_test_data_2 = SchedulingUnitDraft_test_data() scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_1) - scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_2) test_data_1 = dict(test_data_1) test_data_1['draft'] = scheduling_unit_draft_1 scheduling_unit_blueprint_1 = models.SchedulingUnitBlueprint.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['draft'] = scheduling_unit_draft_2 - scheduling_unit_blueprint_2 = models.SchedulingUnitBlueprint.objects.create(**test_data_2) # assert the returned list contains related items, A list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/' % scheduling_unit_draft_2.id, test_data_2, 1) - # assert an existing related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/%s/' % (scheduling_unit_draft_2.id, scheduling_unit_blueprint_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/%s/' % (scheduling_unit_draft_2.id, scheduling_unit_blueprint_1.id), 404) - + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/' % scheduling_unit_draft_1.id, test_data_1, 1) + class TaskBlueprintTestCase(unittest.TestCase): @classmethod @@ -1906,27 +1888,16 @@ class TaskBlueprintTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/' % id2, test_data_2) def test_nested_TaskBlueprint_are_filtered_according_to_TaskDraft(self): - # setup test_data_1 = TaskBlueprint_test_data("task blue print three one") - test_data_2 = TaskBlueprint_test_data("task blue print three two") tdt_test_data_1 = TaskDraft_test_data("task draft two one") - tdt_test_data_2 = TaskDraft_test_data("task draft two one") task_draft_1 = models.TaskDraft.objects.create(**tdt_test_data_1) - task_draft_2 = models.TaskDraft.objects.create(**tdt_test_data_2) test_data_1 = dict(test_data_1) test_data_1['draft'] = task_draft_1 task_blueprint_1 = models.TaskBlueprint.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['draft'] = task_draft_2 - task_blueprint_2 = models.TaskBlueprint.objects.create(**test_data_2) # assert the returned list contains related items, A list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_blueprint/' % task_draft_2.id, test_data_2, 1) - # assert an existing related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/task_blueprint/%s/' % (task_draft_2.id, task_blueprint_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/%s/task_blueprint/%s/' % (task_draft_2.id, task_blueprint_1.id), 404) + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_blueprint/' % 
task_draft_1.id, test_data_1, 1) def test_TaskBlueprint_contains_list_of_related_Subtask(self): @@ -1974,8 +1945,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): cls.producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') cls.consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/') - cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') - cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') + cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') def test_task_relation_blueprint_list_apiformat(self): r = requests.get(BASE_URL + '/task_relation_blueprint/?format=api', auth=AUTH) @@ -2154,7 +2125,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_input_deleted(self): - input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item @@ -2171,7 +2142,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_output_deleted(self): - output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item with dependency @@ -2209,24 +2180,14 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): # setup test_data_1 = TaskRelationBlueprint_test_data() - test_data_2 = TaskRelationBlueprint_test_data() trdt_test_data_1 = TaskRelationDraft_test_data() - trdt_test_data_2 = TaskRelationDraft_test_data() task_relation_draft_1 = models.TaskRelationDraft.objects.create(**trdt_test_data_1) - task_relation_draft_2 = models.TaskRelationDraft.objects.create(**trdt_test_data_2) test_data_1 = dict(test_data_1) test_data_1['draft'] = task_relation_draft_1 task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**test_data_1) - test_data_2 = dict(test_data_2) - test_data_2['draft'] = task_relation_draft_2 - task_relation_blueprint_2 = models.TaskRelationBlueprint.objects.create(**test_data_2) - + # assert the returned list contains related items, A list of length 1 is retrieved - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/' % task_relation_draft_2.id, test_data_2, 1) - # assert an existing 
related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/%s/' % (task_relation_draft_2.id, task_relation_blueprint_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/%s/' % (task_relation_draft_2.id, task_relation_blueprint_1.id), 404) + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/' % task_relation_draft_1.id, test_data_1, 1) def test_nested_TaskRelationBlueprint_are_filtered_according_to_TaskBlueprint(self): @@ -2247,11 +2208,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_1.id, test_data_1, 1) # assert the returned list contains related consumer GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_2.id, test_data_2, 1) - # assert an existing related item is returned - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/%s/' % (task_blueprint_2.id, task_relation_blueprint_2.id), test_data_2) - # assert an existing unrelated item is not returned - GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/%s/' % (task_blueprint_2.id, task_relation_blueprint_1.id), 404) - + if __name__ == "__main__": logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/test/t_tmssapp_specification_django_API.py index d4301723b0c597e5054c3f7de2dad64db244d8f8..175e48e305ab6f7de886c7af3de5920bbc96b5ce 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.py @@ -26,7 +26,7 @@ import uuid import logging logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) # todo: Tags? -> Decide how to deal with them first. # todo: Immutability of Blueprints on db level? 
@@ -39,6 +39,7 @@ from lofar.sas.tmss.test.tmss_database_unittest_setup import * from lofar.sas.tmss.test.tmss_test_data_django_models import * from django.db.utils import IntegrityError +from django.core.exceptions import ValidationError class GeneratorTemplateTest(unittest.TestCase): @@ -130,6 +131,21 @@ class TaskTemplateTest(unittest.TestCase): self.assertLess(before, entry.updated_at) self.assertGreater(after, entry.updated_at) + def test_TaskTemplate_name_version_unique(self): + test_data = TaskTemplate_test_data(name="my_name", version="1") + entry1 = models.TaskTemplate.objects.create(**test_data) + + with self.assertRaises(IntegrityError): + entry2 = models.TaskTemplate.objects.create(**test_data) + + test_data2 = dict(**test_data) + test_data2['version'] = "2" + entry2 = models.TaskTemplate.objects.create(**test_data2) + + with self.assertRaises(IntegrityError): + entry2.version = '1' + entry2.save() + class WorkRelationSelectionTemplateTest(unittest.TestCase): def test_WorkRelationSelectionTemplate_gets_created_with_correct_creation_timestamp(self): @@ -157,27 +173,27 @@ class WorkRelationSelectionTemplateTest(unittest.TestCase): self.assertGreater(after, entry.updated_at) -class TaskConnectorsTest(unittest.TestCase): +class TaskConnectorTest(unittest.TestCase): - def test_POST_TaskConnectors_prevents_missing_input_of(self): + def test_POST_TaskConnector_prevents_missing_input_of(self): # setup - test_data_1 = dict(TaskConnectors_test_data()) + test_data_1 = dict(TaskConnector_test_data()) test_data_1['input_of'] = None # assert with self.assertRaises(IntegrityError): - models.TaskConnectors.objects.create(**test_data_1) + models.TaskConnector.objects.create(**test_data_1) - def test_POST_TaskConnectors_prevents_missing_output_of(self): + def test_POST_TaskConnector_prevents_missing_output_of(self): # setup - test_data_1 = dict(TaskConnectors_test_data()) + test_data_1 = dict(TaskConnector_test_data()) test_data_1['output_of'] = None # assert with self.assertRaises(IntegrityError): - models.TaskConnectors.objects.create(**test_data_1) + models.TaskConnector.objects.create(**test_data_1) class CycleTest(unittest.TestCase): diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.py b/SAS/TMSS/test/t_tmssapp_specification_permissions.py index cc356399963a8d553c330d54d09135dc00ed8808..0e8ebd686bd17a53a0746993d73ec7e4127604d6 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_permissions.py +++ b/SAS/TMSS/test/t_tmssapp_specification_permissions.py @@ -56,6 +56,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') self.assertFalse(user.has_perm('tmssapp.add_cycle')) @@ -69,6 +71,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while not user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') self.assertTrue(user.has_perm('tmssapp.add_cycle')) @@ -82,6 +86,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while not user.has_perm('tmssapp.add_cycle'): + user = 
User.objects.get(username='paulus') # add count = len(models.Cycle.objects.all()) @@ -100,6 +106,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while not user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') # add count = len(models.Cycle.objects.all()) diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 34e44c6f384073421611136ed7c6d8e7c24b39c2..88a250084b484b67931a20a822bac8b38655462b 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -32,6 +32,9 @@ from lofar.common.dbcredentials import Credentials, DBCredentials from lofar.common.util import find_free_port, waitForInterrupt from lofar.sas.tmss.test.ldap_test_service import TestLDAPServer from lofar.sas.tmss.tmss.exceptions import TMSSException +from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME +from lofar.common.testing.dbcredentials import TemporaryCredentials +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession def assertDataWithUrls(self, data, expected): """ @@ -113,12 +116,15 @@ class TMSSPostgresTestMixin(PostgresTestMixin): class TMSSDjangoServerInstance(): ''' Creates a running django TMSS server at the requested port with the requested database credentials. ''' - def __init__(self, db_dbcreds_id: str="TMSS", ldap_dbcreds_id: str="TMSS_LDAP", host: str='127.0.0.1', port: int=8000): + def __init__(self, db_dbcreds_id: str="TMSS", ldap_dbcreds_id: str="TMSS_LDAP", host: str='127.0.0.1', port: int=8000, + exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)): self._db_dbcreds_id = db_dbcreds_id self._ldap_dbcreds_id = ldap_dbcreds_id self.host = host self.port = port self._server_process = None + self._exchange = exchange + self._broker = broker @property def address(self): @@ -160,6 +166,8 @@ class TMSSDjangoServerInstance(): # set these here, run django setup, and start the server os.environ["TMSS_LDAPCREDENTIALS"] = self.ldap_dbcreds_id os.environ["TMSS_DBCREDENTIALS"] = self.database_dbcreds_id + os.environ["TMSS_EXCHANGE"] = self._exchange + os.environ["TMSS_BROKER"] = self._broker os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" django.setup() @@ -241,19 +249,32 @@ class TMSSDjangoServerInstance(): class TMSSTestEnvironment: '''Create and run a test django TMSS server against a newly created test database and a test ldap server (and cleanup automagically)''' - def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000): + def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000, + exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)): self.ldap_server = TestLDAPServer(user='test', password='test') self.database = TMSSTestDatabaseInstance() self.django_server = TMSSDjangoServerInstance(db_dbcreds_id=self.database.dbcreds_id, ldap_dbcreds_id=self.ldap_server.dbcreds_id, host=host, - port=find_free_port(preferred_django_port)) + port=find_free_port(preferred_django_port), + exchange=exchange, + broker=broker) + self.client_credentials = TemporaryCredentials(user=self.ldap_server.dbcreds.user, + password=self.ldap_server.dbcreds.password) def start(self): self.ldap_server.start() self.database.create() self.django_server.start() + # store client credentials in the
TemporaryCredentials file... + self.client_credentials.dbcreds.host = self.django_server.host + self.client_credentials.dbcreds.port = self.django_server.port + self.client_credentials.dbcreds.type = "http" + self.client_credentials.create() + # ... and set TMSS_CLIENT_DBCREDENTIALS environment variable, so anybody or anything (any test) can use it automagically + os.environ['TMSS_CLIENT_DBCREDENTIALS'] = self.client_credentials.dbcreds_id + # apart from the running django server with a REST API, # it is also convenient to provide a working django setup for the 'normal' django API (via models.objects) # so: do setup_django @@ -271,6 +292,7 @@ class TMSSTestEnvironment: self.django_server.stop() self.ldap_server.stop() self.database.destroy() + self.client_credentials.destroy() def __enter__(self): try: @@ -284,6 +306,8 @@ class TMSSTestEnvironment: def __exit__(self, exc_type, exc_val, exc_tb): self.stop() + def create_tmss_client(self): + return TMSSsession.create_from_dbcreds_for_ldap(self.client_credentials.dbcreds_id) def main_test_database(): """instantiate, run and destroy a test postgress django database""" @@ -307,7 +331,7 @@ def main_test_database(): def main_test_environment(): """instantiate, run and destroy a full tmss test environment (postgress database, ldap server, django server)""" - from optparse import OptionParser + from optparse import OptionParser, OptionGroup os.environ['TZ'] = 'UTC' parser = OptionParser('%prog [options]', @@ -316,11 +340,15 @@ help="expose the TMSS Django REST API via this host. [default=%default]") parser.add_option("-p", "--port", dest="port", type="int", default=find_free_port(8000), help="try to use this port for the DJANGO REST API. If not available, then a random free port is used and logged. [default=%default]") + group = OptionGroup(parser, 'Messaging options') + group.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the message broker, default: %default') + group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, help="Bus or queue where the TMSS messages are published. [default: %default]") + parser.add_option_group(group) (options, args) = parser.parse_args() logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) - with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port) as instance: + with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, exchange=options.exchange, broker=options.broker) as instance: # print some nice info for the user to use the test servers... # use print instead of log for clean lines.
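As a rough usage sketch (not part of this patch) of the test-environment additions above, assuming the lofar.sas.tmss.test.test_utils module path used elsewhere in this repository:

    from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment

    # example values; any reachable exchange/broker will do
    with TMSSTestEnvironment(exchange="test.tmss", broker="localhost") as env:
        # env.start() wrote temporary client credentials and exported TMSS_CLIENT_DBCREDENTIALS,
        # so a REST client (TMSSsession) can be handed out without any manual credential handling
        client = env.create_tmss_client()
        print(env.django_server.url)  # REST API of the freshly started django server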
for h in logging.root.handlers: @@ -332,12 +360,16 @@ def main_test_environment(): print("*****************************************************") print("DB Credentials ID: %s" % (instance.database.dbcreds_id, )) print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, )) + print("TMSS Client Credentials ID: %s" % (instance.client_credentials.dbcreds_id, )) print("Django URL: %s" % (instance.django_server.url)) print() print("Example cmdlines to run tmss or tmss_manage_django:") print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) print() + print("Example cmdline to run tmss client call:") + print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (instance.client_credentials.dbcreds_id, )) + print() print("Press Ctrl-C to exit (and remove the test database and django server automatically)") waitForInterrupt() diff --git a/SAS/TMSS/test/testdata/subtasks.json b/SAS/TMSS/test/testdata/subtasks.json index 9bb8b375e5b1c2bc7fd59d0557f5b9e42f2b0bce..70f8b97d95e9c2c830bcc42092bcf0144a506f9e 100644 --- a/SAS/TMSS/test/testdata/subtasks.json +++ b/SAS/TMSS/test/testdata/subtasks.json @@ -32,7 +32,6 @@ "specifications_doc": 1, "do_cancel": null, "priority": 1, - "scheduler_input_doc": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, @@ -52,7 +51,6 @@ "specifications_doc": 1, "do_cancel": null, "priority": 1, - "scheduler_input_doc": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, @@ -72,7 +70,6 @@ "specifications_doc": 1, "do_cancel": null, "priority": 1, - "scheduler_input_doc": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index 0473b8c7b18a101acfb3c973adb66f55a9e263eb..2c7609339029bfe643e2bbc9938d52d74d6b3403 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -34,10 +34,13 @@ from datetime import datetime import uuid import json -def GeneratorTemplate_test_data(name="my_GeneratorTemplate") -> dict: +def GeneratorTemplate_test_data(name="my_GeneratorTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "create_function": 'Funky', "tags": ["TMSS", "TESTING"]} @@ -47,29 +50,38 @@ def DefaultGeneratorTemplate_test_data(name=None, template=None) -> dict: 'template': template, 'tags':[]} -def SchedulingUnitTemplate_test_data(name="my_SchedulingUnitTemplate") -> dict: +def SchedulingUnitTemplate_test_data(name="my_SchedulingUnitTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My SchedulingUnitTemplate description', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def TaskTemplate_test_data(name="my TaskTemplate") -> dict: +def TaskTemplate_test_data(name="my TaskTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"validation_code_js":"", "name": name, "description": 'My TaskTemplate description', - "version": 
'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def WorkRelationSelectionTemplate_test_data(name="my_WorkRelationSelectionTemplate") -> dict: +def WorkRelationSelectionTemplate_test_data(name="my_WorkRelationSelectionTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My WorkRelationSelectionTemplate description', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def TaskConnectors_test_data() -> dict: +def TaskConnector_test_data() -> dict: return {"role": models.Role.objects.get(value='calibrator'), "datatype": models.Datatype.objects.get(value='instrument model'), "output_of": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), @@ -124,7 +136,7 @@ def SchedulingSet_test_data(name="my_scheduling_set") -> dict: return {"name": name, "description": "", "tags": [], - "generator_doc": "{}", + "generator_doc": {}, "project": models.Project.objects.create(**Project_test_data()), "generator_template": models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()), "generator_source": None} @@ -133,7 +145,7 @@ def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft") -> dict: return {"name": name, "description": "", "tags": [], - "requirements_doc": "{}", + "requirements_doc": {}, "copy_reason": models.CopyReason.objects.get(value='template'), "generator_instance_doc": "para", "copies": None, @@ -155,19 +167,19 @@ def TaskDraft_test_data(name: str="my_task_draft", specifications_template: mode def TaskRelationDraft_test_data() -> dict: return {"tags": [], - "selection_doc": "{}", + "selection_doc": {}, "dataformat": models.Dataformat.objects.get(value='Beamformed'), "producer": models.TaskDraft.objects.create(**TaskDraft_test_data()), "consumer": models.TaskDraft.objects.create(**TaskDraft_test_data()), - "input": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), - "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), + "input": models.TaskConnector.objects.create(**TaskConnector_test_data()), + "output": models.TaskConnector.objects.create(**TaskConnector_test_data()), "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data())} def SchedulingUnitBlueprint_test_data(name='my_scheduling_unit_blueprint') -> dict: return {"name": name, "description": "", "tags": [], - "requirements_doc": "{}", + "requirements_doc": {}, "do_cancel": False, "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()), "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())} @@ -179,7 +191,7 @@ def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDra return {"name": name, "description": "", "tags": [], - "specifications_doc": task_draft.specifications_doc if isinstance(task_draft.specifications_doc, str) else json.dumps(task_draft.specifications_doc), + "specifications_doc": task_draft.specifications_doc, "do_cancel": False, "draft": task_draft, "specifications_template": task_draft.specifications_template, @@ -187,66 +199,75 @@ def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDra def TaskRelationBlueprint_test_data() -> dict: return { "tags": [], - "selection_doc": "{}", + "selection_doc": {}, "dataformat": models.Dataformat.objects.get(value='Beamformed'), - 
"input": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), - "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), + "input": models.TaskConnector.objects.create(**TaskConnector_test_data()), + "output": models.TaskConnector.objects.create(**TaskConnector_test_data()), "draft": models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()), "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data()), "producer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()), "consumer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())} -def SubtaskTemplate_test_data(schema: object=None) -> dict: +def SubtaskTemplate_test_data(schema: object=None, version:str=None) -> dict: if schema is None: schema = {} + if version is None: + version = str(uuid.uuid4()) + return {"type": models.SubtaskType.objects.get(value='copy'), "name": "observation", "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": schema, "realtime": True, "queue": False, "tags": ["TMSS", "TESTING"]} -def DataproductSpecificationsTemplate_test_data() -> dict: +def DataproductSpecificationsTemplate_test_data(version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": "data", "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def DataproductFeedbackTemplate_test_data() -> dict: +def DataproductFeedbackTemplate_test_data(version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": "data", "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def SubtaskOutput_test_data(subtask: models.Subtask=None, connector: models.SubtaskConnector=None) -> dict: +def SubtaskOutput_test_data(subtask: models.Subtask=None) -> dict: if subtask is None: subtask = models.Subtask.objects.create(**Subtask_test_data()) - if connector is None: - connector = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data(output_of=subtask.specifications_template, input_of=subtask.specifications_template)) - return {"subtask": subtask, - "connector": connector, "tags":[]} -def SubtaskInput_test_data() -> dict: - return {"subtask": models.Subtask.objects.create(**Subtask_test_data()), +def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.SubtaskOutput=None) -> dict: + if subtask is None: + subtask = models.Subtask.objects.create(**Subtask_test_data()) + + if producer is None: + producer = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) + + return {"subtask": subtask, "task_relation_blueprint": models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()), - "connector": models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()), - "producer": models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()), - #"dataproducts": models.Dataproduct.objects.create(**dpt.get_test_data()), - "selection_doc": "{}", + "producer": producer, + "selection_doc": {}, "selection_template": models.SubtaskInputSelectionTemplate.objects.create(**SubtaskInputSelectionTemplate_test_data()), "tags":[]} def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_template: models.SubtaskTemplate=None, - specifications_doc: str=None, start_time=None, stop_time=None, 
cluster=None) -> dict:
+                      specifications_doc: dict=None, start_time=None, stop_time=None, cluster=None) -> dict:
     if task_blueprint is None:
         task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())

@@ -268,7 +289,7 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat
     return { "start_time": start_time,
              "stop_time": stop_time,
-             "state": models.SubtaskState.objects.get(value='scheduling'),
+             "state": models.SubtaskState.objects.get(value='defining'),
              "specifications_doc": specifications_doc,
              "task_blueprint": task_blueprint,
              "specifications_template": subtask_template,
@@ -276,8 +297,7 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat
              "do_cancel": datetime.utcnow().isoformat(),
              "priority": 1,
              "schedule_method": models.ScheduleMethod.objects.get(value='manual'),
-             "cluster": cluster,
-             "scheduler_input_doc": "{}"}
+             "cluster": cluster}

 def Dataproduct_test_data(producer: models.SubtaskOutput=None,
                           filename: str="my_file.ext",
@@ -294,29 +314,16 @@ def Dataproduct_test_data(producer: models.SubtaskOutput=None,
             "dataformat": dataformat,
             "deleted_since": None,
             "pinned_since": None,
-            "specifications_doc": "{}",
+            "specifications_doc": {},
             "specifications_template": models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()),
             "tags": ["TMSS", "TESTING"],
             "producer": producer,
             "do_cancel": None,
             "expected_size": 1234,
             "size": 123,
-            "feedback_doc": "{}",
+            "feedback_doc": {},
             "feedback_template": models.DataproductFeedbackTemplate.objects.create(**DataproductFeedbackTemplate_test_data())}

-def SubtaskConnector_test_data(output_of: models.SubtaskTemplate=None, input_of: models.SubtaskTemplate=None) -> dict:
-    if output_of is None:
-        output_of = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data())
-
-    if input_of is None:
-        input_of = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data())
-
-    return {"role": models.Role.objects.get(value='calibrator'),
-            "datatype": models.Datatype.objects.get(value='instrument model'),
-            "output_of": output_of,
-            "input_of": input_of,
-            "tags": []}
-
 def AntennaSet_test_data() -> dict:
     return {"name": "observation",
             "description": 'My one observation',
@@ -355,10 +362,13 @@ def DataproductHash_test_data() -> dict:
             "hash": "myhash_1",
             "tags": ['tmss', 'testing']}

-def SubtaskInputSelectionTemplate_test_data() -> dict:
+def SubtaskInputSelectionTemplate_test_data(version:str=None) -> dict:
+    if version is None:
+        version = str(uuid.uuid4())
+
     return {"name": "data",
             "description": 'My one date',
-            "version": 'v0.314159265359',
+            "version": version,
             "schema": {"mykey": "my value"},
             "tags": ["TMSS", "TESTING"]}

diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py
index 656a2f5cd70cf451241ab4fb0c93af52ba4600cb..e3bdab1480e8fa79c9e697d2481ed457f17f5226 100644
--- a/SAS/TMSS/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/test/tmss_test_data_rest.py
@@ -45,37 +45,49 @@ class TMSSRESTTestDataCreator():

    #######################################################

-    def GeneratorTemplate(self, name="generatortemplate"):
+    def GeneratorTemplate(self, name="generatortemplate", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return {"name": name,
                 "description": 'My one observation',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": {"mykey": "my value"},
                 "create_function": 'Funky',
                 "tags": ["TMSS", "TESTING"]}

-    def SchedulingUnitTemplate(self, name="schedulingunittemplate1"):
+    def SchedulingUnitTemplate(self, name="schedulingunittemplate1", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return { "name": name,
                  "description": 'My description',
-                 "version": 'v0.314159265359',
+                 "version": version,
                  "schema": {"mykey": "my value"},
                  "tags": ["TMSS", "TESTING"]}

-    def TaskTemplate(self, name="tasktemplate1"):
+    def TaskTemplate(self, name="tasktemplate1", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return {"name": name,
                 "description": 'My one observation',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": {"mykey": "my value"},
                 "tags": ["TMSS", "TESTING"],
                 "validation_code_js": "???"}

-    def WorkRelationSelectionTemplate(self, name="workrelationselectiontemplate1"):
+    def WorkRelationSelectionTemplate(self, name="workrelationselectiontemplate1", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return {"name": name,
                 "description": 'My one observation',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": {"mykey": "my value"},
                 "tags": ["TMSS", "TESTING"]}

-    def TaskConnectors(self, role="correlator", input_of_url=None, output_of_url=None):
+    def TaskConnector(self, role="correlator", input_of_url=None, output_of_url=None):
         if input_of_url is None:
             input_of_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/')

@@ -212,10 +224,10 @@ class TMSSRESTTestDataCreator():
             template_url = self.post_data_and_get_url(self.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')

         if input_url is None:
-            input_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/')
+            input_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/')

         if output_url is None:
-            output_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/')
+            output_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/')

         return {"tags": [],
                 "selection_doc": "{}",
@@ -278,10 +290,10 @@ class TMSSRESTTestDataCreator():
             template_url = self.post_data_and_get_url(self.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')

         if input_url is None:
-            input_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/')
+            input_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/')

         if output_url is None:
-            output_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/')
+            output_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/')

         # test data
         return {"tags": [],
@@ -294,30 +306,42 @@ class TMSSRESTTestDataCreator():
                 "producer": producer_url,
                 "consumer": consumer_url}

-    def SubtaskTemplate(self, name="subtask1", schema=None):
+    def SubtaskTemplate(self, name="subtask_template_1", schema=None, subtask_type_url: str=None, version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         if schema is None:
             schema = {}

-        return {"type": self.django_api_url + '/subtask_type/copy/',
+        if subtask_type_url is None:
+            subtask_type_url = self.django_api_url + '/subtask_type/observation/'
+
+        return {"type": subtask_type_url,
                 "name": name,
                 "description": 'My one observation',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": schema,
                 "realtime": True,
                 "queue": False,
                 "tags": ["TMSS", "TESTING"]}

-    def DataproductSpecificationsTemplate(self, name="my_DataproductSpecificationsTemplate"):
+    def DataproductSpecificationsTemplate(self, name="my_DataproductSpecificationsTemplate", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return {"name": name,
                 "description": 'My one date',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": {"mykey": "my value"},
                 "tags": ["TMSS", "TESTING"]}

-    def DataproductFeedbackTemplate(self, name="my_DataproductFeedbackTemplate"):
+    def DataproductFeedbackTemplate(self, name="my_DataproductFeedbackTemplate", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return {"name": name,
                 "description": 'My one date',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": {"mykey": "my value"},
                 "tags": ["TMSS", "TESTING"]}

@@ -335,7 +359,7 @@ class TMSSRESTTestDataCreator():
                 "location": "upstairs",
                 "tags": ['tmss', 'testing']}

-    def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None):
+    def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None, state:str="defining"):
         if cluster_url is None:
             cluster_url = self.post_data_and_get_url(self.Cluster(), '/cluster/')

@@ -346,11 +370,11 @@ class TMSSRESTTestDataCreator():
             specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/')

         if specifications_doc is None:
-            specifications_doc = "{}"
+            specifications_doc = requests.get(specifications_template_url + 'default_specification/', auth=self.auth).content.decode('utf-8')

         return {"start_time": datetime.utcnow().isoformat(),
                 "stop_time": datetime.utcnow().isoformat(),
-                "state": self.django_api_url + '/subtask_state/scheduling/',
+                "state": self.django_api_url + '/subtask_state/%s/' % (state,),
                 "specifications_doc": specifications_doc,
                 "task_blueprint": task_blueprint_url,
                 "specifications_template": specifications_template_url,
@@ -358,34 +382,16 @@ class TMSSRESTTestDataCreator():
                 "do_cancel": datetime.utcnow().isoformat(),
                 "priority": 1,
                 "schedule_method": self.django_api_url + '/schedule_method/manual/',
-                "cluster": cluster_url,
-                "scheduler_input_doc": "{}" }
+                "cluster": cluster_url}

-    def SubtaskOutput(self, subtask_url=None, subtask_connector_url=None):
+    def SubtaskOutput(self, subtask_url=None):
         if subtask_url is None:
             subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/')

-        if subtask_connector_url is None:
-            subtask_connector_url = self.post_data_and_get_url(self.SubtaskConnector(), '/subtask_connector/')
-
+
         return {"subtask": subtask_url,
-                "connector": subtask_connector_url,
-                "tags": []}
-
-    def SubtaskConnector(self, input_of_url=None, output_of_url=None):
-        if input_of_url is None:
-            input_of_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/')
-
-        if output_of_url is None:
-            output_of_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/')
-
-        return {"role": self.django_api_url + '/role/correlator/',
-                "datatype": self.django_api_url + '/datatype/image/',
-                "dataformats": [self.django_api_url + '/dataformat/Beamformed/'],
-                "output_of": output_of_url,
-                "input_of": input_of_url,
                 "tags": []}
-
+
     def Dataproduct(self, filename="my_filename", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None):
         if specifications_template_url is None:
             specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/dataproduct_specifications_template/')
@@ -455,14 +461,17 @@ class TMSSRESTTestDataCreator():
                 "corrupted_since": datetime.utcnow().isoformat(),
                 "tags": ['tmss', 'testing']}

-    def SubtaskInputSelectionTemplate(self, name="my_SubtaskInputSelectionTemplate"):
+    def SubtaskInputSelectionTemplate(self, name="my_SubtaskInputSelectionTemplate", version:str=None) -> dict:
+        if version is None:
+            version = str(uuid.uuid4())
+
         return {"name": name,
                 "description": 'My one date',
-                "version": 'v0.314159265359',
+                "version": version,
                 "schema": {"mykey": "my value"},
                 "tags": ["TMSS", "TESTING"]}

-    def SubtaskInput(self, subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_connector_url=None, subtask_output_url=None, subtask_input_selection_template_url=None):
+    def SubtaskInput(self, subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_output_url=None, subtask_input_selection_template_url=None):
         if subtask_url is None:
             subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/')

@@ -473,9 +482,7 @@ class TMSSRESTTestDataCreator():
             dataproduct_urls = [self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/'),
                                 self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')]

-        if subtask_connector_url is None:
-            subtask_connector_url = self.post_data_and_get_url(self.SubtaskConnector(), '/subtask_connector/')
-
+
         if subtask_output_url is None:
             subtask_output_url = self.post_data_and_get_url(self.SubtaskOutput(), '/subtask_output/')

@@ -484,7 +491,6 @@ class TMSSRESTTestDataCreator():

         return {"subtask": subtask_url,
                 "task_relation_blueprint": task_relation_blueprint_url,
-                "connector": subtask_connector_url,
                 "producer": subtask_output_url,
                 "dataproducts": dataproduct_urls,
                 "selection_doc": "{}",
diff --git a/SubSystems/RAServices/RAServices.ini b/SubSystems/RAServices/RAServices.ini
index 0f013968492ce3d0a6dadfb22705bf26edf2219f..bc649306d58920185ea4e191d774735985bb638d 100644
--- a/SubSystems/RAServices/RAServices.ini
+++ b/SubSystems/RAServices/RAServices.ini
@@ -16,3 +16,6 @@ programs=ltastorageoverviewscraper,ltastorageoverviewwebservice

 [group:Messaging]
 programs=messagelogger
+
+[group:TMSS]
+programs=tmss_subtask_scheduling_service
diff --git a/doc/mainpage.md b/doc/mainpage.md
index 620ae713c7f5ce68bf000835747d18e55650c7f7..9ddfc98d2e505451cf0157c1240817061bb2e97e 100644
--- a/doc/mainpage.md
+++ b/doc/mainpage.md
@@ -5,8 +5,8 @@
 ### Introduction

 Welcome to the LOFAR Software Documentation, the documentation generated from the
-[LOFAR SVN tree](https://svn.astron.nl/LOFAR/) ([Browser View](https://svn.astron.nl/viewvc/LOFAR/))) using
-[Doxygen](http://www.stack.nl/~dimitri/doxygen/). Its target audience are *Developers*, *Software Support Personnel*
+[LOFAR Git tree](https://git.astron.nl/ro/lofar.git) ([Browser View](https://git.astron.nl/ro/lofar))) using
+[Doxygen](http://www.doxygen.nl/). Its target audience are *Developers*, *Software Support Personnel*
 and *System Administrators*. For *End-User* Documentation and information for *Operators* please refer to the
 [LOFAR-wiki](https://www.astron.nl/lofarwiki).

@@ -105,5 +105,5 @@ Todo's and bugs identified in the source code are listed on their individual pag
 ### Copyright & Licenses

 The copyright of this documentation and all LOFAR source code is owned by [ASTRON](http://www.astron.nl/) unless granted
-otherwise by the applicable license(s) - refer to the [COPYING](https://svn.astron.nl/LOFAR/trunk/COPYING) file for more
+otherwise by the applicable license(s) - refer to the [COPYING](https://git.astron.nl/ro/lofar/-/raw/master/COPYING) file for more
 information.