diff --git a/.gitignore b/.gitignore index 00941bb5af067040269d23ab91d781bfb6cd5bc7..cfd4dc461a50e0a01b60ca0f88152e9ca9a2d787 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,4 @@ tangostationcontrol/docs/build **/pending_log_messages.db **/.eggs +docker-compose/alerta-web/alerta-secrets.json diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index eef96688ba969a95709f27c6b0d96e226662d1e3..5c6e486d84dde4fcd4c4c02db86bc0cc72152c39 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -103,6 +103,7 @@ docker_build_image_all: - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker latest - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation_control latest + - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pdu latest - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv latest - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp latest - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst latest @@ -253,6 +254,17 @@ docker_build_image_device_apspu: script: # Do not remove 'bash' or statement will be ignored by primitive docker shell - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu $tag +docker_build_image_device_pdu: + extends: .base_docker_images_except + only: + refs: + - merge_requests + changes: + - docker-compose/device-pdu.yml + - docker-compose/lofar-device-base/* + script: +# Do not remove 'bash' or statement will be ignored by primitive docker shell + - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pdu $tag docker_build_image_device_tilebeam: extends: .base_docker_images_except only: @@ -447,6 +459,10 @@ integration_test_docker: - name: docker:dind variables: DOCKER_TLS_CERTDIR: "/certs" + artifacts: + when: always + paths: + - log/ before_script: - | if [[ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" && -z "$CI_COMMIT_TAG" ]]; then @@ -458,6 +474,7 @@ 
integration_test_docker: fi - apk add --update make bash docker-compose - apk add --update bind-tools + - apk add --update postgresql14-client gzip - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY script: - touch /root/.Xauthority @@ -465,20 +482,24 @@ integration_test_docker: - export BASH_SOURCE=$(pwd)/bootstrap/etc/lofar20rc.sh # Hack HOSTNAME env variable into host.docker.internal, set in docker-compose - export HOSTNAME=host.docker.internal -# - export HOSTNAME=$(hostname -i) -# - export HOSTNAME=$(cat /run/systemd/netif/leases/2 | grep ^ADDRESS= | awk -F'=' '{print $2}') # source the lofarrc file and mask its non zero exit code - . bootstrap/etc/lofar20rc.sh || true # TANGO_HOST must be unset our databaseds will be unreachable - unset TANGO_HOST -## Allow docker image script to execute -# - chmod u+x $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh # Do not remove 'bash' or statement will be ignored by primitive docker shell - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh pull $tag -## Allow integration test to execute -# - chmod u+x $CI_PROJECT_DIR/sbin/run_integration_test.sh # Do not remove 'bash' or statement will be ignored by primitive docker shell - - bash $CI_PROJECT_DIR/sbin/run_integration_test.sh + - bash -e $CI_PROJECT_DIR/sbin/run_integration_test.sh + after_script: +# Collect output of all containers + - | + mkdir -p log + for container in $(docker ps -a --format "{{.Names}}") + do + echo "Saving log for container $container" + docker logs "${container}" >& "log/${container}.log" + done + PGPASSWORD=password pg_dump --host=docker --username=postgres hdb 2>log/archiver-timescale-dump.log | gzip > log/archiver-timescale-dump.txt.gz wheel_packaging: stage: packaging artifacts: diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json index 75da3c1142f40bb4034d7100812bd5dbe3ecf983..2032c4de777aa785dc8bfcff70c73da0bfc197b3 100644 --- a/CDB/LOFAR_ConfigDb.json +++ b/CDB/LOFAR_ConfigDb.json @@ -14,6 +14,13 @@ } } 
}, + "PDU": { + "STAT": { + "PDU": { + "STAT/PDU/1": {} + } + } + }, "TileBeam": { "STAT": { "TileBeam": { diff --git a/CDB/integration_ConfigDb.json b/CDB/integration_ConfigDb.json deleted file mode 100644 index 7cfbd82751791d7968315923edfc2ba971480308..0000000000000000000000000000000000000000 --- a/CDB/integration_ConfigDb.json +++ /dev/null @@ -1,305 +0,0 @@ -{ - "servers": { - "APSCT": { - "STAT": { - "APSCT": { - "STAT/APSCT/1": { - "properties": { - "OPC_Server_Name": [ - "apsct-sim" - ], - "OPC_Server_Port": [ - "4843" - ], - "OPC_Time_Out": [ - "5.0" - ] - } - } - } - } - }, - "APSPU": { - "STAT": { - "APSPU": { - "STAT/APSPU/1": { - "properties": { - "OPC_Server_Name": [ - "apspu-sim" - ], - "OPC_Server_Port": [ - "4843" - ], - "OPC_Time_Out": [ - "5.0" - ] - } - } - } - } - }, - "RECV": { - "STAT": { - "RECV": { - "STAT/RECV/1": { - "properties": { - "OPC_Server_Name": [ - "recv-sim" - ], - "OPC_Server_Port": [ - "4840" - ], - "OPC_Time_Out": [ - "5.0" - ] - } - } - } - } - }, - "SDP": { - "STAT": { - "SDP": { - "STAT/SDP/1": { - "properties": { - "OPC_Server_Name": [ - "sdptr-sim" - ], - "OPC_Server_Port": [ - "4840" - ], - "OPC_Time_Out": [ - "5.0" - ], - "FPGA_sdp_info_station_id_RW_default": [ - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901", - "901" - ], - "polled_attr": [ - "fpga_temp_r", - "1000", - "state", - "1000", - "status", - "1000", - "fpga_mask_rw", - "1000", - "fpga_scrap_r", - "1000", - "fpga_scrap_rw", - "1000", - "fpga_status_r", - "1000", - "fpga_version_r", - "1000", - "fpga_weights_r", - "1000", - "fpga_weights_rw", - "1000", - "tr_busy_r", - "1000", - "tr_reload_rw", - "1000", - "tr_tod_r", - "1000", - "tr_uptime_r", - "1000" - ] - } - } - } - } - }, - "SST": { - "STAT": { - "SST": { - "STAT/SST/1": { - "properties": { - "Statistics_Client_UDP_Port": [ - "5001" - ], - "Statistics_Client_TCP_Port": [ - "5101" - ], - "OPC_Server_Name": [ - "sdptr-sim" 
- ], - "OPC_Server_Port": [ - "4840" - ], - "OPC_Time_Out": [ - "5.0" - ], - "FPGA_sst_offload_hdr_eth_destination_mac_RW_default": [ - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd" - ], - "FPGA_sst_offload_hdr_ip_destination_address_RW_default": [ - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250" - ], - "FPGA_sst_offload_hdr_udp_destination_port_RW_default": [ - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001", - "5001" - ] - } - } - } - } - }, - "XST": { - "STAT": { - "XST": { - "STAT/XST/1": { - "properties": { - "Statistics_Client_UDP_Port": [ - "5002" - ], - "Statistics_Client_TCP_Port": [ - "5102" - ], - "OPC_Server_Name": [ - "sdptr-sim" - ], - "OPC_Server_Port": [ - "4840" - ], - "OPC_Time_Out": [ - "5.0" - ], - "FPGA_xst_offload_hdr_eth_destination_mac_RW_default": [ - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd", - "6c:2b:59:97:be:dd" - ], - "FPGA_xst_offload_hdr_ip_destination_address_RW_default": [ - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - 
"10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250", - "10.99.250.250" - ], - "FPGA_xst_offload_hdr_udp_destination_port_RW_default": [ - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002", - "5002" - ] - } - } - } - } - }, - "UNB2": { - "STAT": { - "UNB2": { - "STAT/UNB2/1": { - "properties": { - "OPC_Server_Name": [ - "unb2-sim" - ], - "OPC_Server_Port": [ - "4841" - ], - "OPC_Time_Out": [ - "5.0" - ] - } - } - } - } - } - } -} diff --git a/CDB/integrations/recvcluster_ConfigDb.json b/CDB/integrations/recvcluster_ConfigDb.json new file mode 100644 index 0000000000000000000000000000000000000000..1b27092a198f26f2916e27a3043c8d5528e064a1 --- /dev/null +++ b/CDB/integrations/recvcluster_ConfigDb.json @@ -0,0 +1,2278 @@ +{ + "servers": { + "TileBeam": { + "STAT": { + "TileBeam": { + "STAT/TileBeam/2": {}, + "STAT/TileBeam/3": {}, + "STAT/TileBeam/4": {} + } + } + }, + "RECV": { + "STAT": { + "RECV": { + "STAT/RECV/2": { + "attribute_properties": { + "Ant_mask_RW": { + "archive_period": [ + "600000" + ] + }, + "CLK_Enable_PWR_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_I2C_STATUS_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "CLK_PLL_error_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_PLL_locked_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_monitor_rate_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "CLK_translator_busy_R": { + "archive_period": [ + "600000" + ] + }, + "HBA_element_LNA_pwr_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_LNA_pwr_RW": 
{ + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_beamformer_delays_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_beamformer_delays_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_led_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_led_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_pwr_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_pwr_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_ADC_lock_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_I2C_STATUS_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_ID_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_LED0_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED0_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED1_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED1_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_Pwr_dig_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_attenuator_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_attenuator_RW": { + "archive_period": [ + "600000" + 
], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_band_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_band_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_mask_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_monitor_rate_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1.0", + "1.0" + ], + "rel_change": [ + "-1.0", + "1.0" + ] + }, + "RCU_temperature_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1.0", + "1.0" + ], + "rel_change": [ + "-1.0", + "1.0" + ] + }, + "RCU_translator_busy_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_version_R": { + "archive_period": [ + "600000" + ] + }, + "State": { + "archive_period": [ + "600000" + ], + "event_period": [ + "0" + ] + }, + "Status": { + "archive_period": [ + "600000" + ], + "event_period": [ + "0" + ] + } + }, + "properties": { + "HBAT_reference_itrf": [ + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + 
"5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + 
"5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786" + ], + "HBAT_antenna_itrf_offsets": [ + "-1.847", + "-1.180", + " 1.493", + "-1.581", 
+ " 0.003", + " 1.186", + "-1.315", + " 1.185", + " 0.880", + "-1.049", + " 2.367", + " 0.573", + "-0.882", + "-1.575", + " 0.804", + "-0.616", + "-0.393", + " 0.498", + "-0.350", + " 0.789", + " 0.191", + "-0.083", + " 1.971", + "-0.116", + " 0.083", + "-1.971", + " 0.116", + " 0.350", + "-0.789", + "-0.191", + " 0.616", + " 0.393", + "-0.498", + " 0.882", + " 1.575", + "-0.804", + " 1.049", + "-2.367", + "-0.573", + " 1.315", + "-1.185", + "-0.880", + " 1.581", + "-0.003", + "-1.186", + " 1.847", + " 1.180", + "-1.493" + ], + "OPC_Server_Name": [ + "recv-sim" + ], + "OPC_Server_Port": [ + "4840" + ], + "OPC_Time_Out": [ + "5.0" + ], + "polled_attr": [ + "state", + "1000", + "status", + "1000", + "ant_mask_rw", + "1000", + "rcu_adc_lock_r", + "1000", + "rcu_attenuator_r", + "1000", + "rcu_attenuator_rw", + "1000", + "rcu_band_r", + "1000", + "rcu_band_rw", + "1000", + "rcu_id_r", + "1000", + "rcu_led0_r", + "1000", + "rcu_led0_rw", + "1000", + "rcu_mask_rw", + "1000", + "rcu_monitor_rate_rw", + "1000", + "rcu_pwr_dig_r", + "1000", + "rcu_temperature_r", + "1000", + "rcu_version_r", + "1000", + "hba_element_beamformer_delays_r", + "1000", + "hba_element_beamformer_delays_rw", + "1000", + "hba_element_led_r", + "1000", + "hba_element_led_rw", + "1000", + "hba_element_pwr_r", + "1000", + "hba_element_pwr_rw", + "1000", + "clk_enable_pwr_r", + "1000", + "clk_i2c_status_r", + "1000", + "clk_monitor_rate_rw", + "1000", + "clk_pll_error_r", + "1000", + "clk_pll_locked_r", + "1000", + "clk_translator_busy_r", + "1000", + "hba_element_lna_pwr_r", + "1000", + "hba_element_lna_pwr_rw", + "1000", + "rcu_i2c_status_r", + "1000", + "rcu_led1_r", + "1000", + "rcu_led1_rw", + "1000", + "rcu_translator_busy_r", + "1000" + ] + } + }, + "STAT/RECV/3": { + "attribute_properties": { + "Ant_mask_RW": { + "archive_period": [ + "600000" + ] + }, + "CLK_Enable_PWR_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_I2C_STATUS_R": { + "archive_period": [ + "600000" + ], + 
"archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "CLK_PLL_error_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_PLL_locked_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_monitor_rate_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "CLK_translator_busy_R": { + "archive_period": [ + "600000" + ] + }, + "HBA_element_LNA_pwr_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_LNA_pwr_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_beamformer_delays_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_beamformer_delays_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_led_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_led_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_pwr_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_pwr_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_ADC_lock_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_I2C_STATUS_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_ID_R": { + "archive_period": 
[ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_LED0_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED0_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED1_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED1_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_Pwr_dig_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_attenuator_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_attenuator_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_band_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_band_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_mask_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_monitor_rate_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1.0", + "1.0" + ], + "rel_change": [ + "-1.0", + "1.0" + ] + }, + "RCU_temperature_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1.0", + "1.0" + ], + "rel_change": [ + "-1.0", + "1.0" + ] + }, + "RCU_translator_busy_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_version_R": { + "archive_period": [ + "600000" + ] + }, + "State": { + "archive_period": [ + "600000" + ], + "event_period": [ + "0" + ] + }, + "Status": { + "archive_period": [ + "600000" + ], + "event_period": [ + "0" + ] + } + }, + "properties": { + "HBAT_reference_itrf": [ + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + 
"461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + 
"461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + 
"461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786" + ], + "HBAT_antenna_itrf_offsets": [ + "-1.847", + "-1.180", + " 1.493", + "-1.581", + " 0.003", + " 1.186", + "-1.315", + " 1.185", + " 0.880", + "-1.049", + " 2.367", + " 0.573", + "-0.882", + "-1.575", + " 0.804", + "-0.616", + "-0.393", + " 0.498", + "-0.350", + " 0.789", + " 0.191", + "-0.083", + " 1.971", + "-0.116", + " 0.083", + "-1.971", + " 0.116", + " 0.350", + "-0.789", + "-0.191", + " 0.616", + " 0.393", + "-0.498", + " 0.882", + " 1.575", + "-0.804", + " 1.049", + "-2.367", + "-0.573", + " 1.315", + "-1.185", + "-0.880", + " 1.581", + "-0.003", + "-1.186", + " 1.847", + " 1.180", + "-1.493" + ], + "OPC_Server_Name": [ + "recv-sim" + ], + "OPC_Server_Port": [ + "4840" + ], + "OPC_Time_Out": [ + "5.0" + ], + "polled_attr": [ + "state", + "1000", + "status", + "1000", + "ant_mask_rw", + "1000", + "rcu_adc_lock_r", + "1000", + "rcu_attenuator_r", + "1000", + "rcu_attenuator_rw", + "1000", + "rcu_band_r", + "1000", + "rcu_band_rw", + "1000", + "rcu_id_r", + "1000", + "rcu_led0_r", + "1000", + "rcu_led0_rw", + "1000", + "rcu_mask_rw", + "1000", + "rcu_monitor_rate_rw", + "1000", + "rcu_pwr_dig_r", + "1000", + "rcu_temperature_r", + "1000", + "rcu_version_r", + "1000", + "hba_element_beamformer_delays_r", + "1000", + "hba_element_beamformer_delays_rw", + "1000", + "hba_element_led_r", + "1000", + "hba_element_led_rw", + "1000", + "hba_element_pwr_r", + "1000", + "hba_element_pwr_rw", + "1000", + "clk_enable_pwr_r", + "1000", + 
"clk_i2c_status_r", + "1000", + "clk_monitor_rate_rw", + "1000", + "clk_pll_error_r", + "1000", + "clk_pll_locked_r", + "1000", + "clk_translator_busy_r", + "1000", + "hba_element_lna_pwr_r", + "1000", + "hba_element_lna_pwr_rw", + "1000", + "rcu_i2c_status_r", + "1000", + "rcu_led1_r", + "1000", + "rcu_led1_rw", + "1000", + "rcu_translator_busy_r", + "1000" + ] + } + }, + "STAT/RECV/4": { + "attribute_properties": { + "Ant_mask_RW": { + "archive_period": [ + "600000" + ] + }, + "CLK_Enable_PWR_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_I2C_STATUS_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "CLK_PLL_error_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_PLL_locked_R": { + "archive_period": [ + "600000" + ] + }, + "CLK_monitor_rate_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "CLK_translator_busy_R": { + "archive_period": [ + "600000" + ] + }, + "HBA_element_LNA_pwr_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_LNA_pwr_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_beamformer_delays_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_beamformer_delays_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_led_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_led_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + 
"HBA_element_pwr_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "HBA_element_pwr_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_ADC_lock_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_I2C_STATUS_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_ID_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_LED0_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED0_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED1_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_LED1_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_Pwr_dig_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_attenuator_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_attenuator_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_band_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_band_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1", + "1" + ], + "rel_change": [ + "-1", + "1" + ] + }, + "RCU_mask_RW": { + "archive_period": [ + "600000" + ] + }, + "RCU_monitor_rate_RW": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1.0", + "1.0" + ], + "rel_change": [ + "-1.0", + "1.0" + ] + }, + "RCU_temperature_R": { + "archive_period": [ + "600000" + ], + "archive_rel_change": [ + "-1.0", + "1.0" + ], + "rel_change": [ + "-1.0", + "1.0" + ] + }, + 
"RCU_translator_busy_R": { + "archive_period": [ + "600000" + ] + }, + "RCU_version_R": { + "archive_period": [ + "600000" + ] + }, + "State": { + "archive_period": [ + "600000" + ], + "event_period": [ + "0" + ] + }, + "Status": { + "archive_period": [ + "600000" + ], + "event_period": [ + "0" + ] + } + }, + "properties": { + "HBAT_reference_itrf": [ + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + 
"5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + 
"5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786", + "3826577.066", + "461022.948", + "5064892.786" + ], + "HBAT_antenna_itrf_offsets": [ + "-1.847", + "-1.180", + " 1.493", + "-1.581", + " 0.003", + " 1.186", + "-1.315", + " 1.185", + " 0.880", + "-1.049", + " 2.367", + " 0.573", + "-0.882", + "-1.575", + " 0.804", + "-0.616", + "-0.393", + " 0.498", + "-0.350", + " 0.789", + " 0.191", + "-0.083", + " 1.971", + "-0.116", + " 0.083", + "-1.971", + " 0.116", + " 0.350", + "-0.789", + "-0.191", + " 0.616", + " 0.393", + "-0.498", + " 0.882", + " 1.575", + "-0.804", + " 1.049", + "-2.367", + "-0.573", + " 1.315", + "-1.185", + "-0.880", + " 1.581", + "-0.003", + "-1.186", + " 1.847", + " 1.180", + "-1.493" + ], + "OPC_Server_Name": [ + "recv-sim" + ], + "OPC_Server_Port": [ + "4840" + ], + "OPC_Time_Out": [ + "5.0" + ], + "polled_attr": [ + "state", + "1000", + "status", + "1000", + "ant_mask_rw", + "1000", + 
"rcu_adc_lock_r", + "1000", + "rcu_attenuator_r", + "1000", + "rcu_attenuator_rw", + "1000", + "rcu_band_r", + "1000", + "rcu_band_rw", + "1000", + "rcu_id_r", + "1000", + "rcu_led0_r", + "1000", + "rcu_led0_rw", + "1000", + "rcu_mask_rw", + "1000", + "rcu_monitor_rate_rw", + "1000", + "rcu_pwr_dig_r", + "1000", + "rcu_temperature_r", + "1000", + "rcu_version_r", + "1000", + "hba_element_beamformer_delays_r", + "1000", + "hba_element_beamformer_delays_rw", + "1000", + "hba_element_led_r", + "1000", + "hba_element_led_rw", + "1000", + "hba_element_pwr_r", + "1000", + "hba_element_pwr_rw", + "1000", + "clk_enable_pwr_r", + "1000", + "clk_i2c_status_r", + "1000", + "clk_monitor_rate_rw", + "1000", + "clk_pll_error_r", + "1000", + "clk_pll_locked_r", + "1000", + "clk_translator_busy_r", + "1000", + "hba_element_lna_pwr_r", + "1000", + "hba_element_lna_pwr_rw", + "1000", + "rcu_i2c_status_r", + "1000", + "rcu_led1_r", + "1000", + "rcu_led1_rw", + "1000", + "rcu_translator_busy_r", + "1000" + ] + } + } + } + } + } + } +} \ No newline at end of file diff --git a/CDB/stations/DTS_ConfigDb.json b/CDB/stations/DTS_ConfigDb.json index 7017e28e75f1883d61c89038dbb6f95892995b90..398ef7d63577ce62f61c2374b9335a905ebce566 100644 --- a/CDB/stations/DTS_ConfigDb.json +++ b/CDB/stations/DTS_ConfigDb.json @@ -357,6 +357,24 @@ "902", "902", "902" + ], + "TR_fpga_mask_RW_default": [ + "True", + "True", + "True", + "True", + "False", + "False", + "False", + "False", + "False", + "False", + "False", + "False", + "False", + "False", + "False", + "False" ] } } diff --git a/CDB/stations/dummy_positions_ConfigDb.json b/CDB/stations/dummy_positions_ConfigDb.json index 926145a0c6a2f652c637ae4ab409b57b07c655d7..460f49d2e5c9af9d968c11be32f11819a4ab943e 100644 --- a/CDB/stations/dummy_positions_ConfigDb.json +++ b/CDB/stations/dummy_positions_ConfigDb.json @@ -5,6 +5,9 @@ "RECV": { "STAT/RECV/1": { "properties": { + "Antenna_Field_Reference_ETRS": [ + "3826896.631", "460979.131", "5064657.943" + ], 
"HBAT_reference_ETRS": [ "3826886.142", "460980.772", "5064665.668", "3826887.237", "460985.643", "5064664.406", diff --git a/bin/start-ds.sh b/bin/start-ds.sh index 9485820209750ad155752a35f09186658a78e3c2..b9b958ecd85a9eed927c99862d3e675fe166c6f9 100755 --- a/bin/start-ds.sh +++ b/bin/start-ds.sh @@ -35,7 +35,7 @@ else mkdir -p /tmp/tangostationcontrol python3 setup.py build --build-base /tmp/tangostationcontrol egg_info --egg-base /tmp/tangostationcontrol bdist_wheel --dist-dir /tmp/tangostationcontrol || exit 1 # shellcheck disable=SC2012 - sudo pip install "$(ls -Art /tmp/tangostationcontrol/*.whl | tail -n 1)" + pip install "$(ls -Art /tmp/tangostationcontrol/*.whl | tail -n 1)" fi # Return to the stored the directory, this preserves the working_dir argument in diff --git a/bin/wait-for-it.sh b/bin/wait-for-it.sh new file mode 100755 index 0000000000000000000000000000000000000000..0bd0c104de461817575aafc4962fcc5ee786945e --- /dev/null +++ b/bin/wait-for-it.sh @@ -0,0 +1,165 @@ +#!/usr/bin/env bash +# shellcheck disable=SC2086,SC2064,SC2206,SC2124 +# Use this script to test if a given TCP host/port are available +# +# Source: https://github.com/vishnubob/wait-for-it +# License: MIT + +cmdname=$(basename $0) + +echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $TIMEOUT -gt 0 ]]; then + echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" + else + echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" + 
fi + start_ts=$(date +%s) + while : + do + (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 + result=$? + if [[ $result -eq 0 ]]; then + end_ts=$(date +%s) + echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" + break + fi + sleep 1 + done + return $result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $QUIET -eq 1 ]]; then + timeout $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + else + timeout $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + fi + PID=$! + trap "kill -INT -$PID" INT + wait $PID + RESULT=$? + if [[ $RESULT -ne 0 ]]; then + echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" + fi + return $RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + hostport=(${1//:/ }) + HOST=${hostport[0]} + PORT=${hostport[1]} + shift 1 + ;; + --child) + CHILD=1 + shift 1 + ;; + -q | --quiet) + QUIET=1 + shift 1 + ;; + -s | --strict) + STRICT=1 + shift 1 + ;; + -h) + HOST="$2" + if [[ $HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + HOST="${1#*=}" + shift 1 + ;; + -p) + PORT="$2" + if [[ $PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + PORT="${1#*=}" + shift 1 + ;; + -t) + TIMEOUT="$2" + if [[ $TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + CLI="$@" + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$HOST" == "" || "$PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." + usage +fi + +TIMEOUT=${TIMEOUT:-15} +STRICT=${STRICT:-0} +CHILD=${CHILD:-0} +QUIET=${QUIET:-0} + +if [[ $CHILD -gt 0 ]]; then + wait_for + RESULT=$? + exit $RESULT +else + if [[ $TIMEOUT -gt 0 ]]; then + wait_for_wrapper + RESULT=$? + else + wait_for + RESULT=$? 
+ fi +fi + +if [[ $CLI != "" ]]; then + if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then + echoerr "$cmdname: strict mode, refusing to execute subprocess" + exit $RESULT + fi + exec $CLI +else + exit $RESULT +fi diff --git a/docker-compose/Makefile b/docker-compose/Makefile index 5aa715b84f64f4038ce946aa2cd9c33f9c53ba23..d329b9bbb5bd6cbaff6cd0d06f9bf1f93ed47ae5 100644 --- a/docker-compose/Makefile +++ b/docker-compose/Makefile @@ -114,7 +114,9 @@ ifeq ($(NETWORK_MODE),host) MYSQL_HOST := $(shell hostname):3306 else ifeq ($(TANGO_HOST),) - TANGO_HOST := $(CONTAINER_NAME_PREFIX)databaseds:10000 + # Use FQDN for TANGO_HOST to avoid confusion in the archiver, which also + # adds the domain. + TANGO_HOST := $(CONTAINER_NAME_PREFIX)databaseds.$(NETWORK_MODE):10000 else TANGO_HOST := $(TANGO_HOST) endif @@ -181,6 +183,7 @@ bootstrap: pull build # first start, initialise from scratch $(MAKE) start dsconfig # boot up containers to load configurations sleep 5 # wait for dsconfig container to come up ../sbin/update_ConfigDb.sh ../CDB/LOFAR_ConfigDb.json # load default configuration + ../sbin/update_ConfigDb.sh ../CDB/tango-archiver-data/archiver-devices.json # load default archive configuration ../sbin/update_ConfigDb.sh ../CDB/stations/simulators_ConfigDb.json # by default, use simulators start: up ## start a service (usage: make start <servicename>) diff --git a/docker-compose/alerta-web/Dockerfile b/docker-compose/alerta-web/Dockerfile index 8a1845a7a3b9aed0a20cf30505be1cd3218bf729..80431da39da9ddb7ff0c28997660163234eb6d57 100644 --- a/docker-compose/alerta-web/Dockerfile +++ b/docker-compose/alerta-web/Dockerfile @@ -1,3 +1,14 @@ FROM alerta/alerta-web -RUN pip install git+https://github.com/alerta/alerta-contrib.git#subdirectory=plugins/slack +RUN bash -c 'source /venv/bin/activate; pip install git+https://github.com/alerta/alerta-contrib.git#subdirectory=plugins/slack' +RUN bash -c 'source /venv/bin/activate; pip install 
git+https://github.com/alerta/alerta-contrib.git#subdirectory=plugins/jira' + +COPY grafana-plugin /tmp/grafana-plugin +RUN bash -c 'source /venv/bin/activate; pip install /tmp/grafana-plugin' + +COPY lofar-plugin /tmp/lofar-plugin +RUN bash -c 'source /venv/bin/activate; pip install /tmp/lofar-plugin' + +COPY alertad.conf /app/alertad.conf +COPY alerta.conf /app/alerta.conf +COPY config.json /web/config.json diff --git a/docker-compose/alerta-web/README.md b/docker-compose/alerta-web/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8900026226cb6e3ee9c987792f24b44d8beff374 --- /dev/null +++ b/docker-compose/alerta-web/README.md @@ -0,0 +1,23 @@ +You need: + +* Your own Slack App: + * Give it channel write rights + * Get the OAuth token + * Install it in your slack + * Invite the app into your channel + * Feed the OAuth token to the config + * Add it to alerta-secrets.json +* Grafana: + * By default, grafana resends alarms every 4h, configure this in the notification settings to faster resend deleted alarms for testing + * Add alerts by hand + * add "Summary" as alert text + * add label "severity": "major"/"minor"/etc (see https://docs.alerta.io/webui/configuration.html#severity-colors) + +* Create alerta-secrets.json in this directory: + +Example alerta-secrets.json: + +{ + "SLACK_TOKEN": "xoxb-...", + "SLACK_CHANNEL": "#lofar20-alerta" +} diff --git a/docker-compose/alerta-web/alerta-secrets.json b/docker-compose/alerta-web/alerta-secrets.json new file mode 100644 index 0000000000000000000000000000000000000000..8fb44d7b830c3090408fb3bd576fa297e0e2dcc9 --- /dev/null +++ b/docker-compose/alerta-web/alerta-secrets.json @@ -0,0 +1,4 @@ +{ + "SLACK_TOKEN": "xoxb-get-this-from-your-slack-app", + "SLACK_CHANNEL": "#your-channel" +} diff --git a/docker-compose/alerta-web/alerta.conf b/docker-compose/alerta-web/alerta.conf new file mode 100644 index 0000000000000000000000000000000000000000..64c8ec7019847aff146f166699aef5fc933c7560 --- 
/dev/null +++ b/docker-compose/alerta-web/alerta.conf @@ -0,0 +1,7 @@ +[DEFAULT] +sslverify = no +output = presto +endpoint = http://localhost:8080/api +timezone = Europe/London +key = NpzX0z_fX8TVKZtXpzop-pi2MhaGnLawKVqbJBoA +debug = yes diff --git a/docker-compose/alerta-web/alertad.conf b/docker-compose/alerta-web/alertad.conf new file mode 100644 index 0000000000000000000000000000000000000000..dc7b6c2e295ae4230a9373ed26f148d6aad59cd0 --- /dev/null +++ b/docker-compose/alerta-web/alertad.conf @@ -0,0 +1,59 @@ +DEBUG = True +SECRET = "T=&7xvF2S&x7w_JAcq$h1x5ocfA)8H2i" + +# Allow non-admin views +CUSTOMER_VIEWS = True + +# Never timeout alerts +ALERT_TIMEOUT = 0 +# Auto unack after a day +ACK_TIMEOUT = 24 * 3600 +# Auto unshelve after 2 hours +SHELVE_TIMEOUT = 2 * 3600 + +# Use custom date formats +DATE_FORMAT_MEDIUM_DATE = "dd DD/MM HH:mm" +DATE_FORMAT_LONG_DATE = "yyyy-MM-DD HH:mm:ss.sss" + +# Default overview settings +COLUMNS = ['severity', 'status', 'createTime', 'lastReceiveTime', 'resource', 'grafanaDashboardHtml', 'grafanaPanelHtml', 'event', 'text'] +DEFAULT_FILTER = {'status': ['open']} +SORT_LIST_BY = "createTime" +AUTO_REFRESH_INTERVAL = 5000 # ms + +# ------------------------------------ +# Plugin configuration +# ------------------------------------ + +PLUGINS = ['reject', 'blackout', 'acked_by', 'enhance', 'grafana', 'lofar', 'slack'] + +# Slack plugin settings, see https://github.com/alerta/alerta-contrib/tree/master/plugins/slack +import os, json + +with open("/run/secrets/alerta-secrets") as secrets_file: + secrets = json.load(secrets_file) + +SLACK_WEBHOOK_URL = 'https://slack.com/api/chat.postMessage' +SLACK_TOKEN = secrets["SLACK_TOKEN"] +SLACK_CHANNEL = secrets["SLACK_CHANNEL"] +SLACK_ATTACHMENTS = True +BASE_URL = os.environ.get("BASE_URL", "") + +# for the Slack message configuration syntax, see https://api.slack.com/methods/chat.postMessage +# and https://app.slack.com/block-kit-builder +SLACK_PAYLOAD = { + "channel": "{{ channel }}", + 
"emoji": ":fire:", + "text": "*{{ alert.severity|capitalize }}* :: *{{ alert.resource }}* :: _{{ alert.event }}_\n\n```{{ alert.text }}```", + "attachments": [{ + "color": "{{ color }}", + "fields": [ + {"title": "Device", "value": "{{ alert.attributes.lofarDevice }}", "short": True }, + {"title": "Attribute", "value": "{{ alert.attributes.lofarAttribute }}", "short": True }, + {"title": "Resource", "value": "{{ alert.resource }}", "short": True }, + {"title": "Status", "value": "{{ status|capitalize }}", "short": True }, + {"title": "Dashboards", "value": "<{{ config.BASE_URL }}/#/alert/{{ alert.id }}|Alerta>\nGrafana <{{ alert.attributes.grafanaDashboardUrl }}|Dashboard> <{{ alert.attributes.grafanaPanelUrl }}|Panel>", "short": True }, + {"title": "Configure", "value": "Grafana <{{ alert.attributes.grafanaAlertUrl }}|View> <{{ alert.attributes.grafanaSilenceUrl }}|Silence>", "short": True }, + ], + }] +} diff --git a/docker-compose/alerta-web/config.json b/docker-compose/alerta-web/config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ae8e0a0ef78ef99f8fd1b0ea68d851624f84d --- /dev/null +++ b/docker-compose/alerta-web/config.json @@ -0,0 +1 @@ +{"endpoint": "/api"} diff --git a/docker-compose/alerta-web/grafana-plugin/alerta_grafana.py b/docker-compose/alerta-web/grafana-plugin/alerta_grafana.py new file mode 100644 index 0000000000000000000000000000000000000000..7f6b840a4e6517bd5be2afa083ee317196725e0e --- /dev/null +++ b/docker-compose/alerta-web/grafana-plugin/alerta_grafana.py @@ -0,0 +1,60 @@ +import os +import json +import logging + +from alerta.plugins import PluginBase + +LOG = logging.getLogger() + + +class EnhanceGrafana(PluginBase): + """ + Plugin for parsing alerts coming from Grafana + """ + + def pre_receive(self, alert, **kwargs): + # Parse Grafana-specific fields + alert.attributes['grafanaStatus'] = alert.raw_data.get('status', '') + + def htmlify(link: str, desc: str) -> str: + return f'<a href="{link}" 
target="_blank">{desc}</a>'; + + # User-specified "Panel ID" annotation + panelURL = alert.raw_data.get('panelURL', '') + if panelURL: + alert.attributes['grafanaPanelUrl'] = panelURL + alert.attributes['grafanaPanelHtml'] = htmlify(panelURL, "Grafana Panel") + + # User-specified "Dashboard UID" annotation + dashboardURL = alert.raw_data.get('dashboardURL', '') + if dashboardURL: + alert.attributes['grafanaDashboardUrl'] = dashboardURL + alert.attributes['grafanaDashboardHtml'] = htmlify(dashboardURL, "Grafana Dashboard") + + alertURL = alert.raw_data.get('generatorURL', '') + if alertURL: + # expose alert view URL, as user may not have edit rights + # Convert from + # http://host:3000/alerting/kujybCynk/edit + # to + # http://host:3000/alerting/grafana/kujybCynk/view + alertURL = alertURL.replace("/alerting/", "/alerting/grafana/").replace("/edit", "/view") + + alert.attributes['grafanaAlertUrl'] = alertURL + alert.attributes['grafanaAlertHtml'] = htmlify(alertURL, "Grafana Alert") + + silenceURL = alert.raw_data.get('silenceURL', '') + if silenceURL: + alert.attributes['grafanaSilenceUrl'] = silenceURL + alert.attributes['grafanaSilenceHtml'] = htmlify(silenceURL, "Grafana Silence Alert") + + return alert + + def post_receive(self, alert, **kwargs): + return + + def status_change(self, alert, status, text, **kwargs): + return + + def take_action(self, alert, action, text, **kwargs): + raise NotImplementedError diff --git a/docker-compose/alerta-web/grafana-plugin/setup.py b/docker-compose/alerta-web/grafana-plugin/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..cb06d95919fde788f299e8318bfc23ef01dbfb79 --- /dev/null +++ b/docker-compose/alerta-web/grafana-plugin/setup.py @@ -0,0 +1,24 @@ + +from setuptools import setup, find_packages + +version = '1.0.0' + +setup( + name="alerta-grafana", + version=version, + description='Alerta plugin for enhancing Grafana alerts', + url='https://git.astron.nl/lofar2.0/tango', + license='Apache 
License 2.0', + author='Jan David Mol', + author_email='mol@astron.nl', + packages=find_packages(), + py_modules=['alerta_grafana'], + include_package_data=True, + zip_safe=True, + entry_points={ + 'alerta.plugins': [ + 'grafana = alerta_grafana:EnhanceGrafana' + ] + }, + python_requires='>=3.5' +) diff --git a/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py b/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py new file mode 100644 index 0000000000000000000000000000000000000000..c4f618d2d6675feab78fce49cedc9f8030766c97 --- /dev/null +++ b/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py @@ -0,0 +1,41 @@ +import os +import json +import logging + +from alerta.plugins import PluginBase + +LOG = logging.getLogger() + + +class EnhanceLOFAR(PluginBase): + """ + Plugin for enhancing alerts with LOFAR-specific information + """ + + def pre_receive(self, alert, **kwargs): + # Parse LOFAR-specific fields + for tag in alert.tags: + try: + key, value = tag.split("=", 1) + except ValueError: + continue + + if key == "device": + alert.attributes['lofarDevice'] = value + + if key == "name": + alert.attributes['lofarAttribute'] = value + + if key == "station": + alert.resource = value + + return alert + + def post_receive(self, alert, **kwargs): + return + + def status_change(self, alert, status, text, **kwargs): + return + + def take_action(self, alert, action, text, **kwargs): + raise NotImplementedError diff --git a/docker-compose/alerta-web/lofar-plugin/setup.py b/docker-compose/alerta-web/lofar-plugin/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..70ab552180a5ad10a978fb10f1deeb0d87319bb7 --- /dev/null +++ b/docker-compose/alerta-web/lofar-plugin/setup.py @@ -0,0 +1,24 @@ + +from setuptools import setup, find_packages + +version = '1.0.0' + +setup( + name="alerta-lofar", + version=version, + description='Alerta plugin for enhancing LOFAR alerts', + url='https://git.astron.nl/lofar2.0/tango', + license='Apache License 2.0', + 
author='Jan David Mol', + author_email='mol@astron.nl', + packages=find_packages(), + py_modules=['alerta_lofar'], + include_package_data=True, + zip_safe=True, + entry_points={ + 'alerta.plugins': [ + 'lofar = alerta_lofar:EnhanceLOFAR' + ] + }, + python_requires='>=3.5' +) diff --git a/docker-compose/alerta-web/rules.json b/docker-compose/alerta-web/rules.json new file mode 100644 index 0000000000000000000000000000000000000000..ca8df8cf7b01a4bd014387e045a2492d35292300 --- /dev/null +++ b/docker-compose/alerta-web/rules.json @@ -0,0 +1 @@ +{"test":[{"name":"test2","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"My test alert"},"grafana_alert":{"id":3,"orgId":1,"title":"FPGA processing error 2","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"exemplar":false,"expr":"device_attribute{device=\"stat/sdp/1\",name=\"FPGA_error_R\"}","format":"time_series","group":[],"hide":false,"interval":"","intervalMs":1000,"legendFormat":"","maxDataPoints":43200,"metricColumn":"name","rawQuery":true,"rawSql":"SELECT\n data_time AS \"time\",\n x::text,\n device,\n name,\n case when value then 1 else 0 end AS value\nFROM lofar_array_boolean\nWHERE\n $__timeFilter(data_time) AND\n name = 'fpga_error_r'\nORDER BY 
1,2","refId":"A","select":[[{"params":["x"],"type":"column"}],[{"params":["value"],"type":"column"}]],"table":"lofar_array_boolean","timeColumn":"data_time","timeColumnType":"timestamptz","where":[{"name":"$__timeFilter","params":[],"type":"macro"},{"datatype":"text","name":"","params":["name","=","'fpga_error_r'"],"type":"expression"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0,0],"type":"gt"},"operator":{"type":"and"},"query":{"params":[]},"reducer":{"params":[],"type":"avg"},"type":"query"}],"datasource":{"type":"__expr__","uid":"__expr__"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T14:18:48Z","intervalSeconds":10,"version":1,"uid":"waXdSCynk","namespace_uid":"9DkbdYy7z","namespace_id":6,"rule_group":"test2","no_data_state":"OK","exec_err_state":"Error"}}]},{"name":"test","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"My test alert"},"grafana_alert":{"id":2,"orgId":1,"title":"FPGA processing error","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"exemplar":false,"expr":"device_attribute{device=\"stat/sdp/1\",name=\"FPGA_error_R\"}","format":"time_series","group":[],"hide":false,"interval":"","intervalMs":1000,"legendFormat":"","maxDataPoints":43200,"metricColumn":"name","rawQuery":true,"rawSql":"SELECT\n data_time AS \"time\",\n x::text,\n device,\n name,\n case when value then 1 else 0 end AS value\nFROM lofar_array_boolean\nWHERE\n $__timeFilter(data_time) AND\n name = 'fpga_error_r'\nORDER BY 
1,2","refId":"A","select":[[{"params":["x"],"type":"column"}],[{"params":["value"],"type":"column"}]],"table":"lofar_array_boolean","timeColumn":"data_time","timeColumnType":"timestamptz","where":[{"name":"$__timeFilter","params":[],"type":"macro"},{"datatype":"text","name":"","params":["name","=","'fpga_error_r'"],"type":"expression"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0,0],"type":"gt"},"operator":{"type":"and"},"query":{"params":[]},"reducer":{"params":[],"type":"avg"},"type":"query"}],"datasource":{"type":"__expr__","uid":"__expr__"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T14:16:22Z","intervalSeconds":10,"version":1,"uid":"MIt4Ijs7k","namespace_uid":"9DkbdYy7z","namespace_id":6,"rule_group":"test","no_data_state":"OK","exec_err_state":"Error"}}]}]} \ No newline at end of file diff --git a/docker-compose/alerta.yml b/docker-compose/alerta.yml index 5465fe19fe10a563df8527bc8cc64c4d93ee2895..2ae3be42c17e450007914facd2a686c7cce1d63e 100644 --- a/docker-compose/alerta.yml +++ b/docker-compose/alerta.yml @@ -3,6 +3,10 @@ version: '2.1' volumes: alerta-postgres-data: {} +secrets: + alerta-secrets: + file: alerta-web/alerta-secrets.json + services: alerta-web: build: alerta-web @@ -13,13 +17,16 @@ services: - "8081:8080" depends_on: - alerta-db + secrets: + - alerta-secrets environment: - DEBUG=1 # remove this line to turn DEBUG off - DATABASE_URL=postgres://postgres:postgres@alerta-db:5432/monitoring + - BASE_URL=http://${HOSTNAME}:8081 + - DASHBOARD_URL=http://${HOSTNAME}:8081 - AUTH_REQUIRED=True - ADMIN_USERS=admin #default password: alerta - ADMIN_KEY=demo-key - - PLUGINS=reject,blackout,normalise,enhance restart: always alerta-db: diff --git a/docker-compose/archiver-timescale.yml b/docker-compose/archiver-timescale.yml index 
a154b96d3338a039237b5d6f1933933c0969ecb3..a0f3e02f27a40cfe49009002026ecc2ebed68f47 100644 --- a/docker-compose/archiver-timescale.yml +++ b/docker-compose/archiver-timescale.yml @@ -1,5 +1,8 @@ version: '2' +volumes: + archiver-timescale-data: {} + services: archiver-timescale: image: timescaledb @@ -12,6 +15,8 @@ services: - "5432:5432/tcp" extra_hosts: - "host.docker.internal:host-gateway" + volumes: + - archiver-timescale-data:/var/lib/postgresql/data depends_on: - databaseds environment: diff --git a/docker-compose/device-pdu.yml b/docker-compose/device-pdu.yml new file mode 100644 index 0000000000000000000000000000000000000000..524748d14dff00d69a35f219c702f095da4b2d2a --- /dev/null +++ b/docker-compose/device-pdu.yml @@ -0,0 +1,42 @@ +# +# Requires: +# - lofar-device-base.yml +# +version: '2' + +volumes: + iers-data: {} + +services: + device-pdu: + image: device-pdu + # build explicitly, as docker-compose does not understand a local image + # being shared among services. + build: + context: ..
+ dockerfile: docker-compose/lofar-device-base/Dockerfile + args: + SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION} + container_name: ${CONTAINER_NAME_PREFIX}device-pdu + logging: + driver: "json-file" + options: + max-size: "100m" + max-file: "10" + networks: + - control + ports: + - "5714:5714" # unique port for this DS + extra_hosts: + - "host.docker.internal:host-gateway" + volumes: + - ..:/opt/lofar/tango:rw + environment: + - TANGO_HOST=${TANGO_HOST} + working_dir: /opt/lofar/tango + entrypoint: + - bin/start-ds.sh + # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA + # can't know about our Docker port forwarding + - l2ss-pdu PDU STAT -v -ORBendPoint giop:tcp:0:5714 -ORBendPointPublish giop:tcp:${HOSTNAME}:5714 + restart: unless-stopped diff --git a/docker-compose/grafana.yml b/docker-compose/grafana.yml index f298db2746961b7d30d2e147192d0dfc58530725..73c508440cd63ad201b0b4199c2443b18be804a5 100644 --- a/docker-compose/grafana.yml +++ b/docker-compose/grafana.yml @@ -24,6 +24,8 @@ services: # - grafana-configs:/etc/grafana ports: - "3000:3000" + environment: + - GF_SERVER_DOMAIN=${HOSTNAME} logging: driver: syslog options: diff --git a/docker-compose/grafana/Dockerfile b/docker-compose/grafana/Dockerfile index e51cce5eeaa0310c1ecd698d8d797e3163ce4457..7eceb9c154c654da53eb0a4b060df945013bf766 100644 --- a/docker-compose/grafana/Dockerfile +++ b/docker-compose/grafana/Dockerfile @@ -3,6 +3,7 @@ FROM grafana/grafana # Install some plugins RUN grafana-cli plugins install briangann-datatable-panel RUN grafana-cli plugins install ae3e-plotly-panel +RUN grafana-cli plugins install yesoreyeram-infinity-datasource COPY grafana.ini /etc/grafana/ diff --git a/docker-compose/grafana/README.md b/docker-compose/grafana/README.md new file mode 100644 index 0000000000000000000000000000000000000000..754c00a75abde5600ee65088d057558eabe02352 --- /dev/null +++ 
b/docker-compose/grafana/README.md @@ -0,0 +1,21 @@ +# Post configuration + +To export all current alert rules, use: + +To import rules into a fresh Grafana instance: + + * Obtain an 'editor' API key through the Grafana GUI (cogwheel -> API keys), + * Run: + + curl http://localhost:3000/api/alertmanager/grafana/config/api/v1/alerts -H 'Authorization: Bearer (api key)' > alerting.json + curl localhost:3000/api/ruler/grafana/api/v1/rules > rules.json + + * Delete the UIDs in alerting.json + +To import rules into a fresh Grafana instance: + + * Obtain an 'editor' API key through the Grafana GUI (cogwheel -> API keys), + * Run (first without piping to bash): + + python3 import-rules.py -c alerting.json -r rules.json -B key | bash + diff --git a/docker-compose/grafana/alerting.json b/docker-compose/grafana/alerting.json index 1a08e2cebfe5ebb77b22afbca6a0f70dd86ff4e5..d5193964ae1127c0f76cc60a05dfc8f0dd4e1bf4 100644 --- a/docker-compose/grafana/alerting.json +++ b/docker-compose/grafana/alerting.json @@ -2,7 +2,8 @@ "template_files": {}, "alertmanager_config": { "route": { - "receiver": "Alerta" + "receiver": "Alerta", + "repeat_interval": "10m" }, "templates": null, "receivers": [ @@ -10,7 +11,6 @@ "name": "Alerta", "grafana_managed_receiver_configs": [ { - "uid": "ROaAvQEnz", "name": "Alerta", "type": "webhook", "disableResolveMessage": false, diff --git a/docker-compose/grafana/dashboards/home.json b/docker-compose/grafana/dashboards/home.json index 98250c378ec60c9a79205cbb5afc3e125f75e31c..7aa5d7aad44152e32732a4b6c37f165694066f91 100644 --- a/docker-compose/grafana/dashboards/home.json +++ b/docker-compose/grafana/dashboards/home.json @@ -20,20 +20,313 @@ }, "editable": true, "fiscalYearStartMonth": 0, - "gnetId": null, "graphTooltip": 0, - "id": 5, + "id": 8, "links": [], "liveNow": false, "panels": [ + { + "alignNumbersToRightEnabled": true, + "columnAliases": [], + "columnFiltersEnabled": false, + "columnWidthHints": [], + "columns": [], + "compactRowsEnabled": true,
+ "datasource": { + "type": "yesoreyeram-infinity-datasource", + "uid": "alertaui" + }, + "datatablePagingType": "simple_numbers", + "datatableTheme": "basic_theme", + "emptyData": false, + "fontSize": "100%", + "gridPos": { + "h": 6, + "w": 24, + "x": 0, + "y": 0 + }, + "hoverEnabled": true, + "id": 58, + "infoEnabled": false, + "lengthChangeEnabled": true, + "orderColumnEnabled": true, + "pagingTypes": [ + { + "$$hashKey": "object:142", + "text": "Page number buttons only", + "value": "numbers" + }, + { + "$$hashKey": "object:143", + "text": "'Previous' and 'Next' buttons only", + "value": "simple" + }, + { + "$$hashKey": "object:144", + "text": "'Previous' and 'Next' buttons, plus page numbers", + "value": "simple_numbers" + }, + { + "$$hashKey": "object:145", + "text": "'First', 'Previous', 'Next' and 'Last' buttons", + "value": "full" + }, + { + "$$hashKey": "object:146", + "text": "'First', 'Previous', 'Next' and 'Last' buttons, plus page numbers", + "value": "full_numbers" + }, + { + "$$hashKey": "object:147", + "text": "'First' and 'Last' buttons, plus page numbers", + "value": "first_last_numbers" + } + ], + "panelHeight": 130, + "pluginVersion": "8.4.5", + "rowNumbersEnabled": false, + "rowsPerPage": 5, + "scroll": false, + "scrollHeight": "default", + "searchEnabled": true, + "searchHighlightingEnabled": false, + "showCellBorders": false, + "showHeader": true, + "showRowBorders": true, + "sort": { + "col": 0, + "desc": true + }, + "sortByColumns": [ + { + "$$hashKey": "object:17", + "columnData": 0, + "sortMethod": "desc" + } + ], + "sortByColumnsData": [ + [ + 0, + "desc" + ] + ], + "stripedRowsEnabled": true, + "styles": [ + { + "$$hashKey": "object:19", + "dateFormat": "dd DD/MM HH:mm", + "pattern": "Time", + "type": "date" + }, + { + "$$hashKey": "object:45", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": 
"Panel|Dashboard|Alert", + "sanitize": true, + "splitPattern": "/ /", + "thresholds": [], + "type": "string", + "unit": "short", + "valueMaps": [] + }, + { + "$$hashKey": "object:76", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "link": true, + "mappingType": 1, + "pattern": "Alerta Link", + "splitPattern": "/ /", + "thresholds": [], + "type": "string", + "unit": "short", + "valueMaps": [] + }, + { + "$$hashKey": "object:867", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "Since", + "splitPattern": "/ /", + "thresholds": [], + "type": "date", + "unit": "short" + } + ], + "targets": [ + { + "columns": [ + { + "selector": "createTime", + "text": "Since", + "type": "string" + }, + { + "selector": "severity", + "text": "Severity", + "type": "string" + }, + { + "selector": "status", + "text": "Status", + "type": "string" + }, + { + "selector": "event", + "text": "Name", + "type": "string" + }, + { + "selector": "text", + "text": "Description", + "type": "string" + }, + { + "selector": "attributes.lofarDevice", + "text": "Device", + "type": "string" + }, + { + "selector": "attributes.lofarAttribute", + "text": "Attribute", + "type": "string" + }, + { + "selector": "attributes.grafanaDashboardHtml", + "text": "Dashboard", + "type": "string" + }, + { + "selector": "attributes.grafanaPanelHtml", + "text": "Panel", + "type": "string" + }, + { + "selector": "attributes.grafanaAlertHtml", + "text": "Alert", + "type": "string" + }, + { + "selector": "href", + "text": "Alerta Link", + "type": "string" + } + ], + "datasource": { + "type": "yesoreyeram-infinity-datasource", + "uid": "alertaui" + }, + "filters": [], + "format": "table", + "global_query_id": "", + "hide": false, + "refId": "A", + "root_selector": "", + 
"source": "url", + "type": "json", + "url": "http://alerta-web:8080/api/alerts", + "url_options": { + "data": "", + "method": "GET" + } + } + ], + "themeOptions": { + "dark": "./styles/dark.scss", + "light": "./styles/light.scss" + }, + "themes": [ + { + "$$hashKey": "object:117", + "disabled": false, + "text": "Basic", + "value": "basic_theme" + }, + { + "$$hashKey": "object:118", + "disabled": true, + "text": "Bootstrap", + "value": "bootstrap_theme" + }, + { + "$$hashKey": "object:119", + "disabled": true, + "text": "Foundation", + "value": "foundation_theme" + }, + { + "$$hashKey": "object:120", + "disabled": true, + "text": "ThemeRoller", + "value": "themeroller_theme" + } + ], + "title": "Alerta Alerts", + "transform": "table", + "type": "briangann-datatable-panel" + }, + { + "description": "", + "gridPos": { + "h": 5, + "w": 24, + "x": 0, + "y": 6 + }, + "id": 60, + "links": [ + { + "targetBlank": true, + "title": "Details", + "url": "/alerting/list" + } + ], + "options": { + "alertInstanceLabelFilter": "", + "alertName": "", + "dashboardAlerts": false, + "groupBy": [], + "groupMode": "default", + "maxItems": 20, + "sortOrder": 1, + "stateFilter": { + "error": true, + "firing": true, + "inactive": false, + "noData": false, + "normal": false, + "pending": true + } + }, + "title": "Firing Alerts", + "type": "alertlist" + }, { "collapsed": false, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 0 + "y": 11 }, "id": 15, "panels": [], @@ -41,7 +334,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "Progress of station initialisation", "fieldConfig": { "defaults": { @@ -76,7 +372,7 @@ "h": 6, "w": 4, "x": 0, - "y": 1 + "y": 12 }, "id": 43, "options": { @@ -92,7 +388,7 @@ "showThresholdMarkers": false, "text": {} }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -106,7 +402,10 @@ "type": "gauge" }, { - "datasource": 
"Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "fieldConfig": { "defaults": { "color": { @@ -166,7 +465,7 @@ "h": 9, "w": 6, "x": 4, - "y": 1 + "y": 12 }, "id": 4, "options": { @@ -187,7 +486,7 @@ }, "textMode": "value_and_name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -211,7 +510,10 @@ "type": "stat" }, { - "datasource": "ELK logs", + "datasource": { + "type": "elasticsearch", + "uid": "RuQjz8V7z" + }, "fieldConfig": { "defaults": { "color": { @@ -266,7 +568,7 @@ "h": 9, "w": 10, "x": 10, - "y": 1 + "y": 12 }, "id": 32, "options": { @@ -276,7 +578,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "targets": [ @@ -329,13 +632,12 @@ "type": "timeseries" }, { - "datasource": null, "description": "Links to other dashboards", "gridPos": { "h": 9, "w": 4, "x": 20, - "y": 1 + "y": 12 }, "id": 47, "options": { @@ -348,7 +650,7 @@ "showStarred": false, "tags": [] }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "format": "time_series", @@ -383,7 +685,10 @@ "type": "dashlist" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -415,13 +720,20 @@ "h": 3, "w": 4, "x": 0, - "y": 7 + "y": 18 }, "id": 44, "options": { + "footer": { + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, "showHeader": false }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -485,7 +797,10 @@ "type": "table" }, { - "datasource": "ELK logs", + "datasource": { + "type": "elasticsearch", + "uid": "RuQjz8V7z" + }, "description": "List of the errors in the selected timespan", "fieldConfig": { "defaults": { @@ -579,14 +894,21 @@ "h": 5, "w": 24, "x": 0, - "y": 10 + "y": 21 }, "id": 56, "options": { + "footer": { + "fields": "", + "reducer": [ + "sum" + ], + "show": false + }, 
"showHeader": true, "sortBy": [] }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "alias": "", @@ -676,12 +998,11 @@ }, { "collapsed": false, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 15 + "y": 26 }, "id": 49, "panels": [], @@ -689,7 +1010,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "State of APSCT", "fieldConfig": { "defaults": { @@ -717,7 +1041,7 @@ "h": 3, "w": 21, "x": 0, - "y": 16 + "y": 27 }, "id": 24, "options": { @@ -735,7 +1059,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -797,7 +1121,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "State of APSPU", "fieldConfig": { "defaults": { @@ -825,7 +1152,7 @@ "h": 3, "w": 3, "x": 21, - "y": 16 + "y": 27 }, "id": 50, "options": { @@ -843,7 +1170,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -859,12 +1186,11 @@ }, { "collapsed": true, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 19 + "y": 30 }, "id": 53, "panels": [], @@ -872,7 +1198,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "State of Unboard 2 I2C Bus", "fieldConfig": { "defaults": { @@ -904,7 +1233,7 @@ "h": 3, "w": 24, "x": 0, - "y": 20 + "y": 31 }, "id": 54, "options": { @@ -922,7 +1251,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -970,12 +1299,11 @@ }, { "collapsed": false, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 23 + "y": 34 }, "id": 17, "panels": [], @@ -983,7 +1311,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": 
{ + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "fieldConfig": { "defaults": { "color": { @@ -1014,7 +1345,7 @@ "h": 8, "w": 6, "x": 0, - "y": 24 + "y": 35 }, "id": 21, "options": { @@ -1032,7 +1363,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1047,7 +1378,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "fieldConfig": { "defaults": { "color": { @@ -1078,7 +1412,7 @@ "h": 8, "w": 6, "x": 6, - "y": 24 + "y": 35 }, "id": 25, "options": { @@ -1096,7 +1430,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1112,7 +1446,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1144,7 +1481,7 @@ "h": 8, "w": 6, "x": 12, - "y": 24 + "y": 35 }, "id": 51, "options": { @@ -1162,7 +1499,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1179,12 +1516,11 @@ }, { "collapsed": false, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 32 + "y": 43 }, "id": 19, "panels": [], @@ -1192,7 +1528,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1224,7 +1563,7 @@ "h": 8, "w": 5, "x": 0, - "y": 33 + "y": 44 }, "id": 11, "options": { @@ -1242,7 +1581,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1260,7 +1599,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1292,7 +1634,7 @@ "h": 8, "w": 5, "x": 5, 
- "y": 33 + "y": 44 }, "id": 9, "options": { @@ -1310,7 +1652,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1328,7 +1670,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "Number of inputs that are fed from the SDP wave-form generator", "fieldConfig": { "defaults": { @@ -1366,7 +1711,7 @@ "h": 4, "w": 3, "x": 10, - "y": 33 + "y": 44 }, "id": 12, "options": { @@ -1384,7 +1729,7 @@ "text": {}, "textMode": "value" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1403,12 +1748,11 @@ }, { "collapsed": false, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 41 + "y": 52 }, "id": 27, "panels": [], @@ -1416,7 +1760,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1448,7 +1795,7 @@ "h": 8, "w": 5, "x": 0, - "y": 42 + "y": 53 }, "id": 28, "options": { @@ -1466,7 +1813,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1484,7 +1831,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1546,7 +1896,7 @@ "h": 8, "w": 5, "x": 5, - "y": 42 + "y": 53 }, "id": 29, "options": { @@ -1556,7 +1906,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -1593,7 +1944,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1655,7 +2009,7 @@ "h": 8, "w": 5, "x": 10, - "y": 42 + "y": 53 }, "id": 30, "options": { @@ -1665,7 
+2019,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -1686,7 +2041,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "Rate of SSTs replicated to connected clients.", "fieldConfig": { "defaults": { @@ -1748,7 +2106,7 @@ "h": 8, "w": 5, "x": 15, - "y": 42 + "y": 53 }, "id": 33, "options": { @@ -1758,7 +2116,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -1779,7 +2138,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "Load of TCPReplicator class, which sends statistics packets to connected clients.", "fieldConfig": { "defaults": { @@ -1843,7 +2205,7 @@ "h": 8, "w": 3, "x": 20, - "y": 42 + "y": 53 }, "id": 34, "options": { @@ -1853,7 +2215,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -1875,12 +2238,11 @@ }, { "collapsed": false, - "datasource": null, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 50 + "y": 61 }, "id": 36, "panels": [], @@ -1888,7 +2250,10 @@ "type": "row" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -1920,7 +2285,7 @@ "h": 4, "w": 5, "x": 0, - "y": 51 + "y": 62 }, "id": 37, "options": { @@ -1938,7 +2303,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ { "exemplar": true, @@ -1956,7 +2321,10 @@ "type": "stat" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -2018,7 +2386,7 @@ "h": 8, "w": 5, "x": 5, - "y": 51 + "y": 62 }, "id": 38, "options": 
{ @@ -2028,7 +2396,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -2065,7 +2434,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -2127,7 +2499,7 @@ "h": 8, "w": 5, "x": 10, - "y": 51 + "y": 62 }, "id": 39, "options": { @@ -2137,7 +2509,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -2158,7 +2531,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "Rate of XSTs replicated to connected clients.", "fieldConfig": { "defaults": { @@ -2220,7 +2596,7 @@ "h": 8, "w": 5, "x": 15, - "y": 51 + "y": 62 }, "id": 40, "options": { @@ -2230,7 +2606,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -2251,7 +2628,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "Load of TCPReplicator class, which sends statistics packets to connected clients.", "fieldConfig": { "defaults": { @@ -2315,7 +2695,7 @@ "h": 8, "w": 3, "x": 20, - "y": 51 + "y": 62 }, "id": 41, "options": { @@ -2325,7 +2705,8 @@ "placement": "bottom" }, "tooltip": { - "mode": "single" + "mode": "single", + "sort": "none" } }, "pluginVersion": "8.1.2", @@ -2346,7 +2727,10 @@ "type": "timeseries" }, { - "datasource": "Prometheus", + "datasource": { + "type": "prometheus", + "uid": "6W2nM-Vnz" + }, "description": "", "fieldConfig": { "defaults": { @@ -2378,7 +2762,7 @@ "h": 4, "w": 5, "x": 0, - "y": 55 + "y": 66 }, "id": 45, "options": { @@ -2396,7 +2780,7 @@ "text": {}, "textMode": "name" }, - "pluginVersion": "8.2.1", + "pluginVersion": "8.4.5", "targets": [ 
{ "exemplar": true, @@ -2415,7 +2799,7 @@ } ], "refresh": false, - "schemaVersion": 31, + "schemaVersion": 35, "style": "dark", "tags": [], "templating": { @@ -2429,5 +2813,6 @@ "timezone": "", "title": "Home", "uid": "nC8N_kO7k", - "version": 6 + "version": 1, + "weekStart": "" } diff --git a/docker-compose/grafana/datasources/alertaui.yaml b/docker-compose/grafana/datasources/alertaui.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8fa7ddcfe36d5b1fcaf04a79a7defe166c26bcf8 --- /dev/null +++ b/docker-compose/grafana/datasources/alertaui.yaml @@ -0,0 +1,41 @@ +apiVersion: 1 + +datasources: + # <string, required> name of the datasource. Required + - name: Alerta UI + # <string, required> datasource type. Required + type: yesoreyeram-infinity-datasource + # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required + access: proxy + # <int> org id. will default to orgId 1 if not specified + orgId: 1 + # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically + uid: alertaui + # <string> url + url: http://alerta-web:8080/api + # <string> Deprecated, use secureJsonData.password + password: + # <string> database user, if used + user: postgres + # <string> database name, if used + database: hdb + # <bool> enable/disable basic auth + basicAuth: false + # <string> basic auth username + basicAuthUser: + # <string> Deprecated, use secureJsonData.basicAuthPassword + basicAuthPassword: + # <bool> enable/disable with credentials headers + withCredentials: + # <bool> mark as default datasource. Max one per org + isDefault: false + # <map> fields that will be converted to json and stored in jsonData + jsonData: + secureQueryName1: "api-key" + # <string> json object of data that will be encrypted. + secureJsonData: + secureQueryValue1: "demo-key" + version: 1 + # <bool> allow users to edit datasources from the UI. 
+ editable: false + diff --git a/docker-compose/grafana/grafana.ini b/docker-compose/grafana/grafana.ini index 82f1f4bb004e5ba3c1078226e96decf09cdca4f5..acfabe0f10190c2b07ae579d21bd1abfc1891ff3 100644 --- a/docker-compose/grafana/grafana.ini +++ b/docker-compose/grafana/grafana.ini @@ -58,7 +58,7 @@ ;static_root_path = public # enable gzip -;enable_gzip = false +enable_gzip = true # https certs & key file ;cert_file = @@ -867,7 +867,9 @@ enabled = true [panels] # If set to true Grafana will allow script tags in text panels. Not recommended as it enable XSS vulnerabilities. -;disable_sanitize_html = false + +# enable this to allow us to create mash ups with other pages +disable_sanitize_html = true [plugins] ;enable_alpha = false diff --git a/docker-compose/grafana/import-rules.py b/docker-compose/grafana/import-rules.py new file mode 100755 index 0000000000000000000000000000000000000000..340215ce1e53744aef3a2722f69c3ecdfd28ca82 --- /dev/null +++ b/docker-compose/grafana/import-rules.py @@ -0,0 +1,74 @@ +#!/usr/bin/python3 +import json +import os +import argparse + +parser = argparse.ArgumentParser( + formatter_class=argparse.RawDescriptionHelpFormatter, + description= +""" +Generate rule import files and script for Grafana. + +This script expands a given rules.json file into individual rules and +prints the bash commands to import them in Grafana. 
+ +To export rules from Grafana, use + curl <grafana>/api/ruler/grafana/api/v1/rules > rules.json +""") +parser.add_argument( + '-c', '--alert-config-file', type=str, required=False, help="Input alertmanager configuration JSON to parse, output of 'curl <grafana>/api/alertmanager/grafana/config/api/v1/alerts' [%(default)s]") +parser.add_argument( + '-r', '--rules-file', type=str, required=True, help="Input rules JSON to parse, output of 'curl <grafana>/api/ruler/grafana/api/v1/rules' [%(default)s]") +parser.add_argument( + '-o', '--output-dir', type=str, default="rules", help="Directory to store the output [%(default)s]") +parser.add_argument( + '-B', '--authorization-bearer', type=str, default="abcdefghijklmnopqrstuvwxyz", help="Authorization bearer from the Grafana 'editor' API key [%(default)s]") +parser.add_argument( + '-g', '--grafana_url', type=str, default="http://localhost:3000", help="Base URL of Grafana [%(default)s]") +parser.add_argument( + '-u', '--update', default=False, action='store_true', help="Update existing alerts, instead of creating new ones [%(default)s]") + +args = parser.parse_args() + +if args.alert_config_file: + print(f"echo Importing alert configuration file {args.alert_config_file}") + print(f"curl -X POST {args.grafana_url}/api/alertmanager/grafana/config/api/v1/alerts -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'Authorization: Bearer {args.authorization_bearer}' -d '@{args.alert_config_file}'") + print(f"echo ''") + +with open(args.rules_file) as f: + data=json.load(f) + + try: + os.mkdir(args.output_dir) + except FileExistsError as e: + pass + + # the rules are of format {"folder": [{alert}, {alert}] } + for folder, rules in data.items(): + try: + os.mkdir(f"{args.output_dir}/{folder}") + except FileExistsError as e: + pass + + # print command to create folder + payload = json.dumps({"title": folder}) + print(f"echo Creating folder {folder}") + print(f"curl -X POST {args.grafana_url}/api/folders -H 'Content-Type:
application/json' -H 'Accept: application/json' -H 'Authorization: Bearer {args.authorization_bearer}' -d '{payload}'") + print(f"echo ''") + + for rule in rules: + rule_filename = f"{args.output_dir}/{folder}/{rule['name']}.json" + + if not args.update: + # strip rule UIDs + for subrule in rule["rules"]: + del subrule["grafana_alert"]["uid"] + + # dump this rule + with open(rule_filename, "w") as rule_file: + json.dump(rule, rule_file) + + # print import statement for this rule + print(f"echo Processing rule {folder}/{rule['name']}") + print(f"curl -X POST {args.grafana_url}/api/ruler/grafana/api/v1/rules/{folder} -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'Authorization: Bearer {args.authorization_bearer}' -d '@{rule_filename}'") + print(f"echo ''") diff --git a/docker-compose/grafana/rules.json b/docker-compose/grafana/rules.json new file mode 100644 index 0000000000000000000000000000000000000000..81467dd918dd0be60e7bde30dcce798d8f209892 --- /dev/null +++ b/docker-compose/grafana/rules.json @@ -0,0 +1 @@ +{"station":[{"name":"FPGA processing error","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"One or more FPGAs are unusable."},"grafana_alert":{"id":1,"orgId":1,"title":"FPGA processing error","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"format":"time_series","group":[],"hide":false,"intervalMs":1000,"maxDataPoints":43200,"metricColumn":"none","rawQuery":true,"rawSql":"SELECT\n $__timeGroup(data_time, $__interval),\n x::text,\n device,\n name,\n value\nFROM lofar_array_boolean\nWHERE\n $__timeFilter(data_time) AND\n name = 'fpga_error_r'\nORDER BY 
1,2","refId":"A","select":[[{"params":["value_r"],"type":"column"}]],"table":"att_scalar_devdouble","timeColumn":"data_time","timeColumnType":"timestamp","where":[{"name":"$__timeFilter","params":[],"type":"macro"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0],"type":"gt"},"operator":{"type":"and"},"query":{"params":["A"]},"reducer":{"params":[],"type":"last"},"type":"query"}],"datasource":{"type":"__expr__","uid":"-100"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T18:01:53Z","intervalSeconds":10,"version":3,"uid":"kujybCynk","namespace_uid":"R_jsbCynz","namespace_id":6,"rule_group":"FPGA processing error","no_data_state":"NoData","exec_err_state":"Alerting"}}]}]} \ No newline at end of file diff --git a/docker-compose/jupyter/Dockerfile b/docker-compose/jupyter/Dockerfile index cc1652e4a45bc14805632ec1d4056beaab1fd34c..4017b5c8e3faaf21690d83f35b2e994fbfd31e15 100644 --- a/docker-compose/jupyter/Dockerfile +++ b/docker-compose/jupyter/Dockerfile @@ -5,9 +5,8 @@ FROM ${SOURCE_IMAGE} # that are needed for temporary storage the proper owner and access rights. 
ARG CONTAINER_EXECUTION_UID=1000 -# Create homedir -ENV HOME=/home/user -RUN sudo mkdir -p ${HOME} +# Create new user with uid but only if uid not used +RUN sudo adduser --disabled-password --system --uid ${CONTAINER_EXECUTION_UID} --no-create-home --home ${HOME} user || exit 0 RUN sudo chown ${CONTAINER_EXECUTION_UID} -R ${HOME} COPY requirements.txt ./ diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py index cf3e092e8f3b8a58ada27252a7862a00f49ad870..1434393a85f0ed506d30303964f43c6c5ee0fa33 100644 --- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py +++ b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py @@ -8,6 +8,7 @@ xst = DeviceProxy("STAT/XST/1") unb2 = DeviceProxy("STAT/UNB2/1") boot = DeviceProxy("STAT/Boot/1") tilebeam = DeviceProxy("STAT/TileBeam/1") +pdu = DeviceProxy("STAT/PDU/1") beamlet = DeviceProxy("STAT/Beamlet/1") digitalbeam = DeviceProxy("STAT/DigitalBeam/1") docker = DeviceProxy("STAT/Docker/1") diff --git a/docker-compose/tango.yml b/docker-compose/tango.yml index 166891b51f9a1ac59d3fcf837c3486f3199ad798..5a6839f44a356113ae1fc525a0ff6e3290e777cd 100644 --- a/docker-compose/tango.yml +++ b/docker-compose/tango.yml @@ -80,12 +80,10 @@ services: - TANGO_HOST=${TANGO_HOST} command: > sh -c "wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict -- - json2tango -w -a -u /tango-archiver-data/archiver-devices.json && sleep infinity" volumes: - ..:/opt/lofar/tango:rw - ${HOME}:/hosthome - - ../CDB/tango-archiver-data:/tango-archiver-data logging: driver: syslog options: diff --git a/docker-compose/timescaledb/resources/10_lofar_views.sh b/docker-compose/timescaledb/resources/10_lofar_views.sh deleted file mode 100644 index 7d7fb8cec25bd53f017fd1086636cec1be769755..0000000000000000000000000000000000000000 --- 
a/docker-compose/timescaledb/resources/10_lofar_views.sh +++ /dev/null @@ -1,427 +0,0 @@ -#!/bin/bash -psql << EOF -\c hdb - --- NOTE: We concatenate domain/family/member here, which means we can't index --- the resulting column. However, queries also supply the attribute name, --- which we can index. The scan on the device name is then limited to --- entries which have the same attribute name across devices. - -CREATE OR REPLACE VIEW lofar_scalar_double AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - 0 AS x, - value_r as value - FROM att_scalar_devdouble att - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devboolean AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devboolean att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devuchar AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devuchar att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devshort AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devshort att - -- 
add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devushort AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devushort att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devlong AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devlong att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devulong AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devulong att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devlong64 AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devlong64 att - -- add array values, and 
their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devulong64 AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devulong64 att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devfloat AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devfloat att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devdouble AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devdouble att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devstring AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devstring att - -- add array values, and their index - JOIN 
LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devstate AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devstate att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devencoded AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devencoded att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_array_devenum AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - array_element.idx - 1 AS x, - array_element.val as value - FROM att_array_devenum att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devboolean AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - 
FROM att_image_devboolean att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devuchar AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devuchar att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devshort AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devshort att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devushort AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devushort att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = 
ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devlong AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devlong att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devulong AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devulong att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devlong64 AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devlong64 att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devulong64 AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 
1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devulong64 att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devfloat AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devfloat att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devdouble AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devdouble att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devstring AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devstring att - -- add array values, and their index - JOIN LATERAL 
UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devstate AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devstate att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devencoded AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devencoded att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; - -CREATE OR REPLACE VIEW lofar_image_devenum AS - SELECT - att.data_time AS data_time, - CONCAT_WS('/', domain, family, member) AS device, - ac.name AS name, - (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, - (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, - array_element.val as value - FROM att_image_devenum att - -- add array values, and their index - JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE - -- add the device information - JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id - WHERE att.value_r IS NOT NULL; -EOF diff --git 
a/docker-compose/timescaledb/resources/13_lofar_views.sql b/docker-compose/timescaledb/resources/13_lofar_views.sql index 9df29a278d2c12b3a058e3973f79007a95f3f379..791e191452f769fbae60136a1746e4e2c0a1fc7c 100644 --- a/docker-compose/timescaledb/resources/13_lofar_views.sql +++ b/docker-compose/timescaledb/resources/13_lofar_views.sql @@ -7,6 +7,21 @@ -- DOUBLE -- +CREATE OR REPLACE VIEW lofar_image_double AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devdouble att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + CREATE OR REPLACE VIEW lofar_array_double AS SELECT att.data_time AS data_time, @@ -21,7 +36,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_double AS +CREATE OR REPLACE VIEW lofar_scalar_double AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -32,15 +47,30 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- BOOLEAN -- +-- BOOLEAN -- + +CREATE OR REPLACE VIEW lofar_image_boolean AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + CASE WHEN array_element.val THEN 1 ELSE 0 END AS value + FROM att_image_devboolean att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH 
ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_array_boolean AS +CREATE OR REPLACE VIEW lofar_array_boolean AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, ac.name AS name, array_element.idx - 1 AS x, - array_element.val as value + CASE WHEN array_element.val THEN 1 ELSE 0 END AS value FROM att_array_devboolean att -- add array values, and their index JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE @@ -48,19 +78,35 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_boolean AS +CREATE OR REPLACE VIEW lofar_scalar_boolean AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, ac.name AS name, - value_r as value + CASE WHEN value_r THEN 1 ELSE 0 END AS value FROM att_scalar_devboolean att -- add the device information JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- UCHAR -- - CREATE OR REPLACE VIEW lofar_array_uchar AS +-- UCHAR -- + +CREATE OR REPLACE VIEW lofar_image_uchar AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devuchar att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_uchar AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -74,7 +120,7 @@ 
CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_uchar AS +CREATE OR REPLACE VIEW lofar_scalar_uchar AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -85,8 +131,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- SHORT -- - CREATE OR REPLACE VIEW lofar_array_short AS +-- SHORT -- + +CREATE OR REPLACE VIEW lofar_image_short AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devshort att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_short AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -100,7 +162,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_short AS +CREATE OR REPLACE VIEW lofar_scalar_short AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -111,8 +173,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- USHORT -- - CREATE OR REPLACE VIEW lofar_array_ushort AS +-- USHORT -- + +CREATE OR REPLACE VIEW lofar_image_ushort AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / 
ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devushort att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_ushort AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -126,7 +204,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_ushort AS +CREATE OR REPLACE VIEW lofar_scalar_ushort AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -137,8 +215,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- LONG -- - CREATE OR REPLACE VIEW lofar_array_long AS +-- LONG -- + +CREATE OR REPLACE VIEW lofar_image_long AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devlong att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_long AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -152,7 +246,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_long AS 
+CREATE OR REPLACE VIEW lofar_scalar_long AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -163,8 +257,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- ULONG -- - CREATE OR REPLACE VIEW lofar_array_ulong AS +-- ULONG -- + +CREATE OR REPLACE VIEW lofar_image_ulong AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devulong att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_ulong AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -178,7 +288,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_ulong AS +CREATE OR REPLACE VIEW lofar_scalar_ulong AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -189,8 +299,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- LONG64 -- - CREATE OR REPLACE VIEW lofar_array_long64 AS +-- LONG64 -- + +CREATE OR REPLACE VIEW lofar_image_long64 AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devlong64 att + -- add array values, and 
their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_long64 AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -204,7 +330,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_long64 AS +CREATE OR REPLACE VIEW lofar_scalar_long64 AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -215,8 +341,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- ULONG64 -- - CREATE OR REPLACE VIEW lofar_array_ulong64 AS +-- ULONG64 -- + +CREATE OR REPLACE VIEW lofar_image_ulong64 AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devulong64 att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_ulong64 AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -230,7 +372,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_ulong64 AS +CREATE OR REPLACE VIEW lofar_scalar_ulong64 AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -241,8 +383,24 @@ CREATE OR 
REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- FLOAT -- - CREATE OR REPLACE VIEW lofar_array_float AS +-- FLOAT -- + +CREATE OR REPLACE VIEW lofar_image_float AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devfloat att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_float AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -256,7 +414,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_float AS +CREATE OR REPLACE VIEW lofar_scalar_float AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -267,8 +425,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- STRING -- - CREATE OR REPLACE VIEW lofar_array_string AS +-- STRING -- + +CREATE OR REPLACE VIEW lofar_image_string AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devstring att +-- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id 
= ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_string AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -282,7 +456,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_string AS +CREATE OR REPLACE VIEW lofar_scalar_string AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -293,8 +467,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- STATE -- - CREATE OR REPLACE VIEW lofar_array_state AS +-- STATE -- + +CREATE OR REPLACE VIEW lofar_image_state AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devstate att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_state AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -308,7 +498,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_state AS +CREATE OR REPLACE VIEW lofar_scalar_state AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -319,8 +509,24 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- ENCODED -- - CREATE OR REPLACE VIEW lofar_array_encoded AS +-- 
ENCODED -- + +CREATE OR REPLACE VIEW lofar_image_encoded AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devencoded att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + +CREATE OR REPLACE VIEW lofar_array_encoded AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -334,7 +540,7 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - CREATE OR REPLACE VIEW lofar_scalar_encoded AS +CREATE OR REPLACE VIEW lofar_scalar_encoded AS SELECT att.data_time AS data_time, CONCAT_WS('/', domain, family, member) AS device, @@ -345,7 +551,23 @@ CREATE OR REPLACE VIEW lofar_array_double AS JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - -- ENUM -- +-- ENUM -- + +CREATE OR REPLACE VIEW lofar_image_enum AS + SELECT + att.data_time AS data_time, + CONCAT_WS('/', domain, family, member) AS device, + ac.name AS name, + (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x, + (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y, + array_element.val as value + FROM att_image_devenum att + -- add array values, and their index + JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE + -- add the device information + JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id + WHERE att.value_r IS NOT NULL; + CREATE OR REPLACE VIEW lofar_array_enum AS SELECT att.data_time AS data_time, @@ -370,8 +592,3 @@ CREATE OR REPLACE VIEW lofar_scalar_enum AS -- add the device information 
JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id WHERE att.value_r IS NOT NULL; - - - - - \ No newline at end of file diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh index 780faa73fc2d0b42bc53d7903ef07b40e0f94081..b703eae61fb5572463f58c3079d133ed30ad37ee 100755 --- a/sbin/run_integration_test.sh +++ b/sbin/run_integration_test.sh @@ -11,34 +11,49 @@ fi cd "$LOFAR20_DIR/docker-compose" || exit 1 +# Start the database server first +make build databaseds dsconfig +make start databaseds dsconfig + +# Give dsconfig and databaseds time to start +sleep 1 # dsconfig container must be up and running... +# shellcheck disable=SC2016 +echo '/usr/local/bin/wait-for-it.sh ${TANGO_HOST} --strict --timeout=300 -- true' | make run dsconfig bash - + +DEVICES="device-boot device-apsct device-apspu device-sdp device-pdu device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-pdu" +SIMULATORS="sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim" + # Build only the required images, please do not build everything that makes CI # take really long to finish, especially grafana / jupyter / prometheus. # jupyter is physically large > 2.5gb and overlayfs is really slow. 
-make build device-sdp device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam -make build sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim -make build databaseds dsconfig elk integration-test + +# shellcheck disable=SC2086 +make build $DEVICES $SIMULATORS +make build elk integration-test make build archiver-timescale hdbppts-cm hdbppts-es # Start and stop sequence -make stop device-boot device-docker device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim hdbppts-es hdbppts-cm archiver-timescale -make start databaseds dsconfig elk - -# Give dsconfig and databaseds time to start -sleep 60 +# shellcheck disable=SC2086 +make stop $DEVICES $SIMULATORS hdbppts-es hdbppts-cm archiver-timescale +make stop device-docker # this one does not test well in docker-in-docker +make start elk # Update the dsconfig # Do not remove `bash`, otherwise statement ignored by gitlab ci shell! 
bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/LOFAR_ConfigDb.json +bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/tango-archiver-data/archiver-devices.json bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/stations/simulators_ConfigDb.json bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/stations/dummy_positions_ConfigDb.json cd "$LOFAR20_DIR/docker-compose" || exit 1 -make start sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim +# shellcheck disable=SC2086 +make start $SIMULATORS # Give the simulators time to start sleep 5 -make start device-boot device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-tilebeam device-beamlet device-digitalbeam +# shellcheck disable=SC2086 +make start $DEVICES # Archive devices -> starting order is important make start archiver-timescale hdbppts-cm hdbppts-es @@ -46,9 +61,20 @@ make start archiver-timescale hdbppts-cm hdbppts-es # TODO(Corne Lukken): Use a nicer more reliable mechanism sleep 60 +# Give archiver-timescale time to start +# shellcheck disable=SC2016 +echo '/usr/local/bin/wait-for-it.sh archiver-timescale:5432 --strict --timeout=300 -- true' | make run dsconfig bash - + # Start the integration test cd "$LOFAR20_DIR/docker-compose" || exit 1 make up integration-test # Run the default integration tests make run integration-test default + +# Configure integration test for recv_cluster module +bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/integrations/recvcluster_ConfigDb.json +make restart device-recv device-tilebeam +sleep 5 + +make run integration-test recv_cluster diff --git a/tangostationcontrol/docs/source/alerting.rst b/tangostationcontrol/docs/source/alerting.rst new file mode 100644 index 0000000000000000000000000000000000000000..032bcd379f68d3fa719dc8956334a910bf6227ee --- /dev/null +++ b/tangostationcontrol/docs/source/alerting.rst @@ -0,0 +1,152 @@ +Alerting +================== + +We 
use the following setup to forward alarms:
+
+- The Tango Controls `hdbpp subsystem <https://tango-controls.readthedocs.io/en/latest/administration/services/hdbpp/hdb++-design-guidelines.html>`_ archives data-value changes into a `TimescaleDB <http://timescale.com>`_ database,
+- Grafana allows `Alert rules <https://grafana.com/docs/grafana/latest/alerting/>`_ to be configured, which poll TimescaleDB and generate an *alert* when the configured condition is met. It also maintains a list of currently firing alerts,
+- `Alerta <https://alerta.io/>`_ is the *alert manager*: it receives these alerts, manages duplicates, and maintains alerts until the operator explicitly acknowledges them. It thus also has a list of alerts that fired in the past.
+
+Archiving attributes
+```````````````````````
+
+The attributes of interest will have to be *archived* periodically to be able to see them in Grafana, and thus to be able to define alerts for them. In Tango Controls, there is a *configuration manager* that provides an interface to manage what is archived, and one or more *event subscribers* to subscribe to event changes and forward them to the archive database.
+
+The ``tangostationcontrol.toolkit.archiver.Archiver`` class provides an easy interface to the archiver. It uses the ``device/attribute`` notation for attributes, f.e. ``STAT/SDP/1/FPGA_error_R``. Some of the functions it provides:
+
+:add_attribute_to_archiver(attribute, polling_period, event_period): Register the given attribute to be archived every ``polling_period`` ms. Also archive the attribute on changes with a maximum rate of ``event_period`` ms.
+
+:remove_attribute_from_archiver(attribute): Unregister the given attribute.
+
+:start_archiving_attribute(attribute): Start archiving the given attribute.
+
+:stop_archiving_attribute(attribute): Stop archiving the given attribute.
+
+:get_attribute_errors(attribute): Return any errors detected while trying to archive the attribute.
+
+:get_subscriber_errors(): Return any errors detected by the subscribers.
+
+So a useful idiom to archive an individual attribute is::
+
+    from tangostationcontrol.archiver import Archiver
+
+    archiver = Archiver()
+    attribute = "STAT/SDP/1/FPGA_error_R"
+    archiver.add_attribute_to_archiver(attribute, 1000, 1000)
+    archiver.start_archiving_attribute(attribute)
+
+.. note:: The archive subscriber gets confused if attributes it archives disappear from the monitoring database. This can cause an archive subscriber to stall. To fix this, get a proxy to the event subscriber, f.e. ``DeviceProxy("archiving/hdbppts/eventsubscriber01")``, and remove the offending attribute(s) from the ``ArchivingList`` property using ``proxy.get_property("ArchivingList")`` and ``proxy.put_property({"ArchivingList": [...]})``.
+
+Inspecting the database
+`````````````````````````
+
+The archived attributes end up in a `TimescaleDB <http://timescale.com>`_ database, exposed on port 5432, with credentials ``postgres/password``. Key tables are:
+
+:att_conf: Describes which attributes are registered. Note that any device and attribute names are in lower case.
+
+:att_scalar_devXXX: Contains the attribute history for scalar attributes of type XXX.
+
+:att_array_devXXX: Contains the attribute history for 1D array attributes of type XXX.
+
+:att_image_devXXX: Contains the attribute history for 2D array attributes of type XXX.
+
+Each of the attribute history tables contains entries for any recorded value changes, but also for changes in ``quality`` (0=ok, >0=issues), and any error ``att_error_desc_id``. Furthermore, we provide specialised views which combine tables into more readable information:
+
+:lofar_scalar_XXX: View on the attribute history for scalar attributes of type XXX.
+
+:lofar_array_XXX: View on the attribute history for 1D array attributes of type XXX. Each array element is returned in its own row, with ``x`` denoting the index.
+
+:lofar_image_XXX: View on the attribute history for 2D array attributes of type XXX. Each array element is returned in its own row, with ``x`` and ``y`` denoting the indices.
+
+A typical selection could thus look like::
+
+    SELECT
+      data_time AS time, device, name, x, value
+    FROM lofar_array_boolean
+    WHERE device = 'stat/sdp/1' AND name = 'fpga_error_r'
+    ORDER BY time DESC
+    LIMIT 16
+
+Attributes in Grafana
+````````````````````````
+
+The Grafana instance (http://localhost:3000) is linked to TimescaleDB by default. The query for plotting an attribute requires some Grafana-specific macros to select the exact data points Grafana requires::
+
+    SELECT
+      $__timeGroup(data_time, $__interval),
+      x::text, device, name,
+      value
+    FROM lofar_array_boolean
+    WHERE
+      $__timeFilter(data_time) AND name = 'fpga_error_r'
+    ORDER BY 1,2
+
+The fields ``x``, ``device``, and ``name`` are retrieved as *string*, as that makes them labels to the query, which Grafana then uses to identify the different metrics for each array element.
+
+.. hint:: Grafana orders labels alphabetically. To order the ``x`` element properly, one could use the ``TO_CHAR(x, '00')`` function instead of ``x::text`` to prepend values with 0.
+
+Setting up alerts
+```````````````````
+
+We use the `Grafana 8+ alerts <https://grafana.com/docs/grafana/latest/alerting/>`_ to monitor our system, and the alerts are to be forwarded to our Alerta instance. Both our default set of alerts and this forwarding have to be post-configured after installation:
+
+- Go to Grafana (http://localhost:3000) and sign in with an administration account (default: admin/admin),
+- Go to ``(cogwheel) -> API keys`` and create an ``editor`` API key. Copy the resulting hash,
+- Go to the ``docker-compose/grafana/`` source directory, and run::
+
+    ./import-rules.py -c alerting.json -r rules.json -B <apikey> | bash
+
+..
hint:: Whether Grafana can send alerts to Alerta can be tested by sending a `test alert <http://localhost:3000/alerting/notifications/receivers/Alerta/edit?alertmanager=grafana>`_. + +The following enhancements are useful to configure for the alerts: + +- You'll want to alert on a query, followed by a ``Reduce`` step with Function ``Last`` and Mode ``Drop Non-numeric Value``. This triggers the alert on the latest value(s), but keeps the individual array elements separated, +- In ``Add details``, the ``Dashboard UID`` and ``Panel ID`` annotations are useful to configure to where you want the user to go, as Grafana will generate hyperlinks from them. To obtain a dashboard uid, go to ``Dashboards -> Browse`` and check out its URL. For the panel id, view a panel and check the URL, +- In ``Add details``, the ``Summary`` annotation will be used as the alert description, +- In ``Custom labels``, add ``severity = major`` to raise the severity of the alert (default: warning). See also the `supported values <https://docs.alerta.io/webui/configuration.html#severity-colors>`_. + +Alerta dashboard +`````````````````` + +The Alerta dashboard (http://localhost:8081) provides an overview of received alerts, which stay in the list until the alert condition disappears, and the alert is explicitly acknowledged or deleted: + +- *Acknowledging* an alert silences it for a day, +- *Shelving* an alert silences it for 2 hours, and removes it from more overviews, +- *Watching* an alert means receiving browser notifications on changes, +- *Deleting* an alert removes it until Grafana sends it again (default: 10 minutes). + +See ``docker-compose/alerta-web/alertad.conf`` for these settings. + +Several installed plugins enhance the received events: + +- ``slack`` plugin forwards alerts to Slack (see below), +- Our own ``grafana`` plugin parses Grafana-specific fields and adds them to the alert, +- Our own ``lofar`` plugin parses and generates LOFAR-specific fields. 
+
+Slack integration
+```````````````````
+
+Our Alerta setup is configured to send alerts to Slack. To set this up, you need to:
+
+- Create a Slack App: https://api.slack.com/apps?new_app=1
+- Under ``OAuth & Permissions``, add the following ``OAuth Scope``: ``chat:write``,
+- Install the App in your Workspace,
+- Copy the ``OAuth Token``.
+
+.. hint:: To obtain the ``OAuth Token`` later on, go to https://api.slack.com/apps, click on your App, and look under ``Install App``.
+
+Now, edit ``docker-compose/alerta-web/alerta-secrets.json``:
+
+.. literalinclude:: ../../../docker-compose/alerta-web/alerta-secrets.json
+
+The ``SLACK_TOKEN`` is the ``OAuth Token``, and the ``SLACK_CHANNEL`` is the channel in which to post the alerts.
+
+Any further tweaking can be done by modifying ``docker-compose/alerta-web/alertad.conf``.
+
+Debugging hints
+````````````````````````
+
+- Grafana sends alerts to Alerta using the *Prometheus AlertManager* format, and thus uses the Prometheus webhook to do so. To see what Grafana emits, configure it to send to your custom https://hookbin.com/ endpoint,
+- Grafana by default resends firing alerts every 4 hours, and we set this to 10 minutes. This means that if an alert was successfully sent but lost (or deleted), it takes that long to get it back. For debugging, you may want to lower this to f.e. 10 seconds in the ``Alerting -> Notification policies`` settings of Grafana,
+- Alerta has a plugin system which allows easily modifying the attributes of an alert (see ``docker-compose/alerta-web`` and https://github.com/alerta/alerta-contrib). To see which attributes an alert has, simply go to the alert in the web GUI, press *Copy*, and paste in your editor,
+- Alerta allows a ``DEBUG=True`` parameter in ``docker-compose/alerta-web/alertad.conf`` to generate debug output.
diff --git a/tangostationcontrol/docs/source/devices/pdu.rst b/tangostationcontrol/docs/source/devices/pdu.rst new file mode 100644 index 0000000000000000000000000000000000000000..ca78ad3c5b890bb53e422f11df17ff1530ec5376 --- /dev/null +++ b/tangostationcontrol/docs/source/devices/pdu.rst @@ -0,0 +1,7 @@ +.. _pdu: + +PDU +==================== + +The ``pdu == DeviceProxy("STAT/PDU/1")`` device controls the Power Distribution +Unit (PDU) diff --git a/tangostationcontrol/docs/source/devices/tilebeam.rst b/tangostationcontrol/docs/source/devices/tilebeam.rst index e0c4ce810434da3347b0ec9804937c9d69d9ec66..1df29562f605039cad0f77ad284b1f0d3b562f0b 100644 --- a/tangostationcontrol/docs/source/devices/tilebeam.rst +++ b/tangostationcontrol/docs/source/devices/tilebeam.rst @@ -8,19 +8,19 @@ Beam Tracking Beam tracking automatically recomputes and reapplies pointings periodically, and immediately when new pointings are configured. It exposes the following interface: -:HBAT_tracking_enabled_R: Whether beam tracking is running. +:Tracking_enabled_R: Whether beam tracking is running. :type: ``bool`` -:HBAT_pointing_direction_RW: The direction in which the beam should be tracked for each antenna. The beam tracker will steer the beam periodically, and explicitly whenever the pointings change. +:Pointing_direction_RW: The direction in which the beam should be tracked for each antenna. The beam tracker will steer the beam periodically, and explicitly whenever the pointings change. :type: ``str[N_ant][3]`` -:HBAT_pointing_direction_R: The last applied pointing of each antenna. +:Pointing_direction_R: The last applied pointing of each antenna. :type: ``str[N_ant][3]`` -:HBAT_pointing_timestamp_R: The timestamp for which the last set pointing for each antenna was applied and set (in seconds since 1970). +:Pointing_timestamp_R: The timestamp for which the last set pointing for each antenna was applied and set (in seconds since 1970). 
:type: ``float[N_ant][3]`` @@ -29,11 +29,11 @@ Beam Steering The beam steering is responsible for pointing the beams at a target, by converting pointings to ``recv.HBAT_bf_delay_steps``. The beam steering is typically controlled by the beam tracker. To point the antennas in any direction manually, you should disable beam tracking first: -:HBAT_tracking_enabled_RW: Enable or disable beam tracking (default: ``True``). +:Tracking_enabled_RW: Enable or disable beam tracking (default: ``True``). :type: ``bool`` -:HBAT_set_pointing(pointings): Point the beams towards the specified ``pointings[N_ant][3]`` for all antennas (for which ``recv.ANT_mask_RW`` is set). +:set_pointing(pointings): Point the beams towards the specified ``pointings[N_ant][3]`` for all antennas (for which ``recv.ANT_mask_RW`` is set). :returns: ``None`` diff --git a/tangostationcontrol/docs/source/index.rst b/tangostationcontrol/docs/source/index.rst index 97f8272100311a1b4cb758a4b47d3b9fc93d4e28..d89a0cda0bef89798497ae94263cd0204c4dfe3a 100644 --- a/tangostationcontrol/docs/source/index.rst +++ b/tangostationcontrol/docs/source/index.rst @@ -24,11 +24,13 @@ Even without having access to any LOFAR2.0 hardware, you can install the full st devices/digitalbeam devices/boot devices/docker + devices/pdu devices/recv devices/sdp devices/sst-xst devices/configure configure_station + alerting signal_chain beam_tracking developer diff --git a/tangostationcontrol/docs/source/installation.rst b/tangostationcontrol/docs/source/installation.rst index fd01fe45e0b27de2170c23341ab04e1c6b97f900..09877ef26a8c5f2ea71822338910b884e9bd7a3b 100644 --- a/tangostationcontrol/docs/source/installation.rst +++ b/tangostationcontrol/docs/source/installation.rst @@ -78,13 +78,3 @@ Configuration These sections are optional, to configure specific functionality you may or may not want to use. -Alerta -```````` - -If you want Grafana alerts to appear in Alerta, you need to manually configure Grafana to forward them. 
Import the alert settings manually: - -- Go to Grafana (http://localhost:3000) and sign in with an administration account (default: admin/admin), -- Go to ``Alerting`` and select ``Admin`` in the left menu bar, -- Copy/paste the following information, and press ``Save``: - -.. literalinclude:: ../../../docker-compose/grafana/alerting.json diff --git a/tangostationcontrol/setup.cfg b/tangostationcontrol/setup.cfg index e9d1b0ada4513e0973785c5e2f99e5d78ada9350..997b7947d27f34b8c30db7eb320bb5384b546d38 100644 --- a/tangostationcontrol/setup.cfg +++ b/tangostationcontrol/setup.cfg @@ -35,6 +35,7 @@ where=./ console_scripts = l2ss-apsct = tangostationcontrol.devices.apsct:main l2ss-apspu = tangostationcontrol.devices.apspu:main + l2ss-pdu = tangostationcontrol.devices.pdu:main l2ss-tilebeam = tangostationcontrol.devices.tilebeam:main l2ss-beamlet = tangostationcontrol.devices.sdp.beamlet:main l2ss-digitalbeam = tangostationcontrol.devices.sdp.digitalbeam:main diff --git a/tangostationcontrol/tangostationcontrol/devices/README.md b/tangostationcontrol/tangostationcontrol/devices/README.md index 08eaf1f67e6f47f9f323e387f5714cf2ff5359a6..19d72e35c4f09d06d500d997d5255c1abdd53b77 100644 --- a/tangostationcontrol/tangostationcontrol/devices/README.md +++ b/tangostationcontrol/tangostationcontrol/devices/README.md @@ -10,10 +10,11 @@ If a new device is added, it will (likely) need to be referenced in several plac - Adjust `docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` to make an alias for it available in Jupyter, - Adjust `tangostationcontrol/tangostationcontrol/devices/boot.py` to add the device to the station initialisation sequence, - Add to `docker-compose/` to create a YaML file to start the device in a docker container. 
NOTE: it needs a unique 57xx port assigned, + current _unused_ port value: 5715 - Adjust `tangostationcontrol/setup.cfg` to add an entry point for the device in the package installation, - Add to `tangostationcontrol/tangostationcontrol/integration_test/default/devices/` to add an integration test, - Adjust `sbin/run_integration_test.sh` to have the device started when running the integration tests, - Adjust `.gitlab-ci.yml` to add the device to the `docker_build_image_all` step and to create a `docker_build_image_device_XXX` step, -- Add to `docs/source/devices/` to mention the device in the end-user documentation. -- Adjust `docs/source/index.rst` to include the newly created file in `docs/source/devices/`. +- Add to `tangostationcontrol/docs/source/devices/` to mention the device in the end-user documentation. +- Adjust `tangostationcontrol/docs/source/index.rst` to include the newly created file in `docs/source/devices/`. diff --git a/tangostationcontrol/tangostationcontrol/devices/apsct.py b/tangostationcontrol/tangostationcontrol/devices/apsct.py index dd8bd1c2a7698d2c2481dbb52d60e99541f7e43b..75ef8f612b3bfd44735444fba95d8927226fe55a 100644 --- a/tangostationcontrol/tangostationcontrol/devices/apsct.py +++ b/tangostationcontrol/tangostationcontrol/devices/apsct.py @@ -14,7 +14,7 @@ # PyTango imports from tango import DebugIt from tango.server import command, attribute, device_property -from tango import AttrWriteType, DevState +from tango import AttrWriteType import numpy # Additional import @@ -23,6 +23,7 @@ from tangostationcontrol.common.entrypoint import entry from tangostationcontrol.common.lofar_logging import device_logging_to_python from tangostationcontrol.devices.device_decorators import * from tangostationcontrol.devices.opcua_device import opcua_device +from tangostationcontrol.devices.lofar_device import lofar_device import logging logger = logging.getLogger() @@ -138,7 +139,7 @@ class APSCT(opcua_device): @command() @DebugIt() - 
@only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def APSCT_off(self): """ @@ -148,7 +149,7 @@ class APSCT(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def APSCT_200MHz_on(self): """ @@ -158,7 +159,7 @@ class APSCT(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def APSCT_160MHz_on(self): """ diff --git a/tangostationcontrol/tangostationcontrol/devices/beam_device.py b/tangostationcontrol/tangostationcontrol/devices/beam_device.py new file mode 100644 index 0000000000000000000000000000000000000000..0b436d89b2191b33f059094a89740d66382d927b --- /dev/null +++ b/tangostationcontrol/tangostationcontrol/devices/beam_device.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# +# Distributed under the terms of the APACHE license. +# See LICENSE.txt for more info. + +"""Beam Abstract Device Server for LOFAR2.0 + +""" +# PyTango imports +from tango.server import attribute, command +from tango import AttrWriteType, DebugIt + +# Additional import +from tangostationcontrol.common.entrypoint import entry +from tangostationcontrol.common.measures import get_measures_directory, get_available_measures_directories, download_measures, use_measures_directory, restart_python +from tangostationcontrol.common.lofar_logging import log_exceptions +from tangostationcontrol.devices.lofar_device import lofar_device + +__all__ = ["beam_device", "main"] + +import logging +logger = logging.getLogger() + + +class beam_device(lofar_device): + + # ---------- + # Attributes + # ---------- + + # Directory where the casacore measures that we use, reside. We configure ~/.casarc to + # use the symlink /opt/IERS/current, which we switch to the actual set of files to use. 
+ measures_directory_R = attribute(dtype=str, access=AttrWriteType.READ, fget = lambda self: get_measures_directory()) + + # List of dowloaded measures (the latest 64, anyway) + measures_directories_available_R = attribute(dtype=(str,), max_dim_x=64, access=AttrWriteType.READ, fget = lambda self: sorted(get_available_measures_directories())[-64:]) + + # -------- + # Commands + # -------- + + @command(dtype_out=str, doc_out="Name of newly installed measures directory") + @DebugIt() + @log_exceptions() + def download_measures(self): + """ Download new measures tables into /opt/IERS, but do not activate them. + + NOTE: This may take a while to complete. You are advised to increase + the timeout of the proxy using `my_device.set_timeout_millis(10000)`. """ + + return download_measures() + + @command(dtype_in=str, doc_in="Measures directory to activate") + @DebugIt() + @log_exceptions() + def use_measures(self, newdir): + """ Activate a downloaded set of measures tables. + + NOTE: This will turn off and restart this device!! 
""" + + # switch to requested measures + use_measures_directory(newdir) + logger.info(f"Switched measures table to {newdir}") + + # turn off our device, to prepare for a python restart + self.Off() + + # restart this program to force casacore to adopt + # the new tables + logger.warning("Restarting device to activate new measures tables") + restart_python() + +# ---------- +# Run server +# ---------- +def main(**kwargs): + """Main function of the Docker module.""" + return entry(beam_device, **kwargs) diff --git a/tangostationcontrol/tangostationcontrol/devices/boot.py b/tangostationcontrol/tangostationcontrol/devices/boot.py index 0b496636fbd30550255c488ef458e294468c07aa..450df1b6fc233219168d3320a8acd8591361cb13 100644 --- a/tangostationcontrol/tangostationcontrol/devices/boot.py +++ b/tangostationcontrol/tangostationcontrol/devices/boot.py @@ -50,7 +50,7 @@ class DevicesInitialiser(object): the start() method, and progress can be followed by inspecting the members progress (0-100), status (string), and is_running() (bool). """ - def __init__(self, device_names, reboot=False, initialise_hardware=True, proxy_timeout=10.0): + def __init__(self, device_names, reboot=False, initialise_hardware=True, proxy_timeout=60.0): self.reboot = reboot self.initialise_hardware = initialise_hardware @@ -153,7 +153,7 @@ class DevicesInitialiser(object): continue if self.is_available(device): - if self.reboot or self.devices[device].state() not in [DevState.ON, DevState.ALARM]: + if self.reboot or self.devices[device].state() not in lofar_device.OPERATIONAL_STATES: self.stop_device(device) self.boot_device(device) @@ -204,7 +204,7 @@ class DevicesInitialiser(object): else: proxy.warm_boot() - if proxy.state() not in [DevState.ON, DevState.ALARM]: # ALARM still means booting was succesful + if proxy.state() not in lofar_device.OPERATIONAL_STATES: raise InitialisationException(f"Could not boot device {device_name}. 
It reports status: {proxy.status()}") self.set_status(f"[restarting {device_name}] Succesfully booted.") @@ -218,7 +218,7 @@ class Boot(lofar_device): DeviceProxy_Time_Out = device_property( dtype='DevDouble', mandatory=False, - default_value=30.0, + default_value=60.0, ) # Initialise the hardware when initialising a station. Can end badly when using simulators. @@ -233,6 +233,7 @@ class Boot(lofar_device): dtype='DevVarStringArray', mandatory=False, default_value=["STAT/Docker/1", # Docker controls the device containers, so it goes before anything else + "STAT/PDU/1", # PDU boot early to detect power delivery failure as fast as possible "STAT/APSPU/1", # APS Power Units control other hardware we want to initialise "STAT/APSCT/1", "STAT/RECV/1", # RCUs are input for SDP, so initialise them first @@ -313,14 +314,14 @@ class Boot(lofar_device): @command() @DebugIt() - @only_in_states([DevState.ON]) + @only_in_states(lofar_device.OPERATIONAL_STATES) @log_exceptions() def boot(self): self._boot(reboot=False, initialise_hardware=self.Initialise_Hardware) @command() @DebugIt() - @only_in_states([DevState.ON]) + @only_in_states(lofar_device.OPERATIONAL_STATES) @log_exceptions() def reboot(self): self._boot(reboot=True, initialise_hardware=self.Initialise_Hardware) diff --git a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py index 740d45626ae02dcb62b1ff2a00d5b30b31be91ab..cc276428d962fa006d2ef0619790b65c1f2ba270 100644 --- a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py +++ b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py @@ -41,6 +41,7 @@ class lofar_device(Device, metaclass=DeviceMeta): INIT = Device is initialising. 
STANDBY = Device is initialised, but pends external configuration and an explicit turning on,
        ON = Device is fully configured, functional, controls the hardware, and is possibly actively running,
+       ALARM = Device is operating but one of its attributes is out of range,
        FAULT = Device detected an unrecoverable error, and is thus malfunctional,
        OFF = Device is turned off, drops connection to the hardware,
@@ -49,6 +50,7 @@ class lofar_device(Device, metaclass=DeviceMeta):
        OFF -> INIT: Triggered by device. Device will initialise (connect to hardware, other devices),
        INIT -> STANDBY: Triggered by device. Device is initialised, and is ready for additional configuration by the user,
        STANDBY -> ON: Triggered by user. Device reports to be functional,
+       ON -> ALARM: Triggered by tango. Device has attribute(s) with value(s) exceeding their alarm threshold,
        * -> FAULT: Triggered by device. Device has degraded to malfunctional, for example because the connection to the hardware is lost,
        * -> FAULT: Triggered by user. Emulate a forced malfunction for integration testing purposes,
        * -> OFF: Triggered by user. Device is turned off. Triggered by the Off() command,
@@ -57,6 +59,17 @@ class lofar_device(Device, metaclass=DeviceMeta):
        The user triggers their transitions by the commands reflecting the target state (Initialise(), On(), Fault()).
    """
+
+    # The Device states in which we consider our device operational,
+    # and thus allow interaction.
+    OPERATIONAL_STATES = [DevState.ON, DevState.ALARM]
+
+    # States in which Initialise() has happened, and the hardware
+    # can thus be configured or otherwise interacted with.
+ INITIALISED_STATES = OPERATIONAL_STATES + [DevState.STANDBY] + + # States in which most commands are allowed + DEFAULT_COMMAND_STATES = INITIALISED_STATES + # ---------- # Attributes # ---------- @@ -159,7 +172,7 @@ class lofar_device(Device, metaclass=DeviceMeta): self.set_status("Device is in the STANDBY state.") @command() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(INITIALISED_STATES) @DebugIt() @fault_on_error() @log_exceptions() @@ -204,7 +217,6 @@ class lofar_device(Device, metaclass=DeviceMeta): @command() - @only_in_states([DevState.ON, DevState.INIT, DevState.STANDBY, DevState.FAULT]) @DebugIt() @log_exceptions() def Fault(self, new_status="Device is in the FAULT state."): @@ -216,6 +228,9 @@ class lofar_device(Device, metaclass=DeviceMeta): :return:None """ + if self.get_state() == DevState.OFF: + raise Exception("IllegalCommand: Cannot go from FAULT -> OFF") + if self.get_state() == DevState.FAULT: # Already faulting. Don't complain. logger.warning("Requested to go to FAULT state, but am already in FAULT state.") @@ -228,15 +243,19 @@ class lofar_device(Device, metaclass=DeviceMeta): # functions that can or must be overloaded def configure_for_fault(self): + """Overloadable function called in Fault after state is set to FAULT""" pass def configure_for_off(self): + """Overloadable function called in Off after state is set to OFF""" pass def configure_for_on(self): + """Overloadable function called in On BEFORE state is set to ON""" pass def configure_for_initialise(self): + """Overloadable function called in initialise with state INIT, STANDBY after call""" pass def always_executed_hook(self): @@ -268,7 +287,7 @@ class lofar_device(Device, metaclass=DeviceMeta): pass @command() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(DEFAULT_COMMAND_STATES) @DebugIt() @log_exceptions() def set_defaults(self): @@ -298,7 +317,7 @@ class lofar_device(Device, metaclass=DeviceMeta): self._set_defaults(attributes_to_set) 
- @only_in_states([DevState.STANDBY, DevState.INIT, DevState.ON]) + @only_in_states(DEFAULT_COMMAND_STATES) @fault_on_error() @command() def set_translator_defaults(self): @@ -307,25 +326,27 @@ class lofar_device(Device, metaclass=DeviceMeta): # This is just the command version of _set_translator_defaults(). self._set_translator_defaults() - @only_in_states([DevState.STANDBY, DevState.INIT, DevState.ON]) + @only_in_states(DEFAULT_COMMAND_STATES) @fault_on_error() @command() + @DebugIt() def prepare_hardware(self): """ Load firmware required before configuring anything. """ # This is just the command version of _prepare_hardware(). self._prepare_hardware() - @only_in_states([DevState.STANDBY, DevState.INIT, DevState.ON]) + @only_in_states(DEFAULT_COMMAND_STATES) @fault_on_error() @command() + @DebugIt() def initialise_hardware(self): """ Initialise the hardware after configuring it. """ # This is just the command version of _initialise_hardware(). self._initialise_hardware() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(DEFAULT_COMMAND_STATES) @command(dtype_out = DevDouble) def max_archiving_load(self): """ Return the maximum archiving load for the device attributes """ diff --git a/tangostationcontrol/tangostationcontrol/devices/pdu.py b/tangostationcontrol/tangostationcontrol/devices/pdu.py new file mode 100644 index 0000000000000000000000000000000000000000..88a1af8d1d8264fc6214fc8db779a5cb10e684a2 --- /dev/null +++ b/tangostationcontrol/tangostationcontrol/devices/pdu.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# Distributed under the terms of the APACHE license. +# See LICENSE.txt for more info. 
+ +""" PDU Device Server for LOFAR2.0 + +""" + +# Additional import +from tangostationcontrol.common.entrypoint import entry +from tangostationcontrol.devices.lofar_device import lofar_device +from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions + +import logging + +logger = logging.getLogger() + +__all__ = ["PDU", "main"] + + +@device_logging_to_python() +class PDU(lofar_device): + # ----------------- + # Device Properties + # ----------------- + + # ---------- + # Attributes + # ---------- + + # -------- + # overloaded functions + # -------- + + def init_device(self): + super().init_device() + + @log_exceptions() + def configure_for_initialise(self): + super().configure_for_initialise() + + @log_exceptions() + def configure_for_on(self): + super().configure_for_on() + + @log_exceptions() + def configure_for_off(self): + super().configure_for_off() + + +# ---------- +# Run server +# ---------- +def main(**kwargs): + """Main function of the PDU module.""" + return entry(PDU, **kwargs) diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py index 787a51df3e62ed35071cd97e66ffc1851819f74a..93233f86f4a0dd0291205dea5f30e1ab9ae8c5fc 100644 --- a/tangostationcontrol/tangostationcontrol/devices/recv.py +++ b/tangostationcontrol/tangostationcontrol/devices/recv.py @@ -12,11 +12,10 @@ """ # PyTango imports -from functools import partial from tango import DebugIt from tango.server import command from tango.server import device_property, attribute -from tango import AttrWriteType, DevState, DevVarFloatArray +from tango import AttrWriteType, DevVarFloatArray import numpy from math import pi @@ -29,6 +28,7 @@ from tangostationcontrol.common.lofar_logging import device_logging_to_python from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper from tangostationcontrol.devices.device_decorators import * from tangostationcontrol.devices.opcua_device 
import opcua_device +from tangostationcontrol.devices.lofar_device import lofar_device import logging logger = logging.getLogger() @@ -126,6 +126,18 @@ class RECV(opcua_device): # ----- Position information + Antenna_Field_Reference_ITRF = device_property( + doc="ITRF position (XYZ) of each antenna field", + dtype='DevVarFloatArray', + mandatory=False + ) + + Antenna_Field_Reference_ETRS = device_property( + doc="ETRS position (XYZ) of each antenna field", + dtype='DevVarFloatArray', + mandatory=False + ) + HBAT_reference_ITRF = device_property( doc="ITRF position (XYZ) of each HBAT (leave empty to auto-derive from ETRS)", dtype='DevVarFloatArray', @@ -218,6 +230,10 @@ class RECV(opcua_device): # ----- Position information + Antenna_Field_Reference_ITRF_R = attribute(access=AttrWriteType.READ, + doc='Absolute reference position of antenna field, in ITRF', + dtype=(numpy.float,), max_dim_x=3) + HBAT_antenna_ITRF_offsets_R = attribute(access=AttrWriteType.READ, doc='Offsets of the antennas within a tile, in ITRF ("iHBADeltas"). 
True shape: 96x16x3.', dtype=((numpy.float,),), max_dim_x=48, max_dim_y=96) @@ -226,6 +242,15 @@ class RECV(opcua_device): doc='Absolute reference position of each tile, in ITRF', dtype=((numpy.float,),), max_dim_x=3, max_dim_y=96) + def read_Antenna_Field_Reference_ITRF_R(self): + # provide ITRF field coordinates if they were configured + if self.Antenna_Field_Reference_ITRF: + return numpy.array(self.Antenna_Field_Reference_ITRF).reshape(3) + + # calculate them from ETRS coordinates if not, using the configured ITRF reference + ETRS_coordinates = numpy.array(self.Antenna_Field_Reference_ETRS).reshape(3) + return ETRS_to_ITRF(ETRS_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch) + def read_HBAT_antenna_ITRF_offsets_R(self): base_antenna_offsets = numpy.array(self.HBAT_base_antenna_offsets).reshape(16,3) PQR_to_ETRS_rotation_matrix = numpy.array(self.HBAT_PQR_to_ETRS_rotation_matrix).reshape(3,3) @@ -252,7 +277,7 @@ class RECV(opcua_device): # ---------- # Summarising Attributes # ---------- - RCU_LED_colour_R = attribute(dtype=(numpy.uint32,), max_dim_x=32) + RCU_LED_colour_R = attribute(dtype=(numpy.uint32,), max_dim_x=32, fisallowed="is_attribute_wrapper_allowed") def read_RCU_LED_colour_R(self): return (2 * self.read_attribute("RCU_LED_green_on_R") + 4 * self.read_attribute("RCU_LED_red_on_R")).astype(numpy.uint32) @@ -303,6 +328,14 @@ class RECV(opcua_device): # -------- def properties_changed(self): super().properties_changed() + + # The HBAT can only apply positive delays, yet we want to apply a delay + # relative to the center of the tile, which typically requires negative + # delays for half of the elements. + # + # We circumvent this issue by increasing the delays for all elements + # by a fixed amount, the average of all steps. Doing so should result + # in positive delays regardless of the pointing direction. 
self.HBAT_bf_delay_offset = numpy.mean(self.HBAT_bf_delay_step_delays) # -------- @@ -320,14 +353,13 @@ class RECV(opcua_device): calibrated_delays = numpy.add(polarised_delays, self.HBAT_signal_input_delays) # Find the right delay step by looking for the closest match in property RECV-> HBAT_bf_delay_step_delays - HBAT_bf_delay_steps = numpy.zeros((96,32), dtype=numpy.int64) - distance = lambda x , y : numpy.absolute(x-y) - for tile in range(96): - for at in range(32): - delay = calibrated_delays[tile,at] + self.HBAT_bf_delay_offset - step = min(self.HBAT_bf_delay_step_delays,key=partial(distance,delay)) - HBAT_bf_delay_steps[tile,at] = numpy.where(self.HBAT_bf_delay_step_delays==step)[0][0] - return HBAT_bf_delay_steps + def nearest_delay_step(delay): + # We want the index in the HBAT_bf_delay_steps_delay array which is closest to the given delay, + # shifted by HBAT_bf_delay_offset to obtain strictly positive delays. + return (numpy.abs(self.HBAT_bf_delay_step_delays - (delay + self.HBAT_bf_delay_offset))).argmin() + + # Apply to all elements to convert each delay into the number of delay steps + return numpy.vectorize(nearest_delay_step)(calibrated_delays) # -------- # Commands @@ -347,7 +379,7 @@ class RECV(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def RCU_off(self): """ @@ -357,7 +389,7 @@ class RECV(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def RCU_on(self): """ @@ -367,7 +399,7 @@ class RECV(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def RCU_DTH_off(self): """ @@ -377,7 +409,7 @@ class RECV(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def RCU_DTH_on(self): """ @@ 
-394,10 +426,6 @@ class RECV(opcua_device): self.RCU_on() self.wait_attribute("RECVTR_translator_busy_R", False, self.RCU_On_Off_timeout) - # Turn off DTH by default - self.RCU_DTH_off() - self.wait_attribute("RECVTR_translator_busy_R", False, self.RCU_DTH_On_Off_timeout) - # ---------- # Run server # ---------- diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py b/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py index 36ac75f4ed8b6ba94035c495e9729b61f2a7536f..ae61d77bd51299d8dba44e77422d09d324d1ec53 100644 --- a/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py +++ b/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py @@ -8,10 +8,10 @@ """ # PyTango imports -from tango.server import device_property -from tango import AttrWriteType -# Additional import +from tango.server import device_property, command +from tango import AttrWriteType, DevVarFloatArray, DevVarULongArray +# Additional import from tangostationcontrol.common.entrypoint import entry from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper from tangostationcontrol.devices.opcua_device import opcua_device @@ -24,6 +24,14 @@ __all__ = ["Beamlet", "main"] class Beamlet(opcua_device): + # List of OPC-UA CP for BF beamlets + S_pn = SDP.S_pn + N_pn = SDP.N_pn + A_pn = 6 + N_pol = 2 + N_beamlets_ctrl = 488 + N_pol_bf = 2 + # ----------------- # Device Properties # ----------------- @@ -55,12 +63,18 @@ class Beamlet(opcua_device): default_value=[1.0] * 16 ) + FPGA_bf_weights_xy_yx_RW_default = device_property( + dtype='DevVarULongArray', + mandatory=False, + default_value = [[0] * A_pn * N_pol * N_beamlets_ctrl] * N_pn + ) + first_default_settings = [ 'FPGA_beamlet_output_hdr_eth_destination_mac_RW', 'FPGA_beamlet_output_hdr_ip_destination_address_RW', 'FPGA_beamlet_output_hdr_udp_destination_port_RW', - - 'FPGA_beamlet_output_enable_RW' + 'FPGA_beamlet_output_enable_RW', + 'FPGA_bf_weights_xy_yx_RW' ] # ---------- @@ -78,43 +92,45 
@@ class Beamlet(opcua_device): FPGA_beamlet_output_scale_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_scale_R"], datatype=numpy.double, dims=(16,)) FPGA_beamlet_output_scale_RW = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_scale_RW"], datatype=numpy.double, dims=(16,), access=AttrWriteType.READ_WRITE) - # List of OPC-UA CP for BF beamlets - S_pn = SDP.S_pn - N_pn = SDP.N_pn - A_pn = 6 - N_pol = 2 - N_beamlets_ctrl = 488 - N_pol_bf = 2 + FPGA_beamlet_output_nof_packets_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_nof_packets_R"], datatype=numpy.int32, dims=(N_beamlets_ctrl, N_pn)) + FPGA_beamlet_output_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_nof_valid_R"], datatype=numpy.int32, dims=(N_beamlets_ctrl, N_pn)) + + # uint16[N_pn][A_pn][N_pol][N_beamlets_ctrl] + # Select subband per dual-polarisation beamlet. + # 0 for antenna polarization X in beamlet polarization X, + # 1 for antenna polarization Y in beamlet polarization Y. + FPGA_beamlet_subband_select_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_subband_select_R"], datatype=numpy.uint16, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn)) + FPGA_beamlet_subband_select_RW = attribute_wrapper(comms_annotation=["FPGA_beamlet_subband_select_RW"], datatype=numpy.uint16, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) # cint16[N_pn][A_pn][N_pol][N_beamlets_ctrl] # Co-polarization BF weights. The N_pol = 2 parameter index is: # 0 for antenna polarization X in beamlet polarization X, # 1 for antenna polarization Y in beamlet polarization Y. 
- FPGA_bf_weights_xx_yy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_yy_R"], datatype=numpy.int16, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_xx_yy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_yy_RW"], datatype=numpy.int16, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_xx_yy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_yy_R"], datatype=numpy.uint32, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_xx_yy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_yy_RW"], datatype=numpy.uint32, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) # cint16[N_pn][A_pn][N_pol][N_beamlets_ctrl] # Cross-polarization BF weights. The N_pol = 2 parameter index is (note that index pol in range 0:N_pol-1 is the antenna polarization, so index !pol is the beamlet polarization): # 0 for antenna polarization X in beamlet polarization Y, # 1 for antenna polarization Y in beamlet polarization X. - FPGA_bf_weights_xy_yx_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_yx_R"], datatype=numpy.int16, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_xy_yx_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_yx_RW"], datatype=numpy.int16, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_xy_yx_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_yx_R"], datatype=numpy.uint32, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_xy_yx_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_yx_RW"], datatype=numpy.uint32, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) # cint16[N_pn][N_pol_bf][A_pn][N_pol][N_beamlets_ctrl] # Full Jones matrix of BF weights. 
- FPGA_bf_weights_xx_xy_yx_yy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_R"], datatype=numpy.int16, dims=(N_pol_bf * A_pn * N_pol * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_xx_xy_yx_yy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_RW"], datatype=numpy.int16, dims=(N_pol_bf * A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_xx_xy_yx_yy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_R"], datatype=numpy.uint32, dims=(N_pol_bf * A_pn * N_pol * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_xx_xy_yx_yy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_RW"], datatype=numpy.uint32, dims=(N_pol_bf * A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) # cint16[N_pn][A_pn][N_beamlets_ctrl] # BF weights for separate access to respectively w_xx, w_xy, w_yx, and w_yy. - FPGA_bf_weights_xx_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_R"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_xx_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_RW"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) - FPGA_bf_weights_xy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_R"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_xy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_RW"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) - FPGA_bf_weights_yx_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yx_R"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_yx_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yx_RW"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) - FPGA_bf_weights_yy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yy_R"], datatype=numpy.int16, 
dims=(A_pn * N_beamlets_ctrl, N_pn)) - FPGA_bf_weights_yy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yy_RW"], datatype=numpy.int16, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_xx_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_R"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_xx_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xx_RW"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_xy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_R"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_xy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_xy_RW"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_yx_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yx_R"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_yx_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yx_RW"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) + FPGA_bf_weights_yy_R = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yy_R"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn)) + FPGA_bf_weights_yy_RW = attribute_wrapper(comms_annotation=["FPGA_bf_weights_yy_RW"], datatype=numpy.uint32, dims=(A_pn * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE) # ---------- # Summarising Attributes @@ -124,9 +140,33 @@ class Beamlet(opcua_device): # Overloaded functions # -------- + # -------- + # internal functions + # -------- + def _calculate_bf_weights(self, phases: numpy.ndarray): + """ Helper function that converts a difference in phase (in radians) + to a FPGA weight (in complex number) """ + + # Convert array values in complex numbers + unit = numpy.power(2,14) + real = numpy.array(unit * numpy.cos(phases),
dtype=numpy.short) + imag = numpy.array(unit * numpy.sin(phases), dtype=numpy.short) + # join 16 bits of imaginary part (MSB) with 16 bits of real part (LSB) + bf_weights = numpy.array( numpy.frombuffer( b''.join([imag,real]), dtype=numpy.uint32 ) ) + + return bf_weights + # -------- # Commands # -------- + @command(dtype_in=DevVarFloatArray, dtype_out=DevVarULongArray) + def calculate_bf_weights(self, phases: numpy.ndarray): + """ converts a difference in phase (in radians) to a FPGA weight (in complex number) """ + + # Calculate the FPGA weight array + bf_weights = self._calculate_bf_weights(phases) + + return bf_weights # ---------- # Run server diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py b/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py index 147b0a6237d14b70683f778f220f304ffb2e3b86..256bd5b2cccd8531b2e9ff9aeedcc1acf25cd425 100644 --- a/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py +++ b/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py @@ -14,14 +14,14 @@ from tangostationcontrol.common.entrypoint import entry #from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper -from tangostationcontrol.devices.lofar_device import lofar_device +from tangostationcontrol.devices.beam_device import beam_device #import numpy __all__ = ["DigitalBeam", "main"] -class DigitalBeam(lofar_device): +class DigitalBeam(beam_device): pass # ----------------- # Device Properties diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py index c4dc14a165f69ce471e7c416fef14dd0771b54cb..d551c0104eee3a13fcb792e168f2dce97ac3a84b 100644 --- a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py +++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py @@ -98,8 +98,8 @@ class SDP(opcua_device): # ---------- FPGA_firmware_version_R = attribute_wrapper(comms_annotation=["FPGA_firmware_version_R"],
datatype=numpy.str, dims=(16,)) - FPGA_boot_image_R = attribute_wrapper(comms_annotation=["FPGA_boot_image_R"], datatype=numpy.uint32, dims=(16,), doc="Active FPGA image (0=factory, 1=user)") - FPGA_boot_image_RW = attribute_wrapper(comms_annotation=["FPGA_boot_image_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) + FPGA_boot_image_R = attribute_wrapper(comms_annotation=["FPGA_boot_image_R"], datatype=numpy.int32, dims=(16,), doc="Active FPGA image (0=factory, 1=user)") + FPGA_boot_image_RW = attribute_wrapper(comms_annotation=["FPGA_boot_image_RW"], datatype=numpy.int32, dims=(16,), access=AttrWriteType.READ_WRITE) FPGA_global_node_index_R = attribute_wrapper(comms_annotation=["FPGA_global_node_index_R"], datatype=numpy.uint32, dims=(16,)) FPGA_hardware_version_R = attribute_wrapper(comms_annotation=["FPGA_hardware_version_R"], datatype=numpy.str, dims=(16,)) FPGA_pps_present_R = attribute_wrapper(comms_annotation=["FPGA_pps_present_R"], datatype=numpy.bool_, dims=(16,)) @@ -108,6 +108,14 @@ class SDP(opcua_device): FPGA_pps_expected_cnt_RW = attribute_wrapper(comms_annotation=["FPGA_pps_expected_cnt_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) FPGA_processing_enable_R = attribute_wrapper(comms_annotation=["FPGA_processing_enable_R"], datatype=numpy.bool_, dims=(16,)) FPGA_processing_enable_RW = attribute_wrapper(comms_annotation=["FPGA_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE) + FPGA_ring_node_offset_R = attribute_wrapper(comms_annotation=["FPGA_ring_node_offset_R"], datatype=numpy.uint32, dims=(16,)) + FPGA_ring_node_offset_RW = attribute_wrapper(comms_annotation=["FPGA_ring_node_offset_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) + FPGA_ring_nof_nodes_R = attribute_wrapper(comms_annotation=["FPGA_ring_nof_nodes_R"], datatype=numpy.uint32, dims=(16,)) + FPGA_ring_nof_nodes_RW = 
attribute_wrapper(comms_annotation=["FPGA_ring_nof_nodes_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) + FPGA_ring_use_cable_to_next_rn_R = attribute_wrapper(comms_annotation=["FPGA_ring_use_cable_to_next_rn_R"], datatype=numpy.bool_, dims=(16,)) + FPGA_ring_use_cable_to_next_rn_RW = attribute_wrapper(comms_annotation=["FPGA_ring_use_cable_to_next_rn_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE) + FPGA_ring_use_cable_to_previous_rn_R = attribute_wrapper(comms_annotation=["FPGA_ring_use_cable_to_previous_rn_R"], datatype=numpy.bool_, dims=(16,)) + FPGA_ring_use_cable_to_previous_rn_RW = attribute_wrapper(comms_annotation=["FPGA_ring_use_cable_to_previous_rn_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE) FPGA_scrap_R = attribute_wrapper(comms_annotation=["FPGA_scrap_R"], datatype=numpy.int32, dims=(8192,)) FPGA_scrap_RW = attribute_wrapper(comms_annotation=["FPGA_scrap_RW"], datatype=numpy.int32, dims=(8192,), access=AttrWriteType.READ_WRITE) FPGA_sdp_info_antenna_band_index_R = attribute_wrapper(comms_annotation=["FPGA_sdp_info_antenna_band_index_R"], datatype=numpy.uint32, dims=(16,)) @@ -122,7 +130,7 @@ class SDP(opcua_device): FPGA_sdp_info_station_id_RW = attribute_wrapper(comms_annotation=["FPGA_sdp_info_station_id_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) FPGA_subband_weights_R = attribute_wrapper(comms_annotation=["FPGA_subband_weights_R"], datatype=numpy.uint32, dims=(12 * 512, 16)) FPGA_subband_weights_RW = attribute_wrapper(comms_annotation=["FPGA_subband_weights_RW"], datatype=numpy.uint32, dims=(12 * 512, 16), access=AttrWriteType.READ_WRITE) - FPGA_time_since_last_pps_R = attribute_wrapper(comms_annotation=["FPGA_time_since_last_pps_R"], datatype=numpy.uint32, dims=(16,)) + FPGA_time_since_last_pps_R = attribute_wrapper(comms_annotation=["FPGA_time_since_last_pps_R"], datatype=numpy.float_, dims=(16,)) FPGA_temp_R = 
attribute_wrapper(comms_annotation=["FPGA_temp_R"], datatype=numpy.float_, dims=(16,)) FPGA_wg_amplitude_R = attribute_wrapper(comms_annotation=["FPGA_wg_amplitude_R"], datatype=numpy.float_, dims=(12, 16)) FPGA_wg_amplitude_RW = attribute_wrapper(comms_annotation=["FPGA_wg_amplitude_RW"], datatype=numpy.float_, dims=(12, 16), access=AttrWriteType.READ_WRITE) @@ -156,10 +164,9 @@ class SDP(opcua_device): FPGA_jesd204b_rx_err0_R = attribute_wrapper(comms_annotation=["FPGA_jesd204b_rx_err0_R"], datatype=numpy.uint32, dims=(S_pn, N_pn)) FPGA_jesd204b_rx_err1_R = attribute_wrapper(comms_annotation=["FPGA_jesd204b_rx_err1_R"], datatype=numpy.uint32, dims=(S_pn, N_pn)) - FPGA_bsn_monitor_input_bsn_R = attribute_wrapper(comms_annotation=["FPGA_bsn_monitor_input_bsn_R"], datatype=numpy.int64, dims=(N_pn,)) - FPGA_bsn_monitor_input_nof_packets_R = attribute_wrapper(comms_annotation=["FPGA_bsn_monitor_input_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,)) - FPGA_bsn_monitor_input_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_bsn_monitor_input_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,)) - FPGA_bsn_monitor_input_nof_err_R = attribute_wrapper(comms_annotation=["FPGA_bsn_monitor_input_nof_err_R"], datatype=numpy.int32, dims=(N_pn,)) + FPGA_signal_input_bsn_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_bsn_R"], datatype=numpy.int64, dims=(N_pn,)) + FPGA_signal_input_nof_blocks_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_nof_blocks_R"], datatype=numpy.int32, dims=(N_pn,)) + FPGA_signal_input_nof_samples_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_nof_samples_R"], datatype=numpy.int32, dims=(N_pn,)) FPGA_signal_input_samples_delay_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_samples_delay_R"], datatype=numpy.uint32, dims=(S_pn, N_pn)) FPGA_signal_input_samples_delay_RW = attribute_wrapper(comms_annotation=["FPGA_signal_input_samples_delay_RW"], datatype=numpy.uint32, dims=(S_pn, N_pn), 
access=AttrWriteType.READ_WRITE) @@ -173,8 +180,9 @@ class SDP(opcua_device): def read_FPGA_error_R(self): return self.read_attribute("TR_fpga_mask_R") & ( self.read_attribute("TR_fpga_communication_error_R") - | (self.read_attribute("FPGA_firmware_version_R") != "") - | (self.read_attribute("FPGA_jesd204b_csr_dev_syncn_R") == 0).any(axis=1) + | (self.read_attribute("FPGA_firmware_version_R") == "") + # we cannot assume all inputs of an FPGA are working until we have a mask for it + #| (self.read_attribute("FPGA_jesd204b_csr_dev_syncn_R") == 0).any(axis=1) ) def read_FPGA_processing_error_R(self): @@ -194,17 +202,14 @@ class SDP(opcua_device): # -------- def _prepare_hardware(self): - # FPGA firmware loading disabled, as it causes SDPTR to crash, - # see https://support.astron.nl/jira/browse/L2SDP-670 - """ - # FPGAs need the correct firmware loaded - self.FPGA_boot_image_RW = [1] * self.N_pn - - # wait for the firmware to be loaded (ignoring masked out elements) - mask = self.proxy.TR_fpga_mask_RW - self.wait_attribute("FPGA_boot_image_R", lambda attr: ((attr == 1) | ~mask).all(), 10) - """ - pass + # FPGAs that are actually reachable and we care about + wait_for = ~(self.read_attribute("TR_fpga_communication_error_R")) & self.read_attribute("TR_fpga_mask_R") + + # Order the correct firmware to be loaded + self.proxy.FPGA_boot_image_RW = [1] * self.N_pn + + # Wait for the firmware to be loaded (ignoring masked out elements) + self.wait_attribute("FPGA_boot_image_R", lambda attr: ((attr == 1) | ~wait_for).all(), 60) # -------- # Commands diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py index c7e9c95844b93768caa3622488ea2520244b38d9..1228cdc99d6ab8037f7c8bfe4ec859b5185cdd9e 100644 --- a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py +++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py @@ -90,6 +90,9 @@ class SST(Statistics):
FPGA_sst_offload_weighted_subbands_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_weighted_subbands_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE) FPGA_sst_offload_weighted_subbands_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_weighted_subbands_R"], datatype=numpy.bool_, dims=(16,)) + FPGA_sst_offload_nof_packets_R = attribute_wrapper(comms_annotation=["FPGA_sst_offload_nof_packets_R"], datatype=numpy.int32, dims=(16,)) + FPGA_sst_offload_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_sst_offload_nof_valid_R"], datatype=numpy.int32, dims=(16,)) + # number of packets with valid payloads nof_valid_payloads_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(SSTCollector.MAX_FPGAS,), datatype=numpy.uint64) # number of packets with invalid payloads diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py index d8ac93d22dbee89bab1b48b86d5e011f4b4ae265..ca8fde9127b48f673bba745a74c989d260edb0d9 100644 --- a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py +++ b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py @@ -188,7 +188,7 @@ class XSTCollector(StatisticsCollector): # check whether set of baselines in this packet are not out of bounds for antenna in (0,1): - if fields.first_baseline[antenna] + fields.nof_signal_inputs >= self.MAX_INPUTS: + if fields.first_baseline[antenna] + fields.nof_signal_inputs > self.MAX_INPUTS: # packet describes an input that is out of bounds for us raise ValueError(f"Packet describes {fields.nof_signal_inputs} x {fields.nof_signal_inputs} baselines starting at {fields.first_baseline}, but we are limited to describing MAX_INPUTS={self.MAX_INPUTS}") diff --git 
a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py index 8be9cdb483ef4f21818791009a19e8fbc2b91cb9..73a5d85bb742f06bf5412926cadefe4db0455716 100644 --- a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py +++ b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py @@ -110,6 +110,11 @@ class XST(Statistics): FPGA_xst_offload_nof_crosslets_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) FPGA_xst_offload_nof_crosslets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_R"], datatype=numpy.uint32, dims=(16,)) + FPGA_xst_ring_nof_transport_hops_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_nof_transport_hops_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) + FPGA_xst_ring_nof_transport_hops_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_nof_transport_hops_R"], datatype=numpy.uint32, dims=(16,)) + + FPGA_xst_offload_nof_packets_R = attribute_wrapper(comms_annotation=["FPGA_xst_offload_nof_packets_R"], datatype=numpy.int32, dims=(16,)) + FPGA_xst_offload_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_xst_offload_nof_valid_R"], datatype=numpy.int32, dims=(16,)) # number of packets with valid payloads nof_valid_payloads_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(XSTCollector.MAX_FPGAS,), datatype=numpy.uint64) diff --git a/tangostationcontrol/tangostationcontrol/devices/tilebeam.py b/tangostationcontrol/tangostationcontrol/devices/tilebeam.py index 948a3657279bf489039bd372b1a096052d3556ec..25373f689e280bb2c81a1d6cbb5527a611f02092 100644 --- a/tangostationcontrol/tangostationcontrol/devices/tilebeam.py +++ 
b/tangostationcontrol/tangostationcontrol/devices/tilebeam.py @@ -11,17 +11,17 @@ import numpy import datetime from json import loads -from tango.server import attribute, command, device_property -from tango import AttrWriteType, DebugIt, DevState, DeviceProxy, DevVarStringArray, DevVarDoubleArray, DevString, DevSource from threading import Thread, Lock, Condition +from tango import AttrWriteType, DebugIt, DeviceProxy, DevVarStringArray, DevVarDoubleArray, DevString, DevSource +from tango.server import attribute, command, device_property +from tango import Util # Additional import from tangostationcontrol.common.entrypoint import entry -from tangostationcontrol.devices.lofar_device import lofar_device from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions -from tangostationcontrol.common.measures import get_measures_directory, use_measures_directory, download_measures, restart_python, get_available_measures_directories from tangostationcontrol.beam.delays import delay_calculator from tangostationcontrol.devices.device_decorators import * +from tangostationcontrol.devices.beam_device import beam_device import logging logger = logging.getLogger() @@ -31,20 +31,20 @@ logger = logging.getLogger() __all__ = ["TileBeam", "main", "BeamTracker"] @device_logging_to_python() -class TileBeam(lofar_device): +class TileBeam(beam_device): # ----------------- # Device Properties # ----------------- - HBAT_beam_tracking_interval = device_property( + Beam_tracking_interval = device_property( dtype='DevFloat', - doc='HBAT beam weights updating interval time [seconds]', + doc='Beam weights updating interval time [seconds]', mandatory=False, default_value = 10.0 ) - HBAT_beam_tracking_preparation_period = device_property( + Beam_tracking_preparation_period = device_property( dtype='DevFloat', doc='Preparation time [seconds] needed before starting update operation', mandatory=False, @@ -55,34 +55,27 @@ class TileBeam(lofar_device): # Attributes # 
---------- - HBAT_pointing_direction_R = attribute(access=AttrWriteType.READ, + Pointing_direction_R = attribute(access=AttrWriteType.READ, dtype=((numpy.str,),), max_dim_x=3, max_dim_y=96, - fget=lambda self: self._hbat_pointing_direction_r) - - HBAT_pointing_direction_RW = attribute(access=AttrWriteType.READ_WRITE, + fget=lambda self: self._pointing_direction_r) + + Pointing_direction_RW = attribute(access=AttrWriteType.READ_WRITE, dtype=((numpy.str,),), max_dim_x=3, max_dim_y=96, - fget=lambda self: self._hbat_pointing_direction_rw) + fget=lambda self: self._pointing_direction_rw) - HBAT_pointing_timestamp_R = attribute(access=AttrWriteType.READ, + Pointing_timestamp_R = attribute(access=AttrWriteType.READ, dtype=(numpy.double,), max_dim_x=96, - fget=lambda self: self._hbat_pointing_timestamp_r) + fget=lambda self: self._pointing_timestamp_r) - HBAT_tracking_enabled_R = attribute(access=AttrWriteType.READ, - doc="Whether the HBAT tile beam is updated periodically", + Tracking_enabled_R = attribute(access=AttrWriteType.READ, + doc="Whether the tile beam is updated periodically", dtype=numpy.bool, - fget=lambda self: self.HBAT_beam_tracker.is_alive()) + fget=lambda self: self.Beam_tracker.is_alive()) - HBAT_tracking_enabled_RW = attribute(access=AttrWriteType.READ_WRITE, - doc="Whether the HBAT tile beam should be updated periodically", + Tracking_enabled_RW = attribute(access=AttrWriteType.READ_WRITE, + doc="Whether the tile beam should be updated periodically", dtype=numpy.bool, - fget=lambda self: self._hbat_tracking_enabled_rw) - - # Directory where the casacore measures that we use, reside. We configure ~/.casarc to - # use the symlink /opt/IERS/current, which we switch to the actual set of files to use. 
- measures_directory_R = attribute(dtype=str, access=AttrWriteType.READ, fget = lambda self: get_measures_directory()) - - # List of dowloaded measures (the latest 64, anyway) - measures_directories_available_R = attribute(dtype=(str,), max_dim_x=64, access=AttrWriteType.READ, fget = lambda self: sorted(get_available_measures_directories())[-64:]) + fget=lambda self: self._tracking_enabled_rw) # -------- # overloaded functions @@ -92,22 +85,25 @@ class TileBeam(lofar_device): super().init_device() # thread to perform beam tracking - self.HBAT_beam_tracker = None + self.Beam_tracker = None @log_exceptions() def configure_for_initialise(self): super().configure_for_initialise() # Initialise pointing array data and attribute - self._hbat_pointing_timestamp_r = numpy.zeros(96, dtype=numpy.double) - self._hbat_pointing_direction_r = numpy.zeros((96,3), dtype="<U32") - self._hbat_pointing_direction_rw = numpy.array([["AZELGEO","0deg","90deg"]] * 96, dtype="<U32") + self._pointing_timestamp_r = numpy.zeros(96, dtype=numpy.double) + self._pointing_direction_r = numpy.zeros((96,3), dtype="<U32") + self._pointing_direction_rw = numpy.array([["AZELGEO","0deg","90deg"]] * 96, dtype="<U32") # Initialise tracking control - self._hbat_tracking_enabled_rw = True + self._tracking_enabled_rw = True - # Set a reference of RECV device - self.recv_proxy = DeviceProxy("STAT/RECV/1") + # Set a reference of RECV device that is correlated to this BEAM device + util = Util.instance() + instance_number = self.get_name().split('/')[2] + self.recv_proxy = DeviceProxy( + f"{util.get_ds_inst_name()}/RECV/{instance_number}") self.recv_proxy.set_source(DevSource.DEV) # Retrieve positions from RECV device @@ -120,22 +116,22 @@ class TileBeam(lofar_device): # absolute positions of each antenna element self.HBAT_antenna_positions = [HBAT_reference_itrf[tile] + HBAT_antenna_itrf_offsets[tile] for tile in range(96)] - # Create a thread object to update HBAT beam weights - self.HBAT_beam_tracker = 
BeamTracker(self) + # Create a thread object to update beam weights + self.Beam_tracker = BeamTracker(self) @log_exceptions() def configure_for_on(self): super().configure_for_on() # Start beam tracking thread - if self._hbat_tracking_enabled_rw: - self.HBAT_beam_tracker.start() - + if self._tracking_enabled_rw: + self.Beam_tracker.start() + @log_exceptions() def configure_for_off(self): - if self.HBAT_beam_tracker: + if self.Beam_tracker: # Stop thread object - self.HBAT_beam_tracker.stop() + self.Beam_tracker.stop() super().configure_for_off() @@ -143,28 +139,28 @@ class TileBeam(lofar_device): # internal functions # -------- - def write_HBAT_pointing_direction_RW(self, value): - """ Setter method for attribute HBAT_pointing_direction_RW """ + def write_Pointing_direction_RW(self, value): + """ Setter method for attribute Pointing_direction_RW """ # verify whether values are valid for tile in range(96): if not self.HBAT_delay_calculators[tile].is_valid_direction(value[tile]): raise ValueError(f"Invalid direction: {value[tile]}") - self._hbat_pointing_direction_rw = value + self._pointing_direction_rw = value # force update across tiles if pointing changes - self.HBAT_beam_tracker.force_update() + self.Beam_tracker.force_update() logger.info("Pointing direction update requested") - def write_HBAT_tracking_enabled_RW(self, value): - self._hbat_tracking_enabled_rw = value + def write_Tracking_enabled_RW(self, value): + self._tracking_enabled_rw = value if value: - self.HBAT_beam_tracker.start() + self.Beam_tracker.start() else: - self.HBAT_beam_tracker.stop() + self.Beam_tracker.stop() - def _HBAT_delays(self, pointing_direction: numpy.array, timestamp: datetime.datetime = None): + def _delays(self, pointing_direction: numpy.array, timestamp: datetime.datetime = None): """ Calculate the delays (in seconds) based on the pointing list and the timestamp """ @@ -186,7 +182,7 @@ class TileBeam(lofar_device): return delays - def _HBAT_set_pointing(self, 
pointing_direction: numpy.array, timestamp: datetime.datetime = None): + def _set_pointing(self, pointing_direction: numpy.array, timestamp: datetime.datetime = None): """ Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters) """ @@ -197,7 +193,7 @@ class TileBeam(lofar_device): timestamp = datetime.datetime.now() # Retrieve delays from casacore - delays = self._HBAT_delays(pointing_direction, timestamp) + delays = self._delays(pointing_direction, timestamp) # Convert delays into beam weights delays = delays.flatten() @@ -212,50 +208,19 @@ class TileBeam(lofar_device): mask = self.recv_proxy.ANT_mask_RW.flatten() for rcu in range(96): if mask[rcu]: - self._hbat_pointing_direction_r[rcu] = pointing_direction[rcu] - self._hbat_pointing_timestamp_r[rcu] = timestamp.timestamp() + self._pointing_direction_r[rcu] = pointing_direction[rcu] + self._pointing_timestamp_r[rcu] = timestamp.timestamp() logger.info("Pointing direction updated") # -------- # Commands # -------- - - @command(dtype_out=str, doc_out="Name of newly installed measures directory") - @DebugIt() - @log_exceptions() - def download_measures(self): - """ Download new measures tables into /opt/IERS, but do not activate them. - - NOTE: This may take a while to complete. You are advised to increase - the timeout of the proxy using `my_device.set_timeout_millis(10000)`. """ - - return download_measures() - - @command(dtype_in=str, doc_in="Measures directory to activate") - @DebugIt() - @log_exceptions() - def use_measures(self, newdir): - """ Activate a downloaded set of measures tables. - - NOTE: This will turn off and restart this device!! 
""" - - # switch to requested measures - use_measures_directory(newdir) - logger.info(f"Switched measures table to {newdir}") - - # turn off our device, to prepare for a python restart - self.Off() - - # restart this program to force casacore to adopt - # the new tables - logger.warning("Restarting device to activate new measures tables") - restart_python() @command(dtype_in=DevVarStringArray, dtype_out=DevVarDoubleArray) @DebugIt() @log_exceptions() - @only_in_states([DevState.ON]) - def HBAT_delays(self, pointing_direction: numpy.array, timestamp: datetime.datetime = None): + @only_in_states(beam_device.DEFAULT_COMMAND_STATES) + def delays(self, pointing_direction: numpy.array, timestamp: datetime.datetime = None): """ Calculate the delays (in seconds) based on the pointing list and the timestamp TBD: antenna and reference positions will be retrieved from RECV and not stored as BEAM device properties @@ -268,15 +233,15 @@ class TileBeam(lofar_device): pointing_direction = numpy.array(pointing_direction).reshape(96,3) - delays = self._HBAT_delays(pointing_direction, timestamp) + delays = self._delays(pointing_direction, timestamp) return delays.flatten() @command(dtype_in=DevVarStringArray) @DebugIt() @log_exceptions() - @only_in_states([DevState.ON]) - def HBAT_set_pointing(self, pointing_direction: list, timestamp: datetime.datetime = None): + @only_in_states(beam_device.DEFAULT_COMMAND_STATES) + def set_pointing(self, pointing_direction: list, timestamp: datetime.datetime = None): """ Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters) """ @@ -289,12 +254,12 @@ class TileBeam(lofar_device): # Reshape the flatten input array pointing_direction = numpy.array(pointing_direction).reshape(96,3) - self._HBAT_set_pointing(pointing_direction, timestamp) + self._set_pointing(pointing_direction, timestamp) @command(dtype_in = DevString) @DebugIt() - @only_in_states([DevState.ON]) - def HBAT_set_pointing_for_specific_time(self, 
parameters: DevString = None): + @only_in_states(beam_device.DEFAULT_COMMAND_STATES) + def set_pointing_for_specific_time(self, parameters: DevString = None): """ Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters) for the given timestamp @@ -312,7 +277,7 @@ class TileBeam(lofar_device): # Reshape the flatten pointing array pointing_direction = numpy.array(pointing_direction).reshape(96,3) - self._HBAT_set_pointing(pointing_direction, timestamp) + self._set_pointing(pointing_direction, timestamp) # ---------- @@ -330,7 +295,7 @@ class BeamTracker(): DISCONNECT_TIMEOUT = 3.0 """ Object that encapsulates a Thread, resposible for beam tracking operations """ - def __init__(self, device: lofar_device): + def __init__(self, device: beam_device): self.thread = None self.device = device @@ -348,7 +313,7 @@ class BeamTracker(): return self.done = False - self.thread = Thread(target=self._update_HBAT_pointing_direction, name=f"BeamTracker of {self.device.get_name()}") + self.thread = Thread(target=self._update_pointing_direction, name=f"BeamTracker of {self.device.get_name()}") self.thread.start() logger.info("BeamTracking thread started") @@ -361,9 +326,9 @@ class BeamTracker(): """ Force the pointing to be updated. 
""" self.stale_pointing = True - self.unlock_thread() + self.notify_thread() - def unlock_thread(self): + def notify_thread(self): # inform the thread to stop waiting with self.update_lock: self.update_condition.notify() @@ -394,13 +359,13 @@ class BeamTracker(): now = datetime.datetime.now().timestamp() # Computes the left seconds before the next update - next_update_in = self.device.HBAT_beam_tracking_interval - (now % self.device.HBAT_beam_tracking_interval) + next_update_in = self.device.Beam_tracking_interval - (now % self.device.Beam_tracking_interval) # Computes the needed sleep time before the next update - sleep_time = next_update_in - self.device.HBAT_beam_tracking_preparation_period + sleep_time = next_update_in - self.device.Beam_tracking_preparation_period # If sleep time is negative, add the tracking interval for the next update if sleep_time < 0: - return sleep_time + self.device.HBAT_beam_tracking_interval + return sleep_time + self.device.Beam_tracking_interval else: return sleep_time @@ -410,14 +375,14 @@ class BeamTracker(): @log_exceptions() @fault_on_error() - def _update_HBAT_pointing_direction(self): + def _update_pointing_direction(self): """ Updates the beam weights using a fixed interval of time """ - # Check if flag beamtracking is true + # Check if flag beamtracking is true with self.update_lock: while not self.done: self.stale_pointing = False - self.device._HBAT_set_pointing(self.device._hbat_pointing_direction_rw, datetime.datetime.now()) + self.device._set_pointing(self.device._pointing_direction_rw, datetime.datetime.now()) # sleep until the next update, or when interrupted (this releases the lock, allowing for notification) # note that we need wait_for as conditions can be triggered multiple times in succession diff --git a/tangostationcontrol/tangostationcontrol/devices/unb2.py b/tangostationcontrol/tangostationcontrol/devices/unb2.py index 614055a716ce8a527197aaad159273ebfda220ef..a255cd7a25358e38ed8d807476b5786596acd773 100644 
--- a/tangostationcontrol/tangostationcontrol/devices/unb2.py +++ b/tangostationcontrol/tangostationcontrol/devices/unb2.py @@ -13,12 +13,13 @@ # PyTango imports from tango.server import command, attribute, device_property -from tango import AttrWriteType, DebugIt, DevState +from tango import AttrWriteType, DebugIt # Additional import from tangostationcontrol.common.entrypoint import entry from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper from tangostationcontrol.devices.opcua_device import opcua_device +from tangostationcontrol.devices.lofar_device import lofar_device from tangostationcontrol.common.lofar_logging import device_logging_to_python from tangostationcontrol.devices.device_decorators import only_in_states @@ -203,7 +204,7 @@ class UNB2(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def UNB2_off(self): """ @@ -213,7 +214,7 @@ class UNB2(opcua_device): @command() @DebugIt() - @only_in_states([DevState.STANDBY, DevState.ON]) + @only_in_states(lofar_device.DEFAULT_COMMAND_STATES) def UNB2_on(self): """ diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_pdu.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_pdu.py new file mode 100644 index 0000000000000000000000000000000000000000..8ffefd83a8bba8fe1a04b8d929a5c403cb0d8262 --- /dev/null +++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_pdu.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# +# This file is part of the LOFAR 2.0 Station Software +# +# +# +# Distributed under the terms of the APACHE license. +# See LICENSE.txt for more info. 
+ +from .base import AbstractTestBases + + +class TestDevicePDU(AbstractTestBases.TestDeviceBase): + + def setUp(self): + """Intentionally recreate the device object in each test""" + super().setUp("STAT/PDU/1") diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py index 4948dd7f70a1d1c122dcd3a3c50cda6184bb44d3..decd1678b5332bd0c45c70ae3527c1c289cd6f6b 100644 --- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py +++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py @@ -39,16 +39,16 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): recv_proxy.set_defaults() return recv_proxy - def test_HBAT_delays_dims(self): - """Verify HBAT delays are retrieved with correct dimensions""" + def test_delays_dims(self): + """Verify delays are retrieved with correct dimensions""" self.setup_recv_proxy() # setup BEAM self.proxy.warm_boot() - # verify HBAT_delays method returns the correct dimensions - HBAT_delays = self.proxy.HBAT_delays(self.pointing_direction) - self.assertEqual(1536, len(HBAT_delays)) # 96*16 + # verify delays method returns the correct dimensions + delays = self.proxy.delays(self.pointing_direction) + self.assertEqual(1536, len(delays)) # 96*16 def test_set_pointing(self): """Verify if set pointing procedure is correctly executed""" @@ -56,31 +56,32 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): # setup BEAM self.proxy.warm_boot() - self.proxy.HBAT_tracking_enabled_RW = False + self.proxy.Tracking_enabled_RW = False # Verify attribute is present (all zeros if never used before) - HBAT_delays_r1 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) - self.assertIsNotNone(HBAT_delays_r1) + delays_r1 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) 
+ self.assertIsNotNone(delays_r1) time.sleep(3) # Verify writing operation does not lead to errors - self.proxy.HBAT_set_pointing(self.pointing_direction) # write values to RECV - HBAT_delays_r2 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) - self.assertIsNotNone(HBAT_delays_r2) + self.proxy.set_pointing(self.pointing_direction) # write values to RECV + delays_r2 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) + + self.assertIsNotNone(delays_r2) # Verify delays changed (to be discussed) - #self.assertFalse((HBAT_delays_r1==HBAT_delays_r2).all()) + #self.assertFalse((delays_r1==delays_r2).all()) def test_pointing_to_zenith(self): # setup RECV as well recv_proxy = self.setup_recv_proxy() self.proxy.warm_boot() - self.proxy.HBAT_tracking_enabled_RW = False + self.proxy.Tracking_enabled_RW = False # Point to Zenith - self.proxy.HBAT_set_pointing(numpy.array([["AZELGEO","0deg","90deg"]] * 96).flatten()) + self.proxy.set_pointing(numpy.array([["AZELGEO","0deg","90deg"]] * 96).flatten()) calculated_HBAT_delay_steps = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) @@ -93,10 +94,10 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): recv_proxy = self.setup_recv_proxy() self.proxy.warm_boot() - self.proxy.HBAT_tracking_enabled_RW = False + self.proxy.Tracking_enabled_RW = False # point at north on the horizon - self.proxy.HBAT_set_pointing(["AZELGEO","0deg","0deg"] * 96) + self.proxy.set_pointing(["AZELGEO","0deg","0deg"] * 96) # obtain delays of the X polarisation of all the elements of the first tile north_beam_delay_steps = recv_proxy.HBAT_BF_delay_steps_RW[0].reshape(2,4,4)[0] @@ -106,7 +107,7 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): for angle in (90,180,270): # point at angle degrees (90=E, 180=S, 270=W) - self.proxy.HBAT_set_pointing(["AZELGEO",f"{angle}deg","0deg"] * 96) + self.proxy.set_pointing(["AZELGEO",f"{angle}deg","0deg"] * 96) # obtain delays of the X 
polarisation of all the elements of the first tile angled_beam_delay_steps = recv_proxy.HBAT_BF_delay_steps_RW[0].reshape(2,4,4)[0] @@ -120,7 +121,7 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): recv_proxy = self.setup_recv_proxy() self.proxy.warm_boot() - self.proxy.HBAT_tracking_enabled_RW = False + self.proxy.Tracking_enabled_RW = False # Point to LOFAR 1 ref pointing (0.929342, 0.952579, J2000) pointings = numpy.array([["J2000", "0.929342rad", "0.952579rad"]] * 96).flatten() @@ -133,7 +134,7 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): } json_string = json.dumps(parameters, cls=NumpyEncoder) - self.proxy.HBAT_set_pointing_for_specific_time(json_string) + self.proxy.set_pointing_for_specific_time(json_string) calculated_HBAT_delay_steps = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) @@ -153,12 +154,12 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase): self.proxy.warm_boot() # check if we're really tracking - self.assertTrue(self.proxy.HBAT_tracking_enabled_R) + self.assertTrue(self.proxy.Tracking_enabled_R) # point somewhere new_pointings = [("J2000",f"{tile}deg","0deg") for tile in range(96)] - self.proxy.HBAT_pointing_direction_RW = new_pointings + self.proxy.Pointing_direction_RW = new_pointings # check pointing - self.assertListEqual(new_pointings, list(self.proxy.HBAT_pointing_direction_R)) + self.assertListEqual(new_pointings, list(self.proxy.Pointing_direction_R)) diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_lofar_device.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py similarity index 99% rename from tangostationcontrol/tangostationcontrol/integration_test/devices/test_lofar_device.py rename to tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py index d5a935eb537a9031ee67b2252ba1b1cfd1eb6687..3a420bbd3c3f03b34d4664f39d631de154c69bd0 100644 --- 
a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_lofar_device.py +++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py @@ -13,6 +13,7 @@ from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy from tangostationcontrol.integration_test import base + class TestProxyAttributeAccess(base.IntegrationTestCase): """ Test whether DeviceProxy's can always access attributes immediately after turning them on. """ diff --git a/tangostationcontrol/tangostationcontrol/integration_test/toolkit/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/__init__.py similarity index 100% rename from tangostationcontrol/tangostationcontrol/integration_test/toolkit/__init__.py rename to tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/__init__.py diff --git a/tangostationcontrol/tangostationcontrol/integration_test/toolkit/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py similarity index 87% rename from tangostationcontrol/tangostationcontrol/integration_test/toolkit/test_archiver.py rename to tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py index a6c361435cd953616bbd5cdf9514b2550a9e824a..091db2c253e5e163167afc204170d89cf087c61a 100644 --- a/tangostationcontrol/tangostationcontrol/integration_test/toolkit/test_archiver.py +++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py @@ -16,6 +16,7 @@ from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy import time from datetime import datetime + class TestArchiver(BaseIntegrationTestCase): def setUp(self): @@ -54,12 +55,16 @@ class TestArchiver(BaseIntegrationTestCase): recv_proxy.set_defaults() recv_proxy.on() self.assertEqual(DevState.ON, recv_proxy.state()) - + + """ # Safety operation that prevents event subscriber to go in Fault 
state self.archiver.remove_attributes_in_error() time.sleep(3) - attr_fullname = 'stat/recv/1/recvtr_translator_busy_r' # boolean - self.archiver.add_attribute_to_archiver(attr_fullname, polling_period=1000, archive_event_period=3000) + """ + polling_period=1000 + archive_event_period=3000 + attr_fullname = 'stat/recv/1/recvtr_translator_busy_r' # boolean, but lofar view returns int + self.archiver.add_attribute_to_archiver(attr_fullname, polling_period, archive_event_period) time.sleep(3) # Test if the attribute has been correctly added to event subscriber self.assertTrue(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))) @@ -67,19 +72,21 @@ class TestArchiver(BaseIntegrationTestCase): # Retrieve data from DB views self.retriever = RetrieverTimescale() self.assertIsNotNone(self.retriever) - records = self.retriever.get_lofar_attribute(attr_fullname) + records = self._wait_for_archiving(attr_fullname, archive_event_period) self.assertTrue(len(records)>0) item = records[-1] # last table record self.assertEqual('stat/recv/1',item.device) # column device self.assertEqual('recvtr_translator_busy_r',item.name) # column attribute self.assertEqual(datetime,type(item.data_time)) # column datetime - self.assertEqual(bool,type(item.value)) # column value + self.assertEqual(int,type(item.value)) # column value + """ # Remove attribute at the end of the test self.archiver.remove_attribute_from_archiver(attr_fullname) time.sleep(3) # Test if the attribute has been correctly removed self.assertFalse(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))) + """ recv_proxy.off() def test_archive_array_attribute(self): @@ -95,11 +102,15 @@ class TestArchiver(BaseIntegrationTestCase): sdp_proxy.on() self.assertEqual(DevState.ON, sdp_proxy.state()) + """ # Safety operation that prevents event subscriber to go in Fault state self.archiver.remove_attributes_in_error() time.sleep(3) + """ + polling_period=1000 + archive_event_period=3000 attr_fullname = 
'stat/sdp/1/fpga_temp_r' # double - self.archiver.add_attribute_to_archiver(attr_fullname, polling_period=1000, archive_event_period=3000) + self.archiver.add_attribute_to_archiver(attr_fullname, polling_period, archive_event_period) time.sleep(3) # Test if the attribute has been correctly added to event subscriber self.assertTrue(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))) @@ -107,7 +118,7 @@ class TestArchiver(BaseIntegrationTestCase): # Retrieve data from DB views self.retriever = RetrieverTimescale() self.assertIsNotNone(self.retriever) - records = self.retriever.get_lofar_attribute(attr_fullname) + records = self._wait_for_archiving(attr_fullname, archive_event_period) self.assertTrue(len(records)>0) item = records[-1] # last table record self.assertEqual('stat/sdp/1',item.device) # column device @@ -116,11 +127,13 @@ class TestArchiver(BaseIntegrationTestCase): self.assertEqual(int,type(item.x)) # column index self.assertEqual(float,type(item.value)) # column value + """ # Remove attribute at the end of the test self.archiver.remove_attribute_from_archiver(attr_fullname) time.sleep(3) # Test if the attribute has been correctly removed self.assertFalse(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))) + """ sdp_proxy.off() def test_get_maximum_device_load(self): @@ -142,3 +155,12 @@ class TestArchiver(BaseIntegrationTestCase): time.sleep(3) max_load = self.archiver.get_maximum_device_load(device_name) self.assertGreater(max_load,0) + + def _wait_for_archiving(self, attr_fullname: str, archive_event_period: int, max_wait: int = 10): + wait = 0 + records = self.retriever.get_lofar_attribute(attr_fullname) + while (not(len(records)>0) and wait<max_wait): + time.sleep(archive_event_period) + records = self.retriever.get_lofar_attribute(attr_fullname) + wait+=1 + return records diff --git a/tangostationcontrol/tangostationcontrol/integration_test/toolkit/test_archiver_util.py 
b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py similarity index 99% rename from tangostationcontrol/tangostationcontrol/integration_test/toolkit/test_archiver_util.py rename to tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py index ad4595951d43c5cacdf26459e9cb982c1e32e142..952e665674d2bcb79e8a393a33c62a6144ec9125 100644 --- a/tangostationcontrol/tangostationcontrol/integration_test/toolkit/test_archiver_util.py +++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py @@ -14,6 +14,7 @@ from tango import DevState import json import pkg_resources + class TestArchiverUtil(BaseIntegrationTestCase): def setUp(self): diff --git a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_demo.py b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_demo.py deleted file mode 100644 index 78ea0de074ee4f5336820a4fa91e88d3ef4a73c5..0000000000000000000000000000000000000000 --- a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_demo.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of the LOFAR 2.0 Station Software -# -# -# -# Distributed under the terms of the APACHE license. -# See LICENSE.txt for more info. - -import logging - -from tangostationcontrol.integration_test import base - -logger = logging.getLogger() - - -class TestDemo(base.IntegrationTestCase): - """ This test will not be run until L2SS-484 is merged. - It serves as a trivial example of test exclusion since the unit tests pass - even though assertTrue(False) can trivially be determined to never succeed. 
- """ - - def setUp(self): - - super(TestDemo, self).setUp() - - def test_fail(self): - self.assertTrue(False) diff --git a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py new file mode 100644 index 0000000000000000000000000000000000000000..5235cfcaa783bda13b49dd90b144b3197c7fad29 --- /dev/null +++ b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# +# This file is part of the LOFAR 2.0 Station Software +# +# +# +# Distributed under the terms of the APACHE license. +# See LICENSE.txt for more info. + +import time +import numpy +import logging +import statistics + +from tango import DevState + +from tangostationcontrol.integration_test import base +from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy + +logger = logging.getLogger() + + +class TestRecvCluster(base.IntegrationTestCase): + + pointing_direction = numpy.array([["J2000", "0deg", "0deg"]] * 96).flatten() + + def setUp(self): + + super(TestRecvCluster, self).setUp() + + def test_recv_cluster_performance(self): + beam_proxies = [] + recv_proxies = [] + + # Beam / Recv 1,2,3,4 + for i in range(1, 5): + recv_proxies.append(TestDeviceProxy(f"STAT/RECV/{i}")) + beam_proxies.append(TestDeviceProxy(f"STAT/TileBeam/{i}")) + + # Recv devices must be ready before TileBeam + for proxy in recv_proxies: + proxy.off() + self.assertTrue(proxy.state() is DevState.OFF) + proxy.initialise() + self.assertTrue(proxy.state() is DevState.STANDBY) + proxy.set_defaults() + proxy.on() + self.assertTrue(proxy.state() is DevState.ON) + + for proxy in beam_proxies: + proxy.off() + self.assertTrue(proxy.state() is DevState.OFF) + proxy.initialise() + self.assertTrue(proxy.state() is DevState.STANDBY) + proxy.on() + self.assertTrue(proxy.state() is DevState.ON) + + results = [] + for _i in 
range(25): + start_time = time.monotonic_ns() + for proxy in beam_proxies: + proxy.set_pointing(self.pointing_direction) + stop_time = time.monotonic_ns() + results.append(stop_time - start_time) + + logging.error(f"Median {statistics.median(results) / 1.e9} Stdev " + f"{statistics.stdev(results) / 1.e9}") diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py index 905c5a3b12f43a85562c32f1e509fe5e3a1e7baf..5677b32c70f175ff74fff293e58bb1c535689a2c 100644 --- a/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py +++ b/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py @@ -33,11 +33,11 @@ class TestBeamDevice(base.TestCase): with DeviceTestContext(tilebeam.TileBeam, process=True, timeout=10) as proxy: proxy.initialise() self.assertEqual(96, len(proxy.read_attribute( - "HBAT_pointing_direction_R").value)) + "Pointing_direction_R").value)) def test_get_pointing_timestamps(self): """Verify can read timestamps attribute and length matches without err""" with DeviceTestContext(tilebeam.TileBeam, process=True, timeout=10) as proxy: proxy.initialise() self.assertEqual(96, len(proxy.read_attribute( - "HBAT_pointing_timestamp_R").value)) + "Pointing_timestamp_R").value)) diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py index 1fde8856cf55a79a03aaec2cc53ba34a4ad818f0..49de70bd5aecc1faab3bf7402f7adad059bd3823 100644 --- a/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py +++ b/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py @@ -19,7 +19,8 @@ from tangostationcontrol.test import base class TestRecvDevice(base.TestCase): # some dummy values for mandatory properties - recv_properties = {'OPC_Server_Name': 'example.com', 'OPC_Server_Port': 4840, 'OPC_Time_Out': 5.0} + 
recv_properties = {'OPC_Server_Name': 'example.com', 'OPC_Server_Port': 4840, 'OPC_Time_Out': 5.0, + 'Antenna_Field_Reference_ITRF' : [3.0, 3.0, 3.0], 'Antenna_Field_Reference_ETRS' : [7.0, 7.0, 7.0]} def setUp(self): super(TestRecvDevice, self).setUp() @@ -31,11 +32,20 @@ class TestRecvDevice(base.TestCase): device, 'DeviceProxy') proxy_patcher.start() self.addCleanup(proxy_patcher.stop) - - + def test_calculate_HBAT_bf_delay_steps(self): """Verify HBAT beamforming calculations are correctly executed""" with DeviceTestContext(recv.RECV, properties=self.recv_properties, process=True) as proxy: delays = numpy.random.rand(96,16).flatten() HBAT_bf_delay_steps = proxy.calculate_HBAT_bf_delay_steps(delays) self.assertEqual(3072, len(HBAT_bf_delay_steps)) # 96x32=3072 + + def test_read_Antenna_Field_Reference(self): + """Verify if Antenna coordinates are correctly provided""" + # Device uses ITRF coordinates by default + with DeviceTestContext(recv.RECV, properties=self.recv_properties, process=True) as proxy: + self.assertEqual(3.0, proxy.Antenna_Field_Reference_ITRF_R[0]) + # Device derives coordinates from ETRS if ITRF ones are not found + recv_properties_v2 = {'OPC_Server_Name': 'example.com', 'OPC_Server_Port': 4840, 'OPC_Time_Out': 5.0, 'Antenna_Field_Reference_ETRS' : [7.0, 7.0, 7.0]} + with DeviceTestContext(recv.RECV, properties=recv_properties_v2, process=True) as proxy: + self.assertNotEqual(3.0, proxy.Antenna_Field_Reference_ITRF_R[0]) # value = 6.948998835785814 diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py index 4265c277554adb9d4e92c8491f392bffaddb3084..842aff8367ab78a30ba55eb31bd48fcb29193d8e 100644 --- a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py +++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py @@ -22,12 +22,12 @@ class TestArchiverUtil(base.TestCase): def 
test_device_fqdn(self): """Test if a device name is correctly converted in a Tango FQDN""" - self.assertEqual(f"tango://databaseds:10000/{self.device_name}".lower(), device_fqdn(self.device_name)) + self.assertEqual(f"tango://databaseds:10000/{self.device_name}".lower(), device_fqdn(self.device_name, "databaseds:10000")) def test_attribute_fqdn(self): """Test if an attribute name is correctly converted in a Tango FQDN""" self.assertEqual(f"tango://databaseds:10000/{self.device_name}/{self.attribute_name}".lower(), - attribute_fqdn(f"{self.device_name}/{self.attribute_name}")) + attribute_fqdn(f"{self.device_name}/{self.attribute_name}", "databaseds:10000")) self.assertRaises(ValueError, lambda: attribute_fqdn(self.attribute_name)) def test_split_tango_name(self): diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py index 3b78af4ae47d05d8cbe143abc1b676ae4781d8c8..2e178083d2428d1b0706e283ded68e7ee0c5ea46 100644 --- a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py +++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py @@ -25,8 +25,8 @@ def warn_if_attribute_not_found(): try: return func(self, attribute_name, *args, **kwargs) except DevFailed as e: - if e.args[0].reason == 'Attribute not found' or 'NOT FOUND in signal list': - logger.warning(f"Attribute {attribute_name} not found!") + if e.args[0].reason in ['Attribute not found', 'BadSignalName']: + logger.warning(f"Attribute {attribute_name} not found: {e.args[0].desc}") else: raise @@ -59,7 +59,7 @@ class Archiver(): self.cm = DeviceProxy(cm_name) try: if self.cm.state() == DevState.FAULT: - raise Exception(f"Configuration Manager {cm_name} is in FAULT state") + raise Exception(f"Configuration Manager {cm_name} is in FAULT state: {self.cm.status()}") except Exception as e: raise Exception(f"Connection failed with Configuration Manager {cm_name}") from e self.es_list = [es_name for es_name in 
self.get_subscribers(from_db=False)] @@ -145,7 +145,8 @@ class Archiver(): dev_polling_time, dev_archive_abs_change, dev_archive_rel_change, dev_archive_period, dev_event_period, dev_strategy = get_global_env_parameters(config_dict, environment) # Attributes to be included in archiving stategy include_att_list = get_include_attribute_list(device, config_dict, environment) - self.remove_attributes_by_device(device, exclude=include_att_list) + # TODO Cleanup the subscriber + # self.remove_attributes_by_device(device, exclude=include_att_list) # Include attributes by custom configuration try: for att in include_att_list: @@ -281,6 +282,10 @@ class Archiver(): """ Stops the data archiving of the attribute passed as input, and remove it from the subscriber's list. """ + + # Removal of attributes leads to hdbpp-es freezing up, see https://github.com/tango-controls-hdbpp/hdbpp-es/issues/25 + raise NotImplementedError("Removing attributes is not supported yet") + attribute_name = attribute_fqdn(attribute_name) self.cm.AttributeStop(attribute_name) self.cm.AttributeRemove(attribute_name) @@ -341,6 +346,7 @@ class Archiver(): Starts the archiving of the attribute passed as input. The attribute must be already present in the subscriber's list """ + attribute_name = attribute_fqdn(attribute_name) self.cm.AttributeStart(attribute_name) @warn_if_attribute_not_found() @@ -349,6 +355,7 @@ class Archiver(): Stops the archiving of the attribute passed as input. 
The attribute must be already present in the subscriber's list """ + attribute_name = attribute_fqdn(attribute_name) self.cm.AttributeStop(attribute_name) def is_attribute_archived(self, attribute_name:str): diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py index 0d5e8f4d992170016bae7405c204b785c8cb8573..c797c5267a517dbe00c566297b94cc176f9a9574 100644 --- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py +++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py @@ -6,6 +6,7 @@ from tango import DeviceProxy, CmdArgType import re +import os """ A dictionary whose keys are the Tango datatypes mapping, and the values are the relative byte size @@ -21,6 +22,8 @@ DATATYPES_SIZE_DICT = {CmdArgType.DevBoolean:1, CmdArgType.DevShort:2, CmdArgTyp CmdArgType.DevULong64:8,CmdArgType.DevVarLong64Array:None,CmdArgType.DevVarULong64Array:None, CmdArgType.DevInt:4,CmdArgType.DevEncoded:None, CmdArgType.DevEnum:None, CmdArgType.DevPipeBlob:None} +TANGO_HOST = os.environ.get("TANGO_HOST", None) + def get_db_config(device_name:str) -> dict: """ Retrieve the DB credentials from the Tango properties of Configuration Manager or EventSubscribers @@ -46,7 +49,7 @@ def get_attribute_from_fqdn(attribute_name:str): return attribute_name -def device_fqdn(device_name:str, tango_host:str = 'databaseds:10000'): +def device_fqdn(device_name:str, tango_host:str = TANGO_HOST): """ For some operations Tango devices must be transformed from the form 'domain/family/name' to 'tango://db:port/domain/family/name' @@ -59,7 +62,7 @@ def device_fqdn(device_name:str, tango_host:str = 'databaseds:10000'): return f"tango://{tango_host}/{device_name}".lower() -def attribute_fqdn(attribute_name:str, tango_host:str = 'databaseds:10000'): +def attribute_fqdn(attribute_name:str, tango_host:str = TANGO_HOST): """ For some operations Tango devices must be transformed from the form 
'domain/family/name/attribute' to 'tango://db:port/domain/family/name/attribute' diff --git a/tangostationcontrol/tox.ini b/tangostationcontrol/tox.ini index 9a7799463c22c24e85236c4e780a57e951502964..40f6e5cf38bace12fc00a2ccba1c0515678906c3 100644 --- a/tangostationcontrol/tox.ini +++ b/tangostationcontrol/tox.ini @@ -31,7 +31,7 @@ commands = {envpython} -m stestr run {posargs} passenv = TANGO_HOST setenv = TESTS_DIR=./tangostationcontrol/integration_test/{posargs} commands = - {envpython} -m stestr run --serial + {envpython} -m stestr run --serial {posargs} [testenv:cover] ; stestr does not natively support generating coverage reports use