Commit 04150e85 authored by Stefano Di Frischia

Merge branch 'L2SS-1030-create-configuration-device' into 'master'

Resolve L2SS-1030 "Create configuration device"

Closes L2SS-1030

See merge request !468
parents 464c1ab3 05c1daff
1 merge request: !468 Resolve L2SS-1030 "Create configuration device"
Showing changes with 363 additions and 12 deletions
......@@ -103,26 +103,27 @@ docker_build_image_all:
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apsct latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-beamlet latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-configuration latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-digitalbeam latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation-control latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-psoc latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pcon latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-psoc latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-bst latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbpp latest
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm latest
......@@ -307,6 +308,17 @@ docker_build_image_device_ccd:
script:
# Do not remove 'bash' or statement will be ignored by primitive docker shell
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd $tag
docker_build_image_device_configuration:
extends: .base_docker_images_except
only:
refs:
- merge_requests
changes:
- docker-compose/device-configuration.yml
- docker-compose/lofar-device-base/*
script:
# Do not remove 'bash' or statement will be ignored by primitive docker shell
- bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-configuration $tag
docker_build_image_device_apspu:
extends: .base_docker_images_except
only:
......
......@@ -17,6 +17,13 @@
}
}
},
"Configuration": {
"STAT": {
"Configuration": {
"STAT/Configuration/1": {}
}
}
},
"Observation": {
"STAT": {
"Observation": {
......
......@@ -115,6 +115,7 @@ Next change the version in the following places:
# Release Notes
* 0.5.0 Add `Configuration` device
* 0.4.1 Fix for missing SDP attributes for spectral inversion
* 0.4.0 Have most containers report health status and add `make await` command
* 0.3.1 Fix for applying boot device dsconfig
......
#
# Docker compose file that launches the station Configuration device server.
#
# Defines:
# - device-configuration: Configuration device
#
# Requires:
# - lofar-device-base.yml
#
version: '2.1'
services:
device-configuration:
image: device-configuration
# build explicitly, as docker-compose does not understand a local image
# being shared among services.
build:
context: .
dockerfile: lofar-device-base/Dockerfile
args:
SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
container_name: ${CONTAINER_NAME_PREFIX}device-configuration
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "10"
networks:
- control
ports:
- "5722:5722" # unique port for this DS
- "5822:5822" # ZeroMQ event port
- "5922:5922" # ZeroMQ heartbeat port
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
- ..:/opt/lofar/tango:rw
environment:
- TANGO_HOST=${TANGO_HOST}
- TANGO_ZMQ_EVENT_PORT=5822
- TANGO_ZMQ_HEARTBEAT_PORT=5922
healthcheck:
test: l2ss-health STAT/Configuration/1
interval: 1m
timeout: 30s
retries: 3
start_period: 30s
working_dir: /opt/lofar/tango
entrypoint:
- bin/start-ds.sh
# configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
# can't know about our Docker port forwarding
- l2ss-configuration-device Configuration STAT -v -ORBendPoint giop:tcp:device-configuration:5722 -ORBendPointPublish giop:tcp:${HOSTNAME}:5722
restart: on-failure
stop_signal: SIGINT # request a graceful shutdown of Tango
stop_grace_period: 2s
......@@ -17,6 +17,7 @@ digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
antennafield = DeviceProxy("STAT/AntennaField/1")
docker = DeviceProxy("STAT/Docker/1")
temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
configuration = DeviceProxy("STAT/Configuration/1")
# Put them in a list in case one wants to iterate
devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc]
devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc, configuration]
......@@ -17,6 +17,7 @@ digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
antennafield = DeviceProxy("STAT/AntennaField/1")
docker = DeviceProxy("STAT/Docker/1")
temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
configuration = DeviceProxy("STAT/Configuration/1")
# Put them in a list in case one wants to iterate
devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc]
devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc, configuration]
......@@ -17,6 +17,8 @@
},
"stat/ccd/1": {
},
"stat/configuration/1": {
},
"stat/apspu/1": {
},
"stat/beamlet/1": {
......
......@@ -76,7 +76,9 @@ sleep 1 # dsconfig container must be up and running...
# shellcheck disable=SC2016
echo '/usr/local/bin/wait-for-it.sh ${TANGO_HOST} --strict --timeout=300 -- true' | make run dsconfig bash -
DEVICES=(device-boot device-apsct device-ccd device-apspu device-sdp device-recv device-bst device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-psoc device-pcon device-antennafield device-temperature-manager device-observation device-observation-control)
# The DEVICES list is expanded with explicit word splitting when supplied to
# commands, so shellcheck SC2086 must be disabled for each such case.
DEVICES=(device-boot device-apsct device-ccd device-apspu device-sdp device-recv device-bst device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-psoc device-pcon device-antennafield device-temperature-manager device-observation device-observation-control device-configuration)
SIMULATORS=(sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim ccd-sim)
......
......@@ -72,6 +72,7 @@ LOCAL_IMAGES=(
"device-antennafield device-antennafield y"
"device-apsct device-apsct y" "device-apspu device-apspu y"
"device-ccd device-ccd y"
"device-configuration device-configuration y"
"device-boot device-boot y" "device-docker device-docker y"
"device-observation device-observation y"
"device-observation-control device-observation-control y"
......
0.4.1
0.5.0
.. _configuration:
Configuration
--------------------
The ``configuration = DeviceProxy("STAT/Configuration/1")`` device controls the loading, updating, exposing, and dumping of the whole station configuration.
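For example, the full station configuration can be read and inspected from an iTango or Jupyter session. The following is a minimal sketch; it assumes a running station with the device registered as ``STAT/Configuration/1``::

    import json
    from tango import DeviceProxy

    configuration = DeviceProxy("STAT/Configuration/1")

    # The attribute is exposed as a JSON string; parse it to navigate the dump.
    station_config = json.loads(configuration.station_configuration_RW)

    # The top-level "servers" key maps server -> instance -> class -> device.
    print(list(station_config["servers"].keys()))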
......@@ -30,6 +30,7 @@ Even without having access to any LOFAR2.0 hardware, you can install the full st
devices/docker
devices/psoc
devices/ccd
devices/configuration
devices/temperature-manager
devices/configure
configure_station
......
......@@ -56,6 +56,7 @@ console_scripts =
l2ss-unb2 = tangostationcontrol.devices.unb2:main
l2ss-xst = tangostationcontrol.devices.sdp.xst:main
l2ss-temperature-manager = tangostationcontrol.devices.temperature_manager:main
l2ss-configuration-device = tangostationcontrol.devices.configuration_device:main
# The following entry points should eventually be removed / replaced
l2ss-hardware-device-template = tangostationcontrol.examples.HW_device_template:main
......
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
from tango import DeviceProxy, Database
from itertools import islice
class StationConfiguration:
DEVICE_PROPERTIES_QUERY = "SELECT device, property_device.name, property_device.value FROM property_device \
INNER JOIN device ON property_device.device = device.name \
WHERE class != 'DServer' \
AND property_device.name != '__SubDevices' \
ORDER BY device, property_device.name, property_device.count ASC"
ATTRS_PROPERTIES_QUERY = "SELECT device, attribute, property_attribute_device.name, \
property_attribute_device.value \
FROM property_attribute_device \
INNER JOIN device ON property_attribute_device.device = device.name \
WHERE class != 'DServer' \
ORDER BY device, property_attribute_device.name, property_attribute_device.count ASC"
SERVER_QUERY = "SELECT server, class, name FROM device \
WHERE class != 'DServer' \
ORDER BY server ASC"
def __init__(self, db: Database, tangodb_timeout:int = 10000):
self.dbproxy = DeviceProxy(db.dev_name()) # TangoDB
self.dbproxy.set_timeout_millis(tangodb_timeout) # Set a security timeout (default is 3000ms)
def get_tangodb_data(self) -> dict:
""" Dump a subset of TANGO database into dictionary.
The dictionary contains the info about all the Devices used in the
present environment, including their Properties values, their Attribute Properties,
and the namespace of the DeviceServers which incapsulate each Device.
"""
# Create empty dictionaries to be populated
devices_dict = {}
server_dict = {}
# Populate devices dictionary from query data
device_property_result = self._query_tangodb(self.dbproxy, self.DEVICE_PROPERTIES_QUERY, 3)
devices_dict = self.add_to_devices_dict(devices_dict, device_property_result)
# Populate devices dictionary with attribute properties from query data
attrs_property_result = self._query_tangodb(self.dbproxy, self.ATTRS_PROPERTIES_QUERY, 4)
devices_dict = self.add_to_attrs_dict(devices_dict, attrs_property_result)
# Populate server dictionary from query data and merge it with devices dict
server_result = self._query_tangodb(self.dbproxy, self.SERVER_QUERY, 3)
server_dict = self.add_to_server_dict(server_dict, devices_dict, server_result)
return {"servers" : server_dict}
def _query_tangodb(self, dbproxy: DeviceProxy, sql_query: str, num_cols: int) -> list:
""" Query TangoDb with a built-in function and return data as tuples """
_, raw_result = dbproxy.command_inout("DbMySqlSelect", sql_query)
return self.query_to_tuples(raw_result, num_cols)
def add_to_devices_dict(self, devices_dict:dict, result:list) -> dict:
""" Populate a devices dictionary with the following structure:
'device_name': { 'properties' : { 'property_name': ['property_value'] } }
"""
for device, property, value in result:
# lowercase data
device = device.lower()
property = property.lower()
# model dictionary
device_data = devices_dict.setdefault(device, {})
property_data = device_data.setdefault("properties", {})
value_data = property_data.setdefault(property, [])
value_data.append(value)
return devices_dict
def add_to_attrs_dict(self, devices_dict:dict, result:list) -> dict:
""" Populate a device dictionary with the following structure :
'device_name': { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } }
"""
for device, attribute, property, value in result:
# lowercase data
device = device.lower()
attribute = attribute.lower()
property = property.lower()
# model dictionary
device_data = devices_dict.setdefault(device, {})
property_data = device_data.setdefault("attribute_properties", {})
attr_data = property_data.setdefault(attribute, {})
value_data = attr_data.setdefault(property, [])
value_data.append(value)
return devices_dict
def add_to_server_dict(self, server_dict:dict, devices_dict:dict, result:list) -> dict:
""" Populate the server dictionary and merge it with the devices dictionary.
At the end of the process, the dictionary will have the following structure:
'server_name': { 'server_instance': { 'server_class': {
'device_name': { 'properties': { 'property_name': ['property_value'] },
'attribute_properties': { 'attribute_name': { 'property_name': ['property_value'] } } } } } }
"""
for server, sclass, device in result:
# lowercase data
device = device.lower()
server = server.lower()
sclass = sclass.lower()
# model dictionary
sname, instance = server.split('/')
device_data = devices_dict.get(device, {})
server_data = server_dict.setdefault(sname, {})
instance_data = server_data.setdefault(instance, {})
class_data = instance_data.setdefault(sclass, {})
# merge the two dictionaries
server_dict[sname][instance][sclass][device] = device_data
return server_dict
def query_to_tuples(self, result: list, num_cols: int) -> list:
""" Given a query result and its number of columns, transforms the raw result in a list of tuples """
return list(zip(*[islice(result, i, None, num_cols) for i in range(num_cols)]))
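For reference, a minimal stand-alone usage sketch of the class above (an illustration, not part of this module; it assumes a reachable Tango database, i.e. a valid TANGO_HOST):

import json
from tango import Database
from tangostationcontrol.common.configuration import StationConfiguration

# Connect to the Tango database behind TANGO_HOST and dump its contents.
station_configuration = StationConfiguration(Database())
dbdata = station_configuration.get_tangodb_data()

# dbdata is a plain dict: {"servers": {server: {instance: {class: {device: {...}}}}}}
print(json.dumps(dbdata, ensure_ascii=False, indent=4, sort_keys=True))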
......@@ -9,7 +9,7 @@ If a new device is added, it will (likely) need to be referenced in several plac
- Adjust `CDB/LOFAR_ConfigDb.json` to create the device in the Tango device database,
- Adjust `docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` and `docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` to make an alias for it available in Jupyter and Jupyter-Lab,
- Adjust `tangostationcontrol/tangostationcontrol/devices/boot.py` to add the device to the station initialisation sequence,
- Add to `docker-compose/` to create a YaML file to start the device in a docker container. NOTE: it needs a unique 57xx port assigned (current _unused_ port value: 5722), a unique 58xx port for ZMQ events, and a unique 59xx port for ZMQ heartbeat
- Add to `docker-compose/` to create a YaML file to start the device in a docker container. NOTE: it needs a unique 57xx port assigned (current _unused_ port value: 5723), a unique 58xx port for ZMQ events, and a unique 59xx port for ZMQ heartbeat
- Adjust `tangostationcontrol/setup.cfg` to add an entry point for the device in the package installation,
- Add to `tangostationcontrol/tangostationcontrol/integration_test/default/devices/` to add an integration test,
- Adjust `sbin/run_integration_test.sh` to have the device started when running the integration tests,
......
......@@ -240,6 +240,7 @@ class Boot(lofar_device):
dtype='DevVarStringArray',
mandatory=False,
default_value=["STAT/Docker/1", # Docker controls the device containers, so it goes before anything else
"STAT/Configuration/1", # Configuration device loads and update station configuration
"STAT/PSOC/1", # PSOC boot early to detect power delivery failure as fast as possible
"STAT/PCON/1", # PCON boot early because it is responsible for power delivery.
"STAT/APSPU/1", # APS Power Units control other hardware we want to initialise
......
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
""" Configuration Device Server for LOFAR2.0
Handles and exposes the station configuration
"""
# PyTango imports
from tango import AttrWriteType, Database
from tango.server import attribute
# Additional import
from tangostationcontrol.common.configuration import StationConfiguration
from tangostationcontrol.common.entrypoint import entry
from tangostationcontrol.devices.lofar_device import lofar_device
from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
import json
import logging
logger = logging.getLogger()
__all__ = ["Configuration", "main"]
@device_logging_to_python()
class Configuration(lofar_device):
# -----------------
# Device Properties
# -----------------
# ----------
# Attributes
# ----------
station_configuration_RW = attribute(dtype=str, access=AttrWriteType.READ_WRITE, doc='The Tango properties of all the devices in this station, as a JSON string.')
def read_station_configuration_RW(self):
return self._dump_configdb()
def write_station_configuration_RW(self, station_configuration):
""" Takes a JSON string which represents the station configuration
and loads the whole configuration from scratch.
N.B. it does not update the existing configuration; it loads a completely new one.
"""
# TODO(Stefano): L2SS-1031 implement load configuration
self.proxy.station_configuration_RW = station_configuration
def _dump_configdb(self):
""" Returns the TangoDB station configuration as a JSON string """
dbdata = self.station_configuration.get_tangodb_data()
return json.dumps(dbdata, ensure_ascii=False, indent=4, sort_keys=True)
# --------
# overloaded functions
# --------
@log_exceptions()
def configure_for_initialise(self):
super().configure_for_initialise()
self.station_configuration = StationConfiguration(db = Database())
# ----------
# Run server
# ----------
def main(**kwargs):
"""Main function of the Boot module."""
return entry(Configuration, **kwargs)
......@@ -48,6 +48,8 @@ class Docker(lofar_device):
device_boot_R = attribute_wrapper(comms_annotation={"container": "device-boot"}, datatype=bool)
device_boot_RW = attribute_wrapper(comms_annotation={"container": "device-boot"}, datatype=bool, access=AttrWriteType.READ_WRITE)
device_docker_R = attribute_wrapper(comms_annotation={"container": "device-docker"}, datatype=bool)
device_configuration_R = attribute_wrapper(comms_annotation={"container": "device-configuration"}, datatype=bool)
device_configuration_RW = attribute_wrapper(comms_annotation={"container": "device-configuration"}, datatype=bool, access=AttrWriteType.READ_WRITE)
# device_docker_RW is not available, as we cannot start our own container
device_temperature_manager_R = attribute_wrapper(comms_annotation={"container": "device-temperature-manager"}, datatype=bool)
device_temperature_manager_RW = attribute_wrapper(comms_annotation={"container": "device-temperature-manager"}, datatype=bool, access=AttrWriteType.READ_WRITE)
......
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
from tango import Database
from tangostationcontrol.common.configuration import StationConfiguration
from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
class TestStationConfiguration(BaseIntegrationTestCase):
sc = StationConfiguration(Database())
def test_query_to_tuples(self):
""" Test whether Tango DB data are correctly converted into tuples """
raw_result = ['device1', 'property_name1', 'value1', 'device1', 'property_name2', 'value2']
num_col = 3
record1 = ('device1', 'property_name1', 'value1')
record2 = ('device1', 'property_name2', 'value2')
expected_result = [record1, record2]
self.assertEqual(self.sc.query_to_tuples(raw_result, num_col), expected_result)
def test_add_to_devices_dict(self):
""" Test whether data retrieved from DB are correctly inserted into devices dictionary """
data = [('device1', 'property_name1', 'value1'), ('device1', 'property_name2', 'value2')]
expected_result = {'device1': {'properties': { 'property_name1': ['value1'],
'property_name2': ['value2']}}}
self.assertEqual(self.sc.add_to_devices_dict({}, data), expected_result)
def test_add_to_attrs_dict(self):
""" Test whether data retrieved from DB are correctly inserted into attributes dictionary """
# Two attributes
data_2attrs = [('device1', 'attribute1', 'attr_property_name1', 'value1'),
('device1', 'attribute2', 'attr_property_name1', 'value2')]
expected_result = {'device1': {'attribute_properties': {'attribute1': {'attr_property_name1': ['value1']},
'attribute2': {'attr_property_name1': ['value2']}}}}
self.assertEqual(self.sc.add_to_attrs_dict({}, data_2attrs), expected_result)
# One attribute, two property values
data_1attr = [('device1', 'attribute1', 'attr_property_name1', 'value1'),
('device1', 'attribute1', 'attr_property_name1', 'value2')]
expected_result = {'device1': {'attribute_properties': {'attribute1':
{'attr_property_name1': ['value1','value2']}}}}
self.assertEqual(self.sc.add_to_attrs_dict({}, data_1attr), expected_result)
def test_add_to_server_dict(self):
""" Test whether data retrieved from DB are correctly inserted into server dictionary """
data = [('server_name/server_instance', 'server_class', 'device1')]
devices_dict = {'device1': {'properties': { 'property_name1': ['value1'],
'property_name2': ['value2']}}}
expected_result = {'server_name': {'server_instance': {'server_class':
{'device1': {'properties': {'property_name1': ['value1'],
'property_name2': ['value2']}}}}}}
self.assertEqual(self.sc.add_to_server_dict({}, devices_dict, data), expected_result)