Skip to content
Snippets Groups Projects
Commit 3828da71 authored by Jan David Mol's avatar Jan David Mol
Browse files

L2SS-762: Configure station name in Tango database and use it in the prometheus export

parent 7ef575f8
Branches
Tags
1 merge request!312Resolve L2SS-762 "Add station name"
{ {
"objects": {
"station": {
"name": [
"DevStation"
],
"number": [
"999"
]
}
},
"servers": { "servers": {
"Docker": { "Docker": {
"STAT": { "STAT": {
......
{ {
"objects": {
"station": {
"name": [
"DTS"
],
"number": [
"902"
]
}
},
"servers": { "servers": {
"boot": { "boot": {
"STAT": { "STAT": {
......
{ {
"objects": {
"station": {
"name": [
"DTSOutside"
],
"number": [
"903"
]
}
},
"servers": { "servers": {
"boot": { "boot": {
"STAT": { "STAT": {
...@@ -341,22 +351,22 @@ ...@@ -341,22 +351,22 @@
"5.0" "5.0"
], ],
"FPGA_sdp_info_station_id_RW_default": [ "FPGA_sdp_info_station_id_RW_default": [
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902", "903",
"902" "903"
], ],
"TR_fpga_mask_RW_default": [ "TR_fpga_mask_RW_default": [
"True", "True",
......
{ {
"objects": {
"station": {
"name": [
"LTS"
],
"number": [
"901"
]
}
},
"servers": { "servers": {
"boot": { "boot": {
"STAT": { "STAT": {
......
#!/bin/bash #!/bin/bash
# writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci! # writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci!
docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig python -m dsconfig.dump docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig bash -c '
python -m dsconfig.dump > /tmp/dsconfig-configdb-dump.json
/manage_object_properties.py -r > /tmp/dsconfig-objectdb-dump.json
/merge_json.py /tmp/dsconfig-objectdb-dump.json /tmp/dsconfig-configdb-dump.json'
# Extend the upstream tango-dsconfig image (injected at build time via the
# SOURCE_IMAGE build argument, see docker-compose) with helper scripts for
# importing/exporting Tango free-object properties.
ARG SOURCE_IMAGE
FROM ${SOURCE_IMAGE}
# Scripts are placed in / and invoked as /manage_object_properties.py and
# /merge_json.py by the surrounding shell tooling.
COPY manage_object_properties.py /
COPY merge_json.py /
#!/usr/bin/env python3
"""
Import/export the object properties of the Tango Controls Database.
"""
from tango import Database
def read_objects(db = None) -> dict:
    """ Read and return all object properties.

    Returns a mapping of object name -> {property name -> list of values},
    covering every free object registered in the Tango database. """

    db = db or Database()

    # Build the nested mapping in one pass over all objects and their
    # properties; property values are copied into plain lists.
    return {
        obj: {
            prop: list(db.get_property(obj, prop)[prop])
            for prop in db.get_object_property_list(obj, "*").value_string
        }
        for obj in db.get_object_list("*").value_string
    }
def write_objects(objects: dict, db = None) -> None:
    """ Write the given object properties.

    `objects` maps object name -> {property name -> list of values},
    the same shape returned by read_objects(). """

    if db is None:
        db = Database()

    # One put_property call per object writes all of its properties at once.
    for obj in objects:
        db.put_property(obj, objects[obj])
if __name__ == "__main__":
    import sys
    import argparse
    import json

    # BUGFIX: the first positional argument of ArgumentParser is `prog` (the
    # program name shown in usage lines), not the description. Passing the
    # explanatory text positionally made --help print it as a bogus program
    # name and left the parser without a description. Use `description=`.
    parser = argparse.ArgumentParser(
        description="Import/export object properties of the Tango Database using the JSON file format")
    parser.add_argument('-w', '--write', default=False, required=False, action='store_true', help='import objects from stdin')
    parser.add_argument('-r', '--read', default=False, required=False, action='store_true', help='export all objects to stdout in JSON')
    args = parser.parse_args()

    # At least one of --read/--write must be requested; otherwise show usage
    # and exit with a non-zero status so callers notice the misuse.
    if not args.read and not args.write:
        parser.print_help()
        sys.exit(1)

    # import: read a {"objects": {...}} JSON document from stdin and write
    # the contained object properties into the Tango database.
    if args.write:
        objects = json.load(sys.stdin)
        write_objects(objects["objects"])

    # export: dump all object properties to stdout, wrapped in the same
    # {"objects": {...}} envelope accepted by --write.
    if args.read:
        objects = read_objects()
        print(json.dumps({"objects": objects}, indent=4))
#!/usr/bin/env python3
""" Merge all JSON files given on the command line at top level. """
import json
if __name__ == "__main__":
    import argparse

    # BUGFIX: the first positional argument of ArgumentParser is `prog` (the
    # program name), not the description. Pass the text via `description=` so
    # --help renders it correctly.
    parser = argparse.ArgumentParser(
        description="Merge input JSON files at top level. Keys from later files override those from earlier files.")
    parser.add_argument('files', metavar='FILE', type=str, nargs='+', help='JSON input files')
    args = parser.parse_args()

    result = {}

    # Read all provided files in the order given on the command line.
    for filename in args.files:
        with open(filename) as f:
            file_dict = json.load(f)

        # Shallow merge at top level: a key appearing in a later file
        # replaces the same key from any earlier file.
        result.update(file_dict)

    # Print the merged result as indented JSON on stdout.
    print(json.dumps(result, indent=4))
...@@ -64,7 +64,8 @@ class ArchiverPolicy(object): ...@@ -64,7 +64,8 @@ class ArchiverPolicy(object):
return sorted(list(attributes)) return sorted(list(attributes))
class CustomCollector(object): class CustomCollector(object):
def __init__(self, config, proxy_timeout=250): def __init__(self, station, config, proxy_timeout=250):
self.station = station
self.policy = ArchiverPolicy(config) self.policy = ArchiverPolicy(config)
self.proxy_timeout = proxy_timeout self.proxy_timeout = proxy_timeout
...@@ -97,7 +98,7 @@ class CustomCollector(object): ...@@ -97,7 +98,7 @@ class CustomCollector(object):
return None return None
# (labels, value) # (labels, value)
return ([dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value) return ([self.station, dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value)
def metrics_scalar(self, dev, attr_info, attr_value): def metrics_scalar(self, dev, attr_info, attr_value):
""" Return all metrics for a given SCALAR attribute. """ """ Return all metrics for a given SCALAR attribute. """
...@@ -182,8 +183,8 @@ class CustomCollector(object): ...@@ -182,8 +183,8 @@ class CustomCollector(object):
logger.info("Start scraping") logger.info("Start scraping")
scrape_begin = time.time() scrape_begin = time.time()
attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['device', 'name', 'str_value', 'type', 'x', 'y', 'idx']) attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['station', 'device', 'name', 'str_value', 'type', 'x', 'y', 'idx'])
scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['device']) scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['station', 'device'])
for device_name in self.policy.devices(): for device_name in self.policy.devices():
logger.debug(f"Processing device {device_name}") logger.debug(f"Processing device {device_name}")
...@@ -203,7 +204,7 @@ class CustomCollector(object): ...@@ -203,7 +204,7 @@ class CustomCollector(object):
logger.info(f"Done processing device {device_name}. Took {dev_scrape_end - dev_scrape_begin} seconds.") logger.info(f"Done processing device {device_name}. Took {dev_scrape_end - dev_scrape_begin} seconds.")
scraping_metrics.add_metric([device_name], dev_scrape_end - dev_scrape_begin) scraping_metrics.add_metric([self.station, device_name], dev_scrape_end - dev_scrape_begin)
scrape_end = time.time() scrape_end = time.time()
logger.info(f"Done scraping. Took {scrape_end - scrape_begin} seconds.") logger.info(f"Done scraping. Took {scrape_end - scrape_begin} seconds.")
...@@ -214,6 +215,8 @@ class CustomCollector(object): ...@@ -214,6 +215,8 @@ class CustomCollector(object):
yield scraping_metrics yield scraping_metrics
if __name__ == '__main__': if __name__ == '__main__':
import sys
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', type=str, required=True, help='configuration file') parser.add_argument('-c', '--config', type=str, required=True, help='configuration file')
parser.add_argument('-t', '--timeout', type=int, required=False, default=250, help='device proxy timeout (ms)') parser.add_argument('-t', '--timeout', type=int, required=False, default=250, help='device proxy timeout (ms)')
...@@ -221,7 +224,15 @@ if __name__ == '__main__': ...@@ -221,7 +224,15 @@ if __name__ == '__main__':
args = parser.parse_args() args = parser.parse_args()
config = ArchiverPolicy.load_config(args.config) config = ArchiverPolicy.load_config(args.config)
collector = CustomCollector(config, proxy_timeout=args.timeout)
db = Database()
try:
station = db.get_property("station","name")["name"][0]
except Exception as e:
logger.exception("Could not determine station name")
sys.exit(1)
collector = CustomCollector(config, station=station, proxy_timeout=args.timeout)
logger.info("Starting server") logger.info("Starting server")
start_http_server(args.port) start_http_server(args.port)
......
...@@ -70,7 +70,10 @@ services: ...@@ -70,7 +70,10 @@ services:
restart: unless-stopped restart: unless-stopped
dsconfig: dsconfig:
image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION} build:
context: dsconfig
args:
SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
container_name: ${CONTAINER_NAME_PREFIX}dsconfig container_name: ${CONTAINER_NAME_PREFIX}dsconfig
networks: networks:
- control - control
......
...@@ -11,6 +11,9 @@ fi ...@@ -11,6 +11,9 @@ fi
# in the container won't be the same as on the host. # in the container won't be the same as on the host.
docker cp "${file}" "${CONTAINER_NAME_PREFIX}"dsconfig:/tmp/dsconfig-update-settings.json || exit 1 docker cp "${file}" "${CONTAINER_NAME_PREFIX}"dsconfig:/tmp/dsconfig-update-settings.json || exit 1
# update settings, Do not change -i into -it this will break integration tests in gitlab ci!
docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig /manage_object_properties.py --write < "${file}"
# update settings, Do not change -i into -it this will break integration tests in gitlab ci! # update settings, Do not change -i into -it this will break integration tests in gitlab ci!
docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig json2tango --write --update /tmp/dsconfig-update-settings.json docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig json2tango --write --update /tmp/dsconfig-update-settings.json
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment