Commit 43083dba authored by Taya Snijder

Merge branch 'master' of https://git.astron.nl/lofar2.0/tango

parents 7e40db8b c64692aa
1 merge request: !140 Resolve L2SS-413 "2021 09 30 branched from master optimise hdf5 file size"
@@ -114,7 +114,8 @@ class OPCUAConnection(CommClient):
         ping the client to make sure the connection with the client is still functional.
         """
         try:
-            self.client.send_hello()
+            #self.client.send_hello() # <-- this crashes when communicating with open62541 v1.2.2+
+            pass
         except Exception as e:
             raise Exception("Lost connection to server %s: %s", self._servername(), e)
...
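With `send_hello()` commented out, the ping body is a no-op and a lost connection only surfaces on the next real request. If an active liveness check is still wanted, a minimal sketch (assuming the python-opcua `Client` API this class wraps) could read a mandatory server node instead of sending a HEL message:

```
# Sketch of an alternative liveness probe, assuming the python-opcua Client API.
# Server_ServerStatus_State is a mandatory node on every OPC UA server, so
# reading it exercises a full request/response round trip without send_hello().
from opcua import ua

def ping(self):
    try:
        state_node = self.client.get_node(ua.NodeId(ua.ObjectIds.Server_ServerStatus_State))
        state_node.get_value()
    except Exception as e:
        raise Exception("Lost connection to server %s: %s" % (self._servername(), e))
```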
# -*- coding: utf-8 -*-
#
# This file is part of the XXX project
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
"""Abstract Device Meta for LOFAR2.0
"""
from abc import ABCMeta
import logging
from tango.server import DeviceMeta
logger = logging.getLogger()
class AbstractDeviceMetas(DeviceMeta, ABCMeta):
"""Collects meta classes to allow hardware_device to be both a Device and an ABC. """
def __new__(mcs, name, bases, namespace, **kwargs):
cls = ABCMeta.__new__(mcs, name, bases, namespace, **kwargs)
cls = DeviceMeta.__new__(type(cls), name, bases, namespace)
return cls
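For context, a self-contained sketch (using stand-in classes, not the real Tango types) of the metaclass conflict this class resolves:

```
from abc import ABCMeta, abstractmethod

class FrameworkMeta(type):
    """Stand-in for tango.server.DeviceMeta."""

class Framework(metaclass=FrameworkMeta):
    """Stand-in for tango.server.Device."""

# This would fail with "TypeError: metaclass conflict", because the metaclass
# of a derived class must subclass the metaclasses of all its bases:
#
#   class Broken(Framework, metaclass=ABCMeta): ...

class CombinedMeta(FrameworkMeta, ABCMeta):
    """Subclasses both metaclasses, as AbstractDeviceMetas does."""

class AbstractDevice(Framework, metaclass=CombinedMeta):
    @abstractmethod
    def example_method(self):
        raise NotImplementedError

# ABCMeta's contract still holds: instantiating AbstractDevice raises TypeError
# because example_method is abstract.
```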
@@ -20,18 +20,15 @@ from tango import DevState, DebugIt, Attribute, DeviceProxy
 from clients.attribute_wrapper import attribute_wrapper
 from common.lofar_logging import log_exceptions
-import logging
+from devices.abstract_device import AbstractDeviceMetas
+from devices.device_decorators import only_in_states, fault_on_error
+
+import logging

 __all__ = ["hardware_device"]

-from devices.device_decorators import only_in_states, fault_on_error
-
 logger = logging.getLogger()

-class AbstractDeviceMetas(DeviceMeta, ABCMeta):
-    ''' Collects meta classes to allow hardware_device to be both a Device and an ABC. '''
-    pass
-
 #@log_exceptions()
 class hardware_device(Device, metaclass=AbstractDeviceMetas):
...
@@ -100,11 +100,14 @@ class SDP(hardware_device):
     # SDP will switch from FPGA_mask_RW to TR_FPGA_mask_RW, offer both for now as its a critical flag
     FPGA_firmware_version_R = attribute_wrapper(comms_annotation=["2:FPGA_firmware_version_R"], datatype=numpy.str, dims=(16,))
+    FPGA_global_node_index_R = attribute_wrapper(comms_annotation=["2:FPGA_global_node_index_R"], datatype=numpy.uint32, dims=(16,))
     FPGA_hardware_version_R = attribute_wrapper(comms_annotation=["2:FPGA_hardware_version_R"], datatype=numpy.str, dims=(16,))
     FPGA_processing_enable_R = attribute_wrapper(comms_annotation=["2:FPGA_processing_enable_R"], datatype=numpy.bool_, dims=(16,))
     FPGA_processing_enable_RW = attribute_wrapper(comms_annotation=["2:FPGA_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_scrap_R = attribute_wrapper(comms_annotation=["2:FPGA_scrap_R"], datatype=numpy.int32, dims=(8192,))
     FPGA_scrap_RW = attribute_wrapper(comms_annotation=["2:FPGA_scrap_RW"], datatype=numpy.int32, dims=(8192,), access=AttrWriteType.READ_WRITE)
+    FPGA_signal_input_mean_R = attribute_wrapper(comms_annotation=["2:FPGA_signal_input_mean_R"], datatype=numpy.double, dims=(12, 16))
+    FPGA_signal_input_rms_R = attribute_wrapper(comms_annotation=["2:FPGA_signal_input_rms_R"], datatype=numpy.double, dims=(12, 16))
     FPGA_sdp_info_antenna_band_index_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_antenna_band_index_R"], datatype=numpy.uint32, dims=(16,))
     FPGA_sdp_info_block_period_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_block_period_R"], datatype=numpy.uint32, dims=(16,))
     FPGA_sdp_info_f_adc_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_f_adc_R"], datatype=numpy.uint32, dims=(16,))
@@ -131,10 +134,13 @@ class SDP(hardware_device):
     TR_fpga_mask_R = attribute_wrapper(comms_annotation=["2:TR_fpga_mask_R"], datatype=numpy.bool_, dims=(16,))
     TR_fpga_mask_RW = attribute_wrapper(comms_annotation=["2:TR_fpga_mask_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     TR_fpga_communication_error_R = attribute_wrapper(comms_annotation=["2:TR_fpga_communication_error_R"], datatype=numpy.bool_, dims=(16,))
+    TR_sdp_config_first_fpga_nr_R = attribute_wrapper(comms_annotation=["2:TR_sdp_config_first_fpga_nr_R"], datatype=numpy.uint32)
+    TR_sdp_config_nof_beamsets_R = attribute_wrapper(comms_annotation=["2:TR_sdp_config_nof_beamsets_R"], datatype=numpy.uint32)
+    TR_sdp_config_nof_fpgas_R = attribute_wrapper(comms_annotation=["2:TR_sdp_config_nof_fpgas_R"], datatype=numpy.uint32)
     TR_software_version_R = attribute_wrapper(comms_annotation=["2:TR_software_version_R"], datatype=numpy.str)
-    TR_start_time_R = attribute_wrapper(comms_annotation=["2:TR_start_time_R"], datatype=numpy.int32)
-    TR_tod_R = attribute_wrapper(comms_annotation=["2:TR_tod_R"], datatype=numpy.uint64)
-    TR_tod_pps_delta_R = attribute_wrapper(comms_annotation=["2:TR_tod_pps_delta_R"], datatype=numpy.float_)
+    TR_start_time_R = attribute_wrapper(comms_annotation=["2:TR_start_time_R"], datatype=numpy.int64)
+    TR_tod_R = attribute_wrapper(comms_annotation=["2:TR_tod_R"], datatype=numpy.int64, dims=(2,))
+    TR_tod_pps_delta_R = attribute_wrapper(comms_annotation=["2:TR_tod_pps_delta_R"], datatype=numpy.double)

     def always_executed_hook(self):
         """Method always executed before any TANGO command is executed."""
...
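For illustration, a hypothetical client-side read of the attributes added here via PyTango (the device name is illustrative, not part of this commit):

```
# Hypothetical usage sketch, assuming a running SDP device server registered
# under an illustrative name.
from tango import DeviceProxy

sdp = DeviceProxy("LTS/SDP/1")

# Scalar attributes added in this commit (no dims argument means scalar).
print(sdp.TR_sdp_config_nof_fpgas_R)

# TR_tod_R is now read as a 2-element int64 array instead of a single uint64.
print(sdp.TR_tod_R)
```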
@@ -12,10 +12,11 @@ and writing those matrices (as well as a bunch of metadata) to hdf5.
 The TCP statistics writer can be called with the `tcp_hdf5_writer.py` script.
 This script can be called with the following arguments:
 ```
---address      the address to connect to
+--host         the address to connect to
 --port         the port to use
+--file         file to read from (as opposed to host and port)
 --interval     The time between creating new files in hours
---location     specifies the folder to write all the files
+--output_dir   specifies the folder to write all the files
 --mode         sets the statistics type to be decoded options: "SST", "XST", "BST"
 --debug        takes no arguments, when used prints a lot of extra data to help with debugging
 ```
...
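A hypothetical invocation with the renamed arguments (host, port and output directory are illustrative values):

```
python tcp_hdf5_writer.py --host localhost --port 5101 --mode SST --interval 1 --output_dir ./statistics
```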
@@ -6,7 +6,6 @@ import pytz
 import h5py
 import numpy
-import json
 import logging

 # import statistics classes with workaround
@@ -23,10 +22,14 @@ __all__ = ["hdf5_writer"]

 class hdf5_writer:

+    SST_MODE = "SST"
+    XST_MODE = "XST"
+    BST_MODE = "BST"
+
     def __init__(self, new_file_time_interval, file_location, statistics_mode):

-        # all variables that deal with the SST matrix that's currently being decoded
+        # all variables that deal with the matrix that's currently being decoded
         self.current_matrix = None
         self.current_timestamp = datetime.min.replace(tzinfo=pytz.UTC)
@@ -36,13 +39,14 @@ class hdf5_writer:

         # file handing
         self.file_location = file_location
-        self.new_file_time_interval = timedelta(hours=new_file_time_interval)
+        self.new_file_time_interval = timedelta(seconds=new_file_time_interval)
         self.last_file_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.file = None

-        # config the writer for the correct statistics type
-        self.collector = None
+        # parameters that are configured depending on the mode the statistics writer is in (SST,XST,BST)
         self.decoder = None
+        self.collector = None
+        self.store_function = None
         self.mode = statistics_mode.upper()
         self.config_mode()
@@ -117,8 +121,8 @@ class hdf5_writer:
         # create the new hdf5 group based on the timestamp of packets
         current_group = self.file.create_group("{}_{}".format(self.mode, self.current_timestamp.strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]))

-        # store the statistics values
-        current_group.create_dataset(name=f"{self.mode}_values", data=self.current_matrix.parameters["sst_values"])
+        # store the statistics values for the current group
+        self.store_function(current_group)

         # might be optional, but they're easy to add.
         current_group.create_dataset(name="nof_payload_errors", data=self.current_matrix.parameters["nof_payload_errors"])
@@ -138,6 +142,17 @@ class hdf5_writer:
             else:
                 current_group.attrs[k] = v

+    def write_sst_matrix(self, current_group):
+        # store the SST values
+        current_group.create_dataset(name="sst_values", data=self.current_matrix.parameters["sst_values"])
+
+    def write_xst_matrix(self, current_group):
+        # requires a function call to transform the xst_blocks in to the right structure
+        current_group.create_dataset(name="xst_values", data=self.current_matrix.xst_values())
+
+    def write_bst_matrix(self, current_group):
+        raise NotImplementedError("BST values not implemented")
+
     def process_packet(self, packet):
         logger.debug(f"Processing packet")
@@ -170,19 +185,26 @@ class hdf5_writer:
         """
         Configures the object for the correct statistics type to be used.
+
+        decoder:        the class to decode a single packet
+        collector:      the class to collect statistics packets
+        store_function: the function to write the mode specific data to file
         """

-        if self.mode == 'SST':
+        if self.mode == self.SST_MODE:
             self.decoder = SSTPacket
             self.collector = statistics_collector.SSTCollector
-        elif self.mode == 'BST':
-            # self.decoder = XSTPacket
-            raise NotImplementedError("BST collector has not yet been implemented")
-        elif self.mode == 'XST':
-            # self.decoder = XSTPacket
+            self.store_function = self.write_sst_matrix
+
+        elif self.mode == self.XST_MODE:
+            self.decoder = XSTPacket
+            self.collector = statistics_collector.XSTCollector
+            self.store_function = self.write_xst_matrix
+
+        elif self.mode == self.BST_MODE:
+            self.store_function = self.write_bst_matrix
             raise NotImplementedError("BST collector has not yet been implemented")
+
         else:
+            # make sure the mode is valid
             raise ValueError("invalid statistics mode specified '{}', please use 'SST', 'XST' or 'BST' ".format(self.mode))

     def close_writer(self):
...
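A usage sketch of the resulting dispatch: the mode string selects the decoder, collector and store function once, after which the write path is mode-agnostic. Here `read_packets()` is a hypothetical stand-in for the TCP or file packet source:

```
# Hypothetical driver loop around the hdf5_writer class above.
writer = hdf5_writer(new_file_time_interval=300, file_location="./statistics", statistics_mode="SST")

try:
    for packet in read_packets():  # read_packets() is a stand-in packet source
        # decodes with self.decoder, accumulates with self.collector and, on a
        # new timestamp, writes the finished matrix via self.store_function
        writer.process_packet(packet)
finally:
    writer.close_writer()
```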
@@ -11,7 +11,7 @@ logger = logging.getLogger("statistics_writer")

 parser = argparse.ArgumentParser(description='Converts a stream of statistics packets into HDF5 files.')
 parser.add_argument('--host', type=str, help='the host to connect to')
-parser.add_argument('--port', type=int, default=5101, help='the port to connect to (default: %(default)s)')
+parser.add_argument('--port', type=int, default=0, help='the port to connect to, or 0 to use default port for the selected mode (default: %(default)s)')
 parser.add_argument('--file', type=str, help='the file to read from')
 parser.add_argument('--mode', type=str, choices=['SST', 'XST', 'BST'], default='SST', help='sets the statistics type to be decoded options (default: %(default)s)')
@@ -33,6 +33,10 @@ if __name__ == "__main__":
     mode = args.mode
     debug = args.debug

+    if port == 0:
+        default_ports = { "SST": 5101, "XST": 5102, "BST": 5103 }
+        port = default_ports[mode]
+
     if debug:
         logger.setLevel(logging.DEBUG)
         logger.debug("Setting loglevel to DEBUG")
...
@@ -19,80 +19,43 @@ class statistics_data:
     the datasets in them.
     """

-    NOF_PAYLOAD_ERRORS = "nof_payload_errors"
-    NOF_VALID_PAYLOADS = "nof_valid_payloads"
-    FIRST_PACKET_HEADER = "first_packet_header"
-    STATISTICS_VALUES = "statistics_values"
-
-    def __init__(self, file, statistics_name):
-        self.nof_valid_payloads = file.get(f"{statistics_name}/{statistics_data.NOF_VALID_PAYLOADS}")
-        self.nof_payload_errors = file.get(f"{statistics_name}/{statistics_data.NOF_PAYLOAD_ERRORS}")
-        self.first_packet_header = file.get(f"{statistics_name}/{statistics_data.FIRST_PACKET_HEADER}")
-        self.statistics_values = file.get(f"{statistics_name}/{statistics_data.STATISTICS_VALUES}")
-
 class explorer:
     """
     This class serves both as a tool to test and verify the content of HDF5 files as well as provide an example
     of how you can go through HDF5 files.
-
-    The first 2 functions, print_high_level and print_full both call the hdf5 file.visititems function. this function
-    takes another function as argument and then calls that function for each and every group and dataset in the file.
-
-    The last 2 functions do this without this file.visititems function and instead have knowledge of how we structure the
-    statistics data.
     """

     def __init__(self, filename):
         self.file = h5py.File(filename, 'r')
-    def print_high_level(self):
-        """Calls a function that will go through all groups and datasets in the file and pass data along to another specified function"""
-        self.file.visititems(self._high_level_explorer)
-
-    def print_full(self):
-        """Calls a function that will go through all groups and datasets in the file and pass data along to another specified function"""
-        self.file.visititems(self._full_explorer)
-
-    def _full_explorer(self, name, obj):
-        """
-        Called by the file.visititems(func) function. Gets called for each and every group and dataset.
-        Prints all groups and datasets including their content.
-        """
-        shift = name.count('/') * ' '
-        data = self.file.get(name)
-        logger.debug(f"{shift}{name}: {data}")
-        logger.debug(numpy.array(data))
-
-    def _high_level_explorer(self, name, obj):
-        """
-        Called by the file.visititems(func) function. Gets called for each and every group and dataset.
-        Only lists the groups and datasets without the actual content.
-        """
-        shift = name.count('/') * ' '
-        data = self.file.get(name)
-        logger.debug(f"{shift}{name}: {data}")
     def print_all_statistics_full(self):
         """
         Explores the file with knowledge of the file structure. assumes all top level groups are statistics
         and that all statistics groups are made up of datasets.
         Prints the groups, the datasets and the content of the datasets.
+        Can easily be modified to instead of just logging all the data, store it in whatever structure is needed.
         """
-        # List all groups
-        logger.debug("Keys: %s" % self.file.keys())
         for group_key in self.file.keys():
             dataset = list(self.file[group_key])

-            #print group name
-            logger.debug(f" \n\ngroup: {group_key}")
-
-            # Go through all the datasets
             for i in dataset:
                 data = self.file.get(f"{group_key}/{i}")
-                logger.debug(group_key)
-                logger.debug(numpy.array(data))
+                logger.debug(f"    dataset: {i}")
+                logger.debug(f"    Data: {numpy.array(data)}")
+
+            # go through all the attributes in the group (This is the header info)
+            attr_keys = self.file[group_key].attrs.keys()
+            for i in attr_keys:
+                attr = self.file[group_key].attrs[i]
+                logger.debug(f"    {i}: {attr}")

     def print_all_statistics_top_level(self):
         """
@@ -108,7 +71,6 @@ class explorer:
         logger.debug(group_key)

-# create a data dumper that creates a new file every 10s (for testing)
 if __name__ == "__main__":
     args = parser.parse_args()
@@ -122,6 +84,7 @@ if __name__ == "__main__":
     """
     Print only the names of all the statistics in this file
     """
+    logger.debug("--------------Top level groups--------------")
     Explorer.print_all_statistics_top_level()
...
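A minimal sketch of reading one of these files back with h5py, assuming the layout the writer produces (`<MODE>_<timestamp>` groups containing the statistics dataset plus header attributes; the filename is illustrative):

```
import h5py
import numpy

# Filename is illustrative; the writer names groups "<MODE>_<timestamp>".
with h5py.File("SST_2021-09-30-12-00-00.h5", "r") as f:
    for group_key in f.keys():
        group = f[group_key]
        values = numpy.array(group["sst_values"])          # the statistics matrix
        errors = numpy.array(group["nof_payload_errors"])  # per-FPGA error counts
        header = dict(group.attrs)                         # first packet header fields
        print(group_key, values.shape, errors, header)
```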
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
import abc
from unittest import mock
from tango import DevFailed
from tango import server
from tango.server import attribute
from tango.test_context import DeviceTestContext
from devices.abstract_device import AbstractDeviceMetas
from test import base
class TestAbstractDevice(base.TestCase):

    class AbstractExample(object, metaclass=abc.ABCMeta):
        """A pure abc.ABCMeta metaclass with an abstract method

        This is an abstract class that inherits object with the abc.ABCMeta as
        metaclass
        """

        @abc.abstractmethod
        def example_method(self):
            raise NotImplementedError

    class TestHardwareDevice(server.Device, metaclass=AbstractDeviceMetas):
        """This is your overarching abstract class with a combined metaclass

        Device is an object with DeviceMeta as metaclass
        We use AbstractDeviceMetas as metaclass

        Our metaclass contract is now fulfilled.
        """

        @attribute(dtype=float)
        def call_example_method(self):
            return self.example_method()

        @abc.abstractmethod
        def example_method(self):
            raise NotImplementedError

    class ConcreteHardwareDevice(TestHardwareDevice):

        def example_method(self):
            return 12

    def setUp(self):
        super(TestAbstractDevice, self).setUp()

    def test_instance_tango(self):
        try:
            with DeviceTestContext(self.TestHardwareDevice, process=True) as proxy:
                # Calling this method raises the NotImplementedError exception
                proxy.call_example_method()
        except Exception as e:
            self.assertIsInstance(e, DevFailed)

        with DeviceTestContext(self.ConcreteHardwareDevice, process=True) as proxy:
            self.assertEqual(12, proxy.call_example_method)

    @mock.patch.object(server, 'get_worker')
    @mock.patch.object(server, 'LatestDeviceImpl')
    def test_instance_error(self, m_implement, m_worker):
        # Creating this object should raise a type error but it does not:
        # combining metaclasses in this way does not have the desired result.
        # This is a known limitation of this approach.
        m_device = self.TestHardwareDevice(mock.Mock(), mock.Mock())

        # Raising the NotImplementedError works as expected, however.
        self.assertRaises(NotImplementedError, m_device.example_method)

        # Creating an object of a class that has a pure metaclass does raise
        # the expected error.
        self.assertRaises(TypeError, self.AbstractExample)
@@ -91,7 +91,7 @@
       "targets": [
         {
           "exemplar": true,
-          "expr": "device_attribute{device=\"lts/stationcontrol/1\",name=\"initialisation_progress_R\"}",
+          "expr": "device_attribute{device=\"lts/boot/1\",name=\"initialisation_progress_R\"}",
           "interval": "",
           "legendFormat": "",
           "refId": "A"
@@ -345,7 +345,7 @@
       "targets": [
         {
           "exemplar": true,
-          "expr": "device_attribute{device=\"lts/stationcontrol/1\",name=\"initialisation_status_R\"}",
+          "expr": "device_attribute{device=\"lts/boot/1\",name=\"initialisation_status_R\"}",
           "instant": true,
           "interval": "",
           "legendFormat": "",
@@ -365,7 +365,7 @@
                 "Time": true,
                 "Value": true,
                 "device": true,
-                "device_attribute{device=\"lts/stationcontrol/1\", dim_x=\"1\", dim_y=\"0\", instance=\"tango-prometheus-exporter:8000\", job=\"tango\", label=\"initialisation_status_R\", name=\"initialisation_status_R\", str_value=\"Initialisation completed\", type=\"string\", x=\"0\", y=\"0\"}": true,
+                "device_attribute{device=\"lts/boot/1\", dim_x=\"1\", dim_y=\"0\", instance=\"tango-prometheus-exporter:8000\", job=\"tango\", label=\"initialisation_status_R\", name=\"initialisation_status_R\", str_value=\"Initialisation completed\", type=\"string\", x=\"0\", y=\"0\"}": true,
                 "dim_x": true,
                 "dim_y": true,
                 "instance": true,
...
@@ -966,7 +966,7 @@ enabled = true
 [feature_toggles]
 # enable features, separated by spaces
-;enable =
+enable = ngalert

 [date_formats]
 # For information on what formatting patterns that are supported https://momentjs.com/docs/#/displaying/
...