diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json index 07ec1d19cbc03ba3345083ab1743bbf5cb8b1a70..0415262942e43476b7c867a372c0476a6bb54fcb 100644 --- a/CDB/LOFAR_ConfigDb.json +++ b/CDB/LOFAR_ConfigDb.json @@ -427,55 +427,59 @@ } }, "RandomData": { - "CS998": { + "LTS": { "RandomData": { - "computation/random/1": { + "LTS/RandomData/1": { "properties": { "polled_attr": [ "rnd1", - "100", + "1000", "rnd2", - "100", + "1000", "rnd3", - "100", + "1000", "rnd4", - "100", + "1000", "rnd5", - "100", + "1000", "rnd6", - "100", + "1000", "rnd7", - "100", + "1000", "rnd8", - "100", + "1000", "rnd9", - "100", + "1000", "rnd10", - "100", + "1000", "rnd11", - "100", + "1000", "rnd12", - "100", + "1000", "rnd13", - "100", + "1000", "rnd14", - "100", + "1000", "rnd15", - "100", + "1000", "rnd16", - "100", + "1000", "rnd17", - "100", + "1000", "rnd18", - "100", + "1000", "rnd19", - "100", + "1000", "rnd20", - "100" + "1000", + "state", + "1000", + "status", + "1000" ] } }, - "computation/random/2": { + "LTS/RandomData/2": { "properties": { "polled_attr": [ "rnd1", diff --git a/bootstrap/etc/lofar20rc.sh b/bootstrap/etc/lofar20rc.sh index 5c78ee177941e2701363fd6a9dac381576c79ed3..e9e8ac326b32aa0130c98005e2f472457bb65f42 100755 --- a/bootstrap/etc/lofar20rc.sh +++ b/bootstrap/etc/lofar20rc.sh @@ -17,7 +17,7 @@ export LOFAR20_DIR=${1:-$(realpath ${ABSOLUTE_PATH}/../..)} # The current setting is for a production environment. export TANGO_LOFAR_LOCAL_DIR=${LOFAR20_DIR}/ -export TANGO_LOFAR_CONTAINER_DIR=${LOFAR20_DIR}/ +export TANGO_LOFAR_CONTAINER_DIR=/opt/lofar2.0/tango/ export TANGO_LOFAR_CONTAINER_MOUNT=${TANGO_LOFAR_LOCAL_DIR}:${TANGO_LOFAR_CONTAINER_DIR}:rw # This needs to be modified for a development environment. diff --git a/devices/RandomData.py b/devices/RandomData.py new file mode 100644 index 0000000000000000000000000000000000000000..86836b73ce9a5a154d33a8ac891a8517678b08f7 --- /dev/null +++ b/devices/RandomData.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# +# This file is part of the LOFAR2.0 project +# +# +# +# Distributed under the terms of the APACHE license. +# See LICENSE.txt for more info. 
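In the ConfigDb the polled_attr property is a flat list of (attribute name, poll period in ms) pairs, so the change above renames the device to LTS/RandomData/1, slows polling from 100 ms to 1000 ms, and adds state and status to the polled set. A minimal check that the pairs took effect on a live device, assuming a reachable Tango database and a running device server (not part of the patch):

from tango import Database, DeviceProxy

db = Database()
# e.g. {'polled_attr': ['rnd1', '1000', 'rnd2', '1000', ...]}
props = db.get_device_property("LTS/RandomData/1", "polled_attr")
pairs = list(props["polled_attr"])
configured = dict(zip(pairs[0::2], pairs[1::2]))

proxy = DeviceProxy("LTS/RandomData/1")
for attr, period_ms in configured.items():
    # get_attribute_poll_period() returns 0 when the attribute is not polled.
    actual = proxy.get_attribute_poll_period(attr)
    print(f"{attr}: configured {period_ms} ms, device reports {actual} ms")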
+ +# PyTango imports +from tango import DevState +from tango.server import run, Device, attribute, command +from numpy import random + +__all__ = ["RandomData", "main"] + +class RandomData(Device): + """ + Random data monitor point device + """ + def read(self): + return random.random() + + # Attributes + rnd1 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd2 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd3 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd4 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd5 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd6 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd7 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd8 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd9 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd10 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + 
abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd11 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd12 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd13 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd14 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd15 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd16 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd17 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd18 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd19 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change = 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + rnd20 = attribute( + dtype = 'DevDouble', + polling_period = 1000, + period = 1000, + rel_change = 0.1, + abs_change = 0.1, + archive_period = 1000, + archive_rel_change 
= 0.1, + archive_abs_change = 0.1, + max_value = 1.0, + min_value = 0.0, + max_alarm = 1.0, + min_alarm = 0.99, + max_warning = 0.99, + min_warning = 0.98, + fget = read, + ) + + # General methods + def init_device(self): + """ + Initialises the attributes and properties of the RandomData device. + """ + Device.init_device(self) + self.set_state(DevState.OFF) + + self.rnd1.set_data_ready_event(True) + self.set_change_event("rnd1", True, True) + self.set_archive_event("rnd1", True, True) + self.rnd2.set_data_ready_event(True) + self.set_change_event("rnd2", True, True) + self.set_archive_event("rnd2", True, True) + self.rnd3.set_data_ready_event(True) + self.set_change_event("rnd3", True, True) + self.set_archive_event("rnd3", True, True) + self.rnd4.set_data_ready_event(True) + self.set_change_event("rnd4", True, True) + self.set_archive_event("rnd4", True, True) + self.rnd5.set_data_ready_event(True) + self.set_change_event("rnd5", True, True) + self.set_archive_event("rnd5", True, True) + self.rnd6.set_data_ready_event(True) + self.set_change_event("rnd6", True, True) + self.set_archive_event("rnd6", True, True) + self.rnd7.set_data_ready_event(True) + self.set_change_event("rnd7", True, True) + self.set_archive_event("rnd7", True, True) + self.rnd8.set_data_ready_event(True) + self.set_change_event("rnd8", True, True) + self.set_archive_event("rnd8", True, True) + self.rnd9.set_data_ready_event(True) + self.set_change_event("rnd9", True, True) + self.set_archive_event("rnd9", True, True) + self.rnd10.set_data_ready_event(True) + self.set_change_event("rnd10", True, True) + self.set_archive_event("rnd10", True, True) + self.rnd11.set_data_ready_event(True) + self.set_change_event("rnd11", True, True) + self.set_archive_event("rnd11", True, True) + self.rnd12.set_data_ready_event(True) + self.set_change_event("rnd12", True, True) + self.set_archive_event("rnd12", True, True) + self.rnd13.set_data_ready_event(True) + self.set_change_event("rnd13", True, True) + self.set_archive_event("rnd13", True, True) + self.rnd14.set_data_ready_event(True) + self.set_change_event("rnd14", True, True) + self.set_archive_event("rnd14", True, True) + self.rnd15.set_data_ready_event(True) + self.set_change_event("rnd15", True, True) + self.set_archive_event("rnd15", True, True) + self.rnd16.set_data_ready_event(True) + self.set_change_event("rnd16", True, True) + self.set_archive_event("rnd16", True, True) + self.rnd17.set_data_ready_event(True) + self.set_change_event("rnd17", True, True) + self.set_archive_event("rnd17", True, True) + self.rnd18.set_data_ready_event(True) + self.set_change_event("rnd18", True, True) + self.set_archive_event("rnd18", True, True) + self.rnd19.set_data_ready_event(True) + self.set_change_event("rnd19", True, True) + self.set_archive_event("rnd19", True, True) + self.rnd20.set_data_ready_event(True) + self.set_change_event("rnd20", True, True) + self.set_archive_event("rnd20", True, True) + self.set_state(DevState.ON) + + def delete_device(self): + self.set_state(DevState.OFF) + + +def main(args = None, **kwargs): + """ + Main function of the RandomData module. 
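The twenty rndN attributes and the per-attribute event calls in init_device differ only in the attribute name. A sketch of how the shared settings could be factored out while keeping the same PyTango high-level API; the helper name and the reduced attribute count are illustrative only, not part of the patch:

from numpy import random
from tango import DevState
from tango.server import Device, attribute


def _rnd_attribute(fget):
    """One 'rndN' attribute with the settings shared by all of them."""
    return attribute(
        dtype='DevDouble',
        polling_period=1000, period=1000,
        rel_change=0.1, abs_change=0.1,
        archive_period=1000, archive_rel_change=0.1, archive_abs_change=0.1,
        max_value=1.0, min_value=0.0,
        max_alarm=1.0, min_alarm=0.99, max_warning=0.99, min_warning=0.98,
        fget=fget,
    )


class RandomDataSketch(Device):
    """Sketch with two of the attributes; rnd3..rnd20 follow the same pattern."""

    N_ATTRIBUTES = 2   # the device in this patch defines 20

    def read(self):
        return random.random()

    rnd1 = _rnd_attribute(read)
    rnd2 = _rnd_attribute(read)

    def init_device(self):
        Device.init_device(self)
        self.set_state(DevState.OFF)
        # The change/archive event flags can be set in a loop instead of one
        # pair of calls per attribute.
        for i in range(1, self.N_ATTRIBUTES + 1):
            name = f"rnd{i}"
            self.set_change_event(name, True, True)
            self.set_archive_event(name, True, True)
        self.set_state(DevState.ON)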
+ """ + return run((RandomData,), args = args, **kwargs) + +if __name__ == '__main__': + main() diff --git a/devices/SDP_statistics.py b/devices/SDP_statistics.py new file mode 100644 index 0000000000000000000000000000000000000000..a0492afd9d60ba56a5cb061db01032f10ed6510c --- /dev/null +++ b/devices/SDP_statistics.py @@ -0,0 +1,219 @@ +from struct import unpack, calcsize +from datetime import datetime, timezone +import numpy + +__all__ = ["StatisticsPacket"] + +def get_bit_value(value: bytes, first_bit: int, last_bit:int=None) -> int: + """ Return bits [first_bit:last_bit] from value, and return their integer value. Bit 0 = LSB. + + For example, extracting bits 2-3 from b'01100' returns 11 binary = 3 decimal: + get_bit_value(b'01100', 2, 3) == 3 + + If 'last_bit' is not given, just the value of bit 'first_bit' is returned. """ + + # default last_bit to first_bit + if last_bit is None: + last_bit = first_bit + + return value >> first_bit & ((1 << (last_bit - first_bit + 1)) - 1) + +class StatisticsPacket(object): + """ + Models a statistics UDP packet from SDP. + + Packets are expected to be UDP payload only (so no Ethernet/IP/UDP headers). + """ + + def __init__(self, packet: bytes): + self.packet = packet + + # Only parse valid packets + if self.marker not in 'SBX': + raise ValueError("Invalid SDP statistics packet: packet marker (first byte) is '{}', not one of 'SBX'.".format(self.marker)) + + @property + def marker(self) -> str: + """ Return the type of statistic: + + 'S' = SST + 'B' = BST + 'X' = XST + """ + + raw_marker = unpack("c",self.packet[0:1])[0] + + try: + return raw_marker.decode('ascii') + except UnicodeDecodeError: + # non-ascii (>127) character, return as binary + # + # this is typically not visible to the user, as these packets are not SDP statistics packets, + # which the constructor will refuse to accept. + return raw_marker + + @property + def version_id(self) -> int: + """ Return the version of this packet. """ + + return unpack("B",self.packet[1:2])[0] + + @property + def observation_id(self) -> int: + """ Return the ID of the observation running when this packet was generated. """ + + return unpack("<I",self.packet[2:6])[0] + + @property + def station_id(self) -> int: + """ Return the number of the station this packet was generated on. """ + + return unpack("<H",self.packet[6:8])[0] + + @property + def source_info(self) -> int: + """ Return a dict with the source_info flags. The dict contains the following fields: + + _raw: raw value of the source_info field in the packet, as an integer. + antenna_band_index: antenna type. 0 = low band, 1 = high band. + nyquist_zone_index: nyquist zone of filter: + 0 = 0 -- 1/2 * t_adc Hz (low band), + 1 = 1/2 * t_adc -- t_adc Hz (high band), + 2 = t_adc -- 3/2 * t_adc Hz (high band). + t_adc: sampling clock. 0 = 160 MHz, 1 = 200 MHz. + fsub_type: sampling method. 0 = critically sampled, 1 = oversampled. + payload_error: 0 = data is ok, 1 = data is corrupted (a fault was encountered). + beam_repositioning_flag: 0 = data is ok, 1 = beam got repositioned during packet construction (BST only). + subband_calibrated_flag: 1 = subband data had subband calibration values applied, 0 = not + reserved: reserved bits + gn_index: global index of FPGA that emitted this packet. 
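get_bit_value() shifts and masks a plain integer, with bit 0 as the LSB, which is how the source_info flag word below is decomposed into its fields. A short, self-contained illustration; the 16-bit word is invented, and the import assumes devices/ is on the Python path:

from SDP_statistics import get_bit_value

word = 0b1010_0000_0001_0011          # made-up 16-bit source_info-style word

gn_index = get_bit_value(word, 0, 4)          # bits 0..4  -> 19
payload_error = get_bit_value(word, 10)       # single bit -> 0
antenna_band_index = get_bit_value(word, 15)  # MSB        -> 1

# The docstring's own example, written as an integer literal:
assert get_bit_value(0b01100, 2, 3) == 3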
""" + + bits = unpack("<H",self.packet[8:10])[0] + + return { + "_raw": bits, + "antenna_band_index": get_bit_value(bits, 15), + "nyquist_zone_index": get_bit_value(bits, 13, 14), + "t_adc": get_bit_value(bits, 12), + "fsub_type": get_bit_value(bits, 11), + "payload_error": get_bit_value(bits, 10), + "beam_repositioning_flag": get_bit_value(bits, 9), + "subband_calibrated_flag": get_bit_value(bits, 8), + "reserved": get_bit_value(bits, 5, 7), + "gn_index": get_bit_value(bits, 0, 4), + } + + @property + def reserved(self) -> bytes: + """ Reserved bytes. """ + + return self.packet[10:11] + + @property + def integration_interval_raw(self) -> int: + """ Returns the integration interval, in blocks. """ + + # This field is 3 bytes, little endian, so we need to append a 0 to parse it as a 32-bit integer. + return unpack("<I", self.packet[11:14] + b'0')[0] + + def integration_interval(self) -> float: + """ Returns the integration interval, in seconds. """ + + # Translate to seconds using the block period + return self.integration_interval_raw * self.block_period() + + @property + def data_id(self) -> int: + """ Returns the generic data identifier. """ + + return unpack("<I",self.packet[14:18])[0] + + @property + def nof_signal_inputs(self) -> int: + """ Number of inputs that were used for constructing the payload. """ + return unpack("<B",self.packet[18:19])[0] + + @property + def nof_bytes_per_statistic(self) -> int: + """ Word size for the payload. """ + + return unpack("<B",self.packet[19:20])[0] + + @property + def nof_statistics_per_packet(self) -> int: + """ Number of data points in the payload. """ + + return unpack("<H",self.packet[20:22])[0] + + @property + def block_period_raw(self) -> int: + """ Return the block period, in nanoseconds. """ + + return unpack("<H",self.packet[22:24])[0] + + def block_period(self) -> float: + """ Return the block period, in seconds. """ + + return self.block_period_raw / 1e9 + + @property + def block_serial_number(self) -> int: + """ Block index since epoch (1970). """ + + return unpack("<Q",self.packet[24:32])[0] + + def timestamp(self) -> datetime: + """ Returns the timestamp of the data in this packet. """ + + return datetime.fromtimestamp(self.block_serial_number * self.block_period(), timezone.utc) + + def header(self) -> dict: + """ Return all the header fields as a dict. """ + + return { + "marker": self.marker, + "version_id": self.version_id, + "observation_id": self.observation_id, + "station_id": self.station_id, + "source_info": self.source_info, + "reserved": self.reserved, + "integration_interval_raw": self.integration_interval_raw, + "integration_interval": self.integration_interval(), + "data_id": self.data_id, + "nof_signal_inputs": self.nof_signal_inputs, + "nof_bytes_per_statistic": self.nof_bytes_per_statistic, + "nof_statistics_per_packet": self.nof_statistics_per_packet, + "block_period_raw": self.block_period_raw, + "block_period": self.block_period(), + "block_serial_number": self.block_serial_number, + "timestamp": self.timestamp(), + } + + @property + def payload_sst(self) -> numpy.array: + """ The payload of this packet, interpreted as SST data. """ + + if self.marker != 'S': + raise Exception("Payload of SST requested of a non-SST packet. 
Actual packet marker is '{}', but must be 'S'.".format(self.marker)) + + # derive which and how many elements to read from the packet header + bytecount_to_unsigned_struct_type = { 1: 'B', 2: 'H', 4: 'I', 8: 'Q' } + format_str = "<{}{}".format(self.nof_statistics_per_packet, bytecount_to_unsigned_struct_type[self.nof_bytes_per_statistic]) + + return numpy.array(unpack(format_str, self.packet[32:32+calcsize(format_str)])) + + +if __name__ == "__main__": + # parse one packet from stdin + import sys + import pprint + + # read all of stdin, even though we only parse the first packet. we're too lazy to intelligently decide when + # the packet is complete and can stop reading. + data = sys.stdin.buffer.read() + packet = StatisticsPacket(data) + + # print header & payload + pprint.pprint(packet.header()) + pprint.pprint(packet.payload_sst) + diff --git a/devices/test/SDP_SST_statistics_packet.bin b/devices/test/SDP_SST_statistics_packet.bin new file mode 100644 index 0000000000000000000000000000000000000000..ade2d62c32eb6cbf4fb9b5ec2d7c0368ab0af408 Binary files /dev/null and b/devices/test/SDP_SST_statistics_packet.bin differ diff --git a/devices/util/lts_cold_start.py b/devices/util/lts_cold_start.py new file mode 100644 index 0000000000000000000000000000000000000000..18b2bbb01fdff1508a65beec5333a4572369000f --- /dev/null +++ b/devices/util/lts_cold_start.py @@ -0,0 +1,221 @@ +#! /usr/bin/env python3 +import logging +from time import sleep +from .startup import startup +from .lofar2_config import configure_logging + + +def start_device(device: str): + ''' + Start a Tango device with the help of the startup function. + The device will not be forced to got through + OFF/INIT/STANDBY/ON but it is assumed that the device is in OFF + state. If the device is not in OFF state, then an exception + will be raised. + ''' + dev = startup(device = device, force_restart = False) + state = device.state() + if state is not tango._tango.DevState.ON: + raise Exception("Device \"{}\" is unexpectedly in \"{}\" state but it is expected to be in \"{}\" state. Please check the reason for the unexpected device state. Aborting the start-up procedure.".format(device, state, tango._tango.DevState.ON)) + return device + + +def lts_cold_start(): + ''' + What is this? + This is the LTS (LOFAR Test - and I forgot what S stands for) cold start + procedure cast into source code. The procedure can be found there: + https://support.astron.nl/confluence/display/L2M/LTS+startup+procedure + + Paulus wrote already a script that - illegally ;) - makes direct use of the + OPC-UA servers to accomplish the same thing that we are doing here. + Paulus' script can be found there: + https://git.astron.nl/lofar2.0/pypcc/-/blob/master/scripts/Startup.py + Thanks, Paulus! You made it very easy for me to cobble together this + script. + + For obvious reasons is our script much better though. :) + First, it is bigger. And bigger is always better. + Then it is better documented but that does not count in the HW world. + But it also raises exceptions with error messages that make an attempt to + help the user reading them and shuts down the respective Tango device(s) if + something goes south. + And that is where we try to do it really right: there is no reason to be + excessively verbatim when things work like they are expected to work. But + tell the user when something goes wrong, give an indication of what could + have gone wrong and where to look for the problem. 
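The StatisticsPacket class can be exercised against the SST test capture that this patch adds under devices/test/. A sketch, assuming it is run with devices/ as the working directory so the import and the relative path resolve:

from SDP_statistics import StatisticsPacket

with open("test/SDP_SST_statistics_packet.bin", "rb") as f:
    packet = StatisticsPacket(f.read())

header = packet.header()
print(header["marker"], header["station_id"], header["timestamp"])
print("source_info:", header["source_info"])

# payload_sst raises for non-SST packets, so guard on the marker first.
if packet.marker == 'S':
    sst = packet.payload_sst
    print("SST data points:", len(sst), "max:", sst.max())

The module's __main__ block does the same from stdin, so redirecting the .bin file into python3 SDP_statistics.py gives an equivalent dump.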
+ + Again, Paulus' script contains already very good indications where problems + might lie and made my job very easy. + + No parameters, parameters are for wimps. :) + ''' + # Define the LOFAR2.0 specific log format + configure_logging() + + # Get a reference to the PCC device, do not + # force a restart of the already running Tango + # device. + pcc = startup("LTS/PCC/1") + + # Getting CLK, RCU & RCU ADCs into proper shape for use by real people. + # + # The start-up needs to happen in this sequence due to HW dependencies + # that can introduce issues which are then becoming very complicated to + # handle in SW. Therefore to keep it as simple as possible, let's stick + # to the rule recommended by Paulus: + # 1 CLK + # 2 RCU + # 3 RCU ADCs + # + # + # First take the CLK board through the motions. + # 1.1 Switch off CLK + # 1.2 Wait for CLK_translator_busy_R == True, throw an exception in timeout + # 1.3 Switch on CLK + # 1.4 Wait for CLK_translator_busy_R == True, throw an exception in timeout + # 1.5 Check if CLK_PLL_locked_R == True + # 1.6 Done + # + # + # Steps 1.1 & 1.2 + pcc.CLK_off() + # 2021-04-30, Thomas + # This should be refactored into a function. + timeout = 10.0 + while pcc.CLK_translator_busy_R is True: + logging.debug("Waiting on \"CLK_translator_busy_R\" to become \"True\"...") + timeout = timeout - 1.0 + if timeout < 1.0: + # Switching the PCC clock off should never take longer than + # 10 seconds. Here we ran into a timeout. + # Clean up and raise an exception. + pcc.off() + raise Exception("After calling \"CLK_off\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\". Please investigate the reason why the PCC translator never set \"CLK_translator_busy_R\" to \"True\". Aborting start-up procedure.") + sleep(1.0) + + # Steps 1.3 & 1.4 + pcc.CLK_on() + # Per Paulus this should never take longer than 2 seconds. + # 2021-04-30, Thomas + # This should be refactored into a function. + timeout = 2.0 + while pcc.CLK_translator_busy_R is True: + logging.debug("After calling \"CLK_on()\" Waiting on \"CLK_translator_busy_R\" to become \"True\"...") + timeout = timeout - 1.0 + if timeout < 1.0: + # Switching the PCC clock on should never take longer than + # a couple of seconds. Here we ran into a timeout. + # Clean up and raise an exception. + pcc.off() + raise Exception("After calling \"CLK_on\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\". Please investigate the reason why the PCC translator never set \"CLK_translator_busy_R\" to \"True\". Aborting start-up procedure.") + sleep(1.0) + + # 1.5 Check if CLK_PLL_locked_R == True + # 2021-04-30, Thomas + # This should be refactored into a function. + clk_locked = pcc.CLK_PLL_locked_R + if clk_locked is True: + logging.info("CLK signal is locked.") + else: + # CLK signal is not locked + clk_i2c_status = pcc.CLK_I2C_STATUS_R + exception_text = "CLK I2C is not working. Please investigate! Maybe power cycle subrack to restart CLK board and translator. Aborting start-up procedure." + if i2c_status <= 0: + exception_text = "CLK signal is not locked. Please investigate! The subrack probably do not receive clock input or the CLK PCB is broken. Aborting start-up procedure." + pcc.off() + raise Exception(exception_text) + # Step 1.6 + # Done. + + # 2 RCUs + # If we reach this point in the start-up procedure, then the CLK board setup + # is done. We can proceed with the RCUs. + # + # Now take the RCUs through the motions. 
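Both CLK wait loops above (and the RCU loops that follow) poll a *_translator_busy_R attribute with a hand-rolled countdown, which is exactly what the 2021-04-30 comments ask to refactor into a function. A hypothetical helper sketching that refactoring; the name wait_until_not_busy and its parameters are not part of the patch:

import logging
from time import sleep


def wait_until_not_busy(device, busy_attr: str, timeout: float, poll: float = 1.0):
    """Poll getattr(device, busy_attr) until it is no longer True.

    On timeout, switch the device off and raise, mirroring the inline loops above.
    """
    remaining = timeout
    while getattr(device, busy_attr) is True:
        logging.debug("Waiting for \"%s\" to clear...", busy_attr)
        remaining -= poll
        if remaining <= 0.0:
            device.off()
            raise Exception(
                "Timeout of {}s expired while waiting for \"{}\" to clear. "
                "Aborting start-up procedure.".format(timeout, busy_attr))
        sleep(poll)


# The sequences above would then reduce to, for example:
#   pcc.CLK_off()
#   wait_until_not_busy(pcc, "CLK_translator_busy_R", timeout=10.0)
#   pcc.CLK_on()
#   wait_until_not_busy(pcc, "CLK_translator_busy_R", timeout=2.0)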
+ # 2.1 Set RCU mask to all available RCUs + # 2.2 Switch off all RCUs + # 2.3 Wait for RCU_translator_busy_R = True, throw an exception in timeout + # 2.4 Switch on RCUs + # 2.5 Wait for RCU_translator_busy_R = True, throw an exception in timeout + # 2.6 Done + # + # + # Step 2.1 + # We have only 8 RCUs in LTS. + pcc.RCU_mask_RW = [True, ] * 8 + # Steps 2.2 & 2.3 + pcc.RCU_off() + # 2021-04-30, Thomas + # This should be refactored into a function. + timeout = 10.0 + while pcc.RCU_translator_busy_R is True: + logging.debug("Waiting on \"RCU_translator_busy_R\" to become \"True\"...") + timeout = timeout - 1.0 + if timeout < 1.0: + # Switching the RCUs off should never take longer than + # 10 seconds. Here we ran into a timeout. + # Clean up and raise an exception. + pcc.off() + raise Exception("After calling \"RCU_off\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\". Please investigate the reason why the PCC translator never set \"RCU_translator_busy_R\" to \"True\". Aborting start-up procedure.") + sleep(1.0) + + # Steps 2.4 & 2.5 + # We leave the RCU mask as it is because it got already set for the + # RCU_off() call. + pcc.RCU_on() + # Per Paulus this should never take longer than 5 seconds. + # 2021-04-30, Thomas + # This should be refactored into a function. + timeout = 5.0 + while pcc.RCU_translator_busy_R is True: + logging.debug("After calling \"RCU_on()\" Waiting on \"RCU_translator_busy_R\" to become \"True\"...") + timeout = timeout - 1.0 + if timeout < 1.0: + # Switching the RCUs on should never take longer than + # a couple of seconds. Here we ran into a timeout. + # Clean up and raise an exception. + pcc.off() + raise Exception("After calling \"RCU_on\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\". Please investigate the reason why the PCC translator never set \"RCU_translator_busy_R\" to \"True\". Aborting start-up procedure.") + sleep(1.0) + # Step 2.6 + # Done. + + # 3 ADCs + # If we get here, we only got to check if the ADCs are locked, too. + # 3.1 Check RCUs' I2C status + # 3.2 Check RCU_ADC_lock_R == [True, ] for RCUs that have a good I2C status + # 3.3 Done + # + # + # Steps 3.1 & 3.2 + rcu_mask = pcc.RCU_mask_RW + adc_locked = numpy.array(pcc.RCU_ADC_lock_R) + for rcu, i2c_status in enumerate(pcc.RCU_I2C_STATUS_R): + if i2c_status == 0: + rcu_mask[rcu] = True + logging.info("RCU #{} is available.".format(rcu)) + for adc, adc_is_locked in enumerate(adc_locked[rcu]): + if adc_is_locked < 1: + logging.warning("RCU#{}, ADC#{} is unlocked. Please investigate! Will continue with normal operation.".format(rcu, adc)) + else: + # The RCU's I2C bus is not working. + rcu_mask[rcu] = False + logging.error("RCU #{}'s I2C is not working. Please investigate! Disabling RCU #{} to avoid damage.".format(rcu, rcu)) + pcc.RCU_mask_RW = rcu_mask + # Step 3.3 + # Done + + # Start-up APSCTL, i.e. Uniboard2s. + aps = startup("APSCTL/SDP/1") + logging.warning("Cannot start-up APSCTL because it requires manual actions.") + + # Start up SDP, i.e. configure the firmware in the Unibards + sdp = startup("LTS/SDP/1") + logging.warning("Cannot start-up SDP because it requires manual actions.") + + logging.info("LTS has been successfully started and configured.") + + +if __name__ == '__main__': + lts_cold_start()
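The step-3 bookkeeping at the end of lts_cold_start() enables an RCU only when its I2C status is 0 and merely warns about unlocked ADCs on enabled RCUs. The isolated sketch below replays that logic on invented sample data; the status values and the 3-ADCs-per-RCU shape are made up for illustration:

import logging
import numpy

rcu_i2c_status = [0, 0, 255, 0, 0, 0, 1, 0]    # made up: RCU #2 and #6 have bad I2C
adc_locked = numpy.ones((8, 3), dtype=int)     # made up: assume 3 ADCs per RCU
adc_locked[4, 1] = 0                           # made up: one unlocked ADC on RCU #4

rcu_mask = [False] * 8
for rcu, i2c_status in enumerate(rcu_i2c_status):
    if i2c_status == 0:
        rcu_mask[rcu] = True
        for adc, adc_is_locked in enumerate(adc_locked[rcu]):
            if adc_is_locked < 1:
                logging.warning("RCU#%d, ADC#%d is unlocked.", rcu, adc)
    else:
        # A broken I2C bus disables the whole RCU, as in the procedure above.
        logging.error("RCU #%d's I2C is not working, disabling it.", rcu)

print(rcu_mask)   # [True, True, False, True, True, True, False, True]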