Commit 4dbf743c authored by Hannes Feldt

L2SS-1101: As an open source committee, I want the license mentioned in every source file

parent c6ba1821
1 merge request: !501 L2SS-1101: As an open source committee, I want the license mentioned in every source file
Showing changed files with 1039 additions and 762 deletions
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from dataclasses import dataclass
@dataclass(frozen=True)
class CableType:
"""A cable used in LOFAR, with its properties."""
name: str
length: int
delay: float
......@@ -23,7 +28,9 @@ class CableType:
elif rcu_band_select == 2:
return self.loss[50]
else:
raise ValueError(f"Unsupported RCU band selection for LBA: {rcu_band_select}")
raise ValueError(
f"Unsupported RCU band selection for LBA: {rcu_band_select}"
)
elif antenna_type == "HBA":
if rcu_band_select == 1:
return self.loss[200]
......@@ -32,21 +39,58 @@ class CableType:
elif rcu_band_select == 4:
return self.loss[250]
else:
raise ValueError(f"Unsupported RCU band selection for HBA: {rcu_band_select}")
raise ValueError(
f"Unsupported RCU band selection for HBA: {rcu_band_select}"
)
raise ValueError(f"Unsupported antenna type: {antenna_type}")
# Global list of all known cable types.
#
# NB: The LOFAR1 equivalents of these tables are:
# - MAC/Deployment/data/StaticMetaData/CableDelays/
# - MAC/Deployment/data/StaticMetaData/CableAttenuation.conf
cable_types = {}
cable_types[ "0m"] = CableType(name= "0m", length= 0, delay=000.0000e-9, loss={50: 0.00, 150: 0.00, 200: 0.00, 250: 0.00})
cable_types[ "50m"] = CableType(name= "50m", length= 50, delay=199.2573e-9, loss={50: 2.05, 150: 3.64, 200: 4.24, 250: 4.46})
cable_types[ "80m"] = CableType(name= "80m", length= 80, delay=326.9640e-9, loss={50: 3.32, 150: 5.87, 200: 6.82, 250: 7.19})
cable_types[ "85m"] = CableType(name= "85m", length= 85, delay=342.5133e-9, loss={50: 3.53, 150: 6.22, 200: 7.21, 250: 7.58})
cable_types["115m"] = CableType(name="115m", length=115, delay=465.5254e-9, loss={50: 4.74, 150: 8.35, 200: 9.70, 250: 10.18})
cable_types["120m"] = CableType(name="120m", length=120, delay=493.8617e-9, loss={50: 4.85, 150: 8.55, 200: 9.92, 250: 10.42}) # used on CS030
cable_types["130m"] = CableType(name="130m", length=130, delay=530.6981e-9, loss={50: 5.40, 150: 9.52, 200: 11.06, 250: 11.61})
cable_types["0m"] = CableType(
name="0m",
length=0,
delay=000.0000e-9,
loss={50: 0.00, 150: 0.00, 200: 0.00, 250: 0.00},
)
cable_types["50m"] = CableType(
name="50m",
length=50,
delay=199.2573e-9,
loss={50: 2.05, 150: 3.64, 200: 4.24, 250: 4.46},
)
cable_types["80m"] = CableType(
name="80m",
length=80,
delay=326.9640e-9,
loss={50: 3.32, 150: 5.87, 200: 6.82, 250: 7.19},
)
cable_types["85m"] = CableType(
name="85m",
length=85,
delay=342.5133e-9,
loss={50: 3.53, 150: 6.22, 200: 7.21, 250: 7.58},
)
cable_types["115m"] = CableType(
name="115m",
length=115,
delay=465.5254e-9,
loss={50: 4.74, 150: 8.35, 200: 9.70, 250: 10.18},
)
cable_types["120m"] = CableType(
name="120m",
length=120,
delay=493.8617e-9,
loss={50: 4.85, 150: 8.55, 200: 9.92, 250: 10.42},
) # used on CS030
cable_types["130m"] = CableType(
name="130m",
length=130,
delay=530.6981e-9,
loss={50: 5.40, 150: 9.52, 200: 11.06, 250: 11.61},
)
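For orientation, a minimal lookup sketch using the table above; the field names come from the CableType dataclass, while the band-selection method that maps rcu_band_select to a loss frequency is only partially shown in this diff.

# Minimal usage sketch, assuming only the cable_types dict and CableType fields above.
cable = cable_types["50m"]
print(cable.delay)      # 1.992573e-07 seconds
print(cable.loss[200])  # 4.24 dB, the loss used for HBA with rcu_band_select == 1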
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
import numpy
def delay_compensation(delays_seconds: numpy.ndarray, clock: int):
"""Return the delay compensation required to line up
signals that are delayed by "delays" seconds. The returned values
......@@ -37,10 +41,12 @@ def delay_compensation(delays_seconds: numpy.ndarray, clock: int):
return (input_delays_samples, input_delays_subsample_seconds)
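The body of delay_compensation is elided in this diff; as a rough illustration of the two values it returns, a cable delay can be decomposed into whole clock samples plus a sub-sample remainder. The rounding policy below is an assumption, and the real function additionally lines multiple inputs up against each other.

# Illustration only; rounding to the nearest sample is assumed here, not taken from the source.
import numpy

clock = 200_000_000                          # 200 MHz -> 5 ns sample period
delays_seconds = numpy.array([199.2573e-9])  # "50m" cable delay from cable_types
samples = numpy.round(delays_seconds * clock).astype(numpy.int64)  # [40]
remainder_seconds = delays_seconds - samples / clock                # [-7.427e-10]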
def dB_to_factor(dB: numpy.ndarray) -> numpy.ndarray:
"""Convert values in decibel (dB) into their equivalent scaling factors."""
return 10 ** (dB / 10)
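A quick numeric check of the conversion above (these are power ratios, so 3 dB is roughly a factor of two):

print(dB_to_factor(numpy.array([0.0, 3.0, 10.0])))  # ≈ [1.0, 1.995, 10.0]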
def loss_compensation(losses_dB: numpy.ndarray):
"""Return the attenuation required to line up
signals that are dampened by "lossed_dB" decibel.
......@@ -64,7 +70,9 @@ def loss_compensation(losses_dB: numpy.ndarray):
signal_attenuation_integer_dB = numpy.round(losses_dB).astype(numpy.uint32)
# correct for the coarse loss by dampening the signals to line up.
input_attenuation_integer_dB = max(signal_attenuation_integer_dB) - signal_attenuation_integer_dB
input_attenuation_integer_dB = (
max(signal_attenuation_integer_dB) - signal_attenuation_integer_dB
)
# compute the remainder, as a scaling factor
signal_loss_remainder_dB = losses_dB - signal_attenuation_integer_dB
......
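To make the coarse/fine split above concrete, here is a worked illustration with the 200 MHz losses of the "0m", "50m" and "80m" cables from cable_types; the final fine-correction step that uses the remainder is elided in this diff.

import numpy

losses_dB = numpy.array([0.00, 4.24, 6.82])
signal_attenuation_integer_dB = numpy.round(losses_dB).astype(numpy.uint32)  # [0 4 7]
input_attenuation_integer_dB = (
    max(signal_attenuation_integer_dB) - signal_attenuation_integer_dB       # [7 3 0]
)
signal_loss_remainder_dB = losses_dB - signal_attenuation_integer_dB         # [0.0, 0.24, -0.18]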
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
from tango import DeviceProxy, Database, DevFailed, DbDevInfo
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from itertools import islice
import json
import logging
from itertools import islice
from tango import DeviceProxy, Database, DevFailed, DbDevInfo
logger = logging.getLogger()
class StationConfiguration:
DEVICE_PROPERTIES_QUERY = "SELECT device, property_device.name, property_device.value FROM property_device \
......@@ -34,12 +30,20 @@ class StationConfiguration:
ORDER BY server ASC"
# Servers that NEVER must be removed
DEFAULT_SKIP_SERVER_NAMES = ['configuration/stat', 'databaseds/2', 'tangorestserver/rest', 'tangotest/test', 'tangoaccesscontrol/1']
DEFAULT_SKIP_SERVER_NAMES = [
"configuration/stat",
"databaseds/2",
"tangorestserver/rest",
"tangotest/test",
"tangoaccesscontrol/1",
]
def __init__(self, db: Database, tangodb_timeout: int = 10000):
self.db = db # TangoDB
self.dbproxy = DeviceProxy(db.dev_name()) # TangoDB Proxy
self.dbproxy.set_timeout_millis(tangodb_timeout) # Set a security timeout (default is 3000ms)
self.dbproxy.set_timeout_millis(
tangodb_timeout
) # Set a security timeout (default is 3000ms)
#
# DUMPING TANGO DATABASE
......@@ -57,11 +61,15 @@ class StationConfiguration:
server_dict = {}
# Populate devices dictionary from query data
device_property_result = self._query_tangodb(self.dbproxy, self.DEVICE_PROPERTIES_QUERY, 3)
device_property_result = self._query_tangodb(
self.dbproxy, self.DEVICE_PROPERTIES_QUERY, 3
)
devices_dict = self.add_to_devices_dict(devices_dict, device_property_result)
# Populate devices dictionary from query data
attrs_property_result = self._query_tangodb(self.dbproxy, self.ATTRS_PROPERTIES_QUERY, 4)
attrs_property_result = self._query_tangodb(
self.dbproxy, self.ATTRS_PROPERTIES_QUERY, 4
)
devices_dict = self.add_to_attrs_dict(devices_dict, attrs_property_result)
# Populate server dictionary from query data and merge it with devices dict
......@@ -69,7 +77,9 @@ class StationConfiguration:
server_dict = self.add_to_server_dict(server_dict, devices_dict, server_result)
return {"servers": server_dict}
def _query_tangodb(self, dbproxy: DeviceProxy, sql_query: str, num_cols: int) -> list:
def _query_tangodb(
self, dbproxy: DeviceProxy, sql_query: str, num_cols: int
) -> list:
"""Query TangoDb with a built-in function and return data as tuples"""
_, raw_result = dbproxy.command_inout("DbMySqlSelect", sql_query)
return self.query_to_tuples(raw_result, num_cols)
......@@ -106,7 +116,9 @@ class StationConfiguration:
value_data.append(value)
return devices_dict
def add_to_server_dict(self, server_dict:dict, devices_dict:dict, result:list) -> dict:
def add_to_server_dict(
self, server_dict: dict, devices_dict: dict, result: list
) -> dict:
"""Populate the server dictionary and merge it with the devices dictionary.
At the end of the process, the dictionary will have the following structure :
'server_name' : { 'server_instance' : { 'server_class' :
......@@ -119,7 +131,7 @@ class StationConfiguration:
server = server.lower()
sclass = sclass.lower()
# model dictionary
sname, instance = server.split('/')
sname, instance = server.split("/")
device_data = devices_dict.get(device, {})
server_data = server_dict.setdefault(sname, {})
instance_data = server_data.setdefault(instance, {})
......@@ -154,8 +166,15 @@ class StationConfiguration:
# Select if update or loading configuration from scratch
if not update:
# Select the servers to be removed after having built a proper select query
server_select_query = self.build_select_server_query(self.DEFAULT_SKIP_SERVER_NAMES)
servers_to_be_removed = [ server for server, _, _ in self._query_tangodb(self.dbproxy, server_select_query, 3)]
server_select_query = self.build_select_server_query(
self.DEFAULT_SKIP_SERVER_NAMES
)
servers_to_be_removed = [
server
for server, _, _ in self._query_tangodb(
self.dbproxy, server_select_query, 3
)
]
for server in servers_to_be_removed:
# Remove devices
self.delete_server(server)
......@@ -193,20 +212,31 @@ class StationConfiguration:
'device_name': { 'properties' : { 'property_name': ['property_value'] } },
{ 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } } } }
"""
configuration_db = tangodb_dict['servers']
configuration_db = tangodb_dict["servers"]
for server_name in configuration_db:
instance_data = configuration_db.get(server_name, {})
for instance in instance_data:
# Excluding from update default_skip_servers as well
if f"{server_name}/{instance}".lower() not in self.DEFAULT_SKIP_SERVER_NAMES:
if (
f"{server_name}/{instance}".lower()
not in self.DEFAULT_SKIP_SERVER_NAMES
):
class_data = instance_data.get(instance, {})
for _class in class_data:
device_data = class_data.get(_class, {})
for device_name in device_data:
self._insert_server_into_db(server_name, instance, _class, device_data, device_name)
self._insert_server_into_db(
server_name, instance, _class, device_data, device_name
)
def _insert_server_into_db(self, server_name:str, instance:str, _class:str, device_data:str,
device_name:str):
def _insert_server_into_db(
self,
server_name: str,
instance: str,
_class: str,
device_data: str,
device_name: str,
):
"""Insert a new server with all its relative info into the Tango DB"""
device_info = DbDevInfo() # Built-in Tango object to interact with DB
# Set device name
......@@ -221,13 +251,13 @@ class StationConfiguration:
logger.info(f"Server {server_name}/{instance} has been inserted into DB")
# Add device properties
device_property_data = device_data.get(device_name, {})
if 'properties' in device_property_data:
property_data = device_property_data['properties']
if "properties" in device_property_data:
property_data = device_property_data["properties"]
# https://pytango.readthedocs.io/en/stable/database.html#tango.Database.put_device_property
self.db.put_device_property(device_name, property_data)
# Add attribute properties
if 'attribute_properties' in device_property_data:
attr_property_data = device_property_data['attribute_properties']
if "attribute_properties" in device_property_data:
attr_property_data = device_property_data["attribute_properties"]
# https://pytango.readthedocs.io/en/stable/database.html#tango.Database.put_device_attribute_property
self.db.put_device_attribute_property(device_name, attr_property_data)
......
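To make the nested layout described in the docstrings above concrete, a dump produced by this code could look like the fragment below; every server, device, and property name here is invented purely for illustration.

# Hypothetical example of the {"servers": ...} structure; all names are made up.
example_dump = {
    "servers": {
        "sdp": {                                  # server name
            "stat": {                             # server instance
                "sdp": {                          # server class
                    "stat/sdp/1": {               # device name
                        "properties": {"OPC_Server_Name": ["sdp-sim"]},
                        "attribute_properties": {
                            "FPGA_temp_R": {"archive_period": ["10000"]}
                        },
                    }
                }
            }
        }
    }
}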
from tangostationcontrol.common.baselines import nr_baselines
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from tangostationcontrol.common.baselines import nr_baselines
# number of FPGA processing nodes
N_pn = 16
......@@ -48,7 +50,6 @@ N_subbands = 512
# Number of points per subband (the resolution)
N_subband_res = 1024
# main clock frequency's are 200MHz and 160MHz
CLK_200_MHZ = 200_000_000
CLK_160_MHZ = 160_000_000
......@@ -74,7 +75,6 @@ N_ddr = 2
# number of QSFP tranceivers per uniboard
N_qsfp = 24
# the three spatial dimensions XYZ used a lot for PQR and ITRF coordinates.
N_xyz = 3
# amount of parameters needed for a pointing
......@@ -82,7 +82,6 @@ N_point_prop = 3
# number of values for latitude/longitude coordinates
N_latlong = 2
# default subband we use because of its low RFI
DEFAULT_SUBBAND = 102
......
# -*- coding: utf-8 -*-
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
import sys
from tango.server import run
from tangostationcontrol.common.lofar_logging import configure_logger
......
# -*- coding: utf-8 -*-
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
import sys
from tango import DeviceProxy
def main(*args, **kwargs):
"""Main function health check"""
......
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
import logging
from functools import wraps
from tango.server import Device
import traceback
import socket
import time
import traceback
from functools import wraps
from tango.server import Device
from tangostationcontrol import __version__ as version
class TangoLoggingHandler(logging.Handler):
LEVEL_TO_DEVICE_STREAM = {
logging.DEBUG: Device.debug_stream,
......@@ -39,6 +44,7 @@ class TangoLoggingHandler(logging.Handler):
self.flush()
class LogSuppressErrorSpam(logging.Formatter):
"""
Suppress specific errors from spamming the logs, by only letting them through periodically.
......@@ -57,7 +63,10 @@ class LogSuppressErrorSpam(logging.Formatter):
def is_error_to_suppress(self, record):
# Errors occuring by not being able to connect to the log processing container, f.e. because it is down.
return record.name == "LogProcessingWorker" and record.msg == "An error occurred while sending events: %s"
return (
record.name == "LogProcessingWorker"
and record.msg == "An error occurred while sending events: %s"
)
def filter(self, record):
if self.is_error_to_suppress(record):
......@@ -71,6 +80,7 @@ class LogSuppressErrorSpam(logging.Formatter):
return True
class LogAnnotator(logging.Formatter):
"""Annotates log records with:
......@@ -102,6 +112,7 @@ class LogAnnotator(logging.Formatter):
# we just annotate, we don't filter
return True
def configure_logger(logger: logging.Logger = None, log_extra=None, debug=False):
"""
Configure the given logger (or root if None) to:
......@@ -140,7 +151,12 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
# Always also log the hostname because it makes the origin of the log clear.
hostname = socket.gethostname()
formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)d %(levelname)s - %(tango_device)s: %(message)s [%(funcName)s in %(filename)s:%(lineno)d]'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S')
formatter = logging.Formatter(
fmt="%(asctime)s.%(msecs)d %(levelname)s - %(tango_device)s: %(message)s [%(funcName)s in %(filename)s:%(lineno)d]".format(
hostname
),
datefmt="%Y-%m-%dT%H:%M:%S",
)
handler.setFormatter(formatter)
handler.addFilter(LogSuppressErrorSpam())
handler.addFilter(LogAnnotator())
......@@ -153,10 +169,15 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
# Log to Logstash-Loki
try:
from logstash_async.handler import AsynchronousLogstashHandler, LogstashFormatter
from logstash_async.handler import (
AsynchronousLogstashHandler,
LogstashFormatter,
)
# log to the tcp_input of logstash in our logstash-loki container
handler = AsynchronousLogstashHandler("logstash", 5959, database_path='/tmp/lofar_pending_log_messages.db')
handler = AsynchronousLogstashHandler(
"logstash", 5959, database_path="/tmp/lofar_pending_log_messages.db"
)
# configure log messages
formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"])
......@@ -167,7 +188,9 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
# install the handler
logger.addHandler(handler)
except ImportError:
logger.exception("Cannot forward logs to Logstash-Loki: logstash_async module not found.")
logger.exception(
"Cannot forward logs to Logstash-Loki: logstash_async module not found."
)
except Exception:
logger.exception("Cannot forward logs to Logstash-Loki.")
......@@ -185,13 +208,16 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
return logger
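A minimal usage sketch of configure_logger; the exact effect of the debug flag is assumed from its name, and logging to Logstash-Loki degrades gracefully per the except branches above.

logger = configure_logger(debug=True)   # None -> configure the root logger
logger.info("station control logging configured")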
def device_logging_to_python():
"""Decorator. Call this on a Tango Device instance or class to have your Tango Device log to python instead."""
def inner(cls):
# we'll be doing very weird things if this class isnt
if not issubclass(cls, Device):
raise ValueError("device_logging_to_python decorator is to be used on Tango Device classes only.")
raise ValueError(
"device_logging_to_python decorator is to be used on Tango Device classes only."
)
# Monkey patch the python logger to replace the tango logger
logger = logging.getLogger()
......@@ -208,6 +234,7 @@ def device_logging_to_python():
return inner
def log_exceptions(logger: logging.Logger = None):
"""Decorator that logs all exceptions that the function raises."""
......@@ -217,7 +244,9 @@ def log_exceptions(logger: logging.Logger=None):
try:
return func(self, *args, **kwargs)
except Exception as e:
(logger or logging.getLogger()).exception(f"Unhandled exception: {e.__class__.__name__}: {e}")
(logger or logging.getLogger()).exception(
f"Unhandled exception: {e.__class__.__name__}: {e}"
)
# we can log but we cannot hide
raise
......
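A usage sketch of the two decorators defined above on a Tango Device; the class and method names are illustrative only.

from tango.server import Device

@device_logging_to_python()
class ExampleDevice(Device):
    @log_exceptions()
    def init_device(self):
        super().init_device()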
# -*- coding: utf-8 -*-
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
""" Utility functions for managing the casacore 'measures' tables.
......@@ -31,13 +29,13 @@ needs to be restarted in order to clear the cache.
"""
import pathlib
import urllib.request
import tarfile
import datetime
import os
import sys
import pathlib
import shutil
import sys
import tarfile
import urllib.request
# Where to store the measures table sets
IERS_ROOTDIR = "/opt/IERS"
......@@ -48,11 +46,13 @@ DOWNLOAD_DIR = "/tmp"
# Where new measures can be downloaded
MEASURES_URL = "ftp://ftp.astron.nl/outgoing/Measures/WSRT_Measures.ztar"
def get_measures_directory():
"""Return the directory of the current measures table in use."""
return str(pathlib.Path(IERS_ROOTDIR, "current").resolve())
def use_measures_directory(newdir):
"""Select a new set of measures tables to use.
......@@ -69,7 +69,7 @@ def use_measures_directory(newdir):
raise ValueError(f"Target is not an available measures directory: {newdir}")
# be sure newdir must point to a directory containing measures
for subdir in ['ephemerides', 'geodetic']:
for subdir in ["ephemerides", "geodetic"]:
subdir = pathlib.Path(newdir, subdir)
if not subdir.is_dir():
......@@ -81,6 +81,7 @@ def use_measures_directory(newdir):
current_symlink.unlink()
current_symlink.symlink_to(newdir)
def restart_python():
"""Force a restart this python program.
......@@ -91,13 +92,18 @@ def restart_python():
# NOTE: Python 3.4+ closes all file descriptors > 2 automatically, see https://www.python.org/dev/peps/pep-0446/
os.execv(exe_path, [exe_path.name] + sys.argv)
def get_available_measures_directories() -> list:
"""Returns the set of installed measures tables."""
return [str(d) for d in pathlib.Path(IERS_ROOTDIR).glob("IERS-*") if d.is_dir() and not d.is_symlink()]
return [
str(d)
for d in pathlib.Path(IERS_ROOTDIR).glob("IERS-*")
if d.is_dir() and not d.is_symlink()
]
def download_measures() -> str:
""" Download new measures and return the directory in which they were installed.
"""
"""Download new measures and return the directory in which they were installed."""
# create target directory for new measures
now = datetime.datetime.now()
......
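Putting the helpers above together, a typical refresh of the measures tables is sketched below: download a new set, point the "current" symlink at it, then restart so casacore drops its cached tables.

newdir = download_measures()
use_measures_directory(newdir)
restart_python()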
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR2.0 Station Control project.
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
import logging
import time
from datetime import datetime
from tango import DevFailed, DevState, Except, Util, EventType, DeviceProxy
from tangostationcontrol.common.lofar_logging import log_exceptions
from tangostationcontrol.configuration import ObservationSettings
......@@ -25,6 +20,7 @@ class RunningObservation(object):
@property
def class_name(self) -> str:
from tangostationcontrol.devices.observation import Observation
return Observation.__name__
@property
......@@ -53,7 +49,10 @@ class RunningObservation(object):
self._tango_util.create_device(self.class_name, f"{self.device_name}")
except DevFailed as ex:
logger.exception(ex)
if ex.args[0].desc == f"The device {self.device_name.lower()} is already defined in the database":
if (
ex.args[0].desc
== f"The device {self.device_name.lower()} is already defined in the database"
):
# and self.is_observation_running(self.observation_id) is False:
self._tango_util.delete_device(self.class_name, self.device_name)
error_string = f"Cannot create the Observation device {self.device_name} because it is already present in the Database but it is not running. Try to re-run the start_observation command"
......@@ -85,13 +84,15 @@ class RunningObservation(object):
# Turn on the polling for the attribute.
# Note that this is not automatically done despite the attribute
# having the right polling values set in the ctor.
self._device_proxy.poll_attribute(self.attribute_name.split('/')[-1], 1000)
self._device_proxy.poll_attribute(self.attribute_name.split("/")[-1], 1000)
# Right. Now subscribe to periodic events.
self._event_id = self._device_proxy.subscribe_event(self.attribute_name.split('/')[-1],
EventType.PERIODIC_EVENT,
cb)
logger.info(f"Successfully started an observation with ID={self.observation_id}.")
self._event_id = self._device_proxy.subscribe_event(
self.attribute_name.split("/")[-1], EventType.PERIODIC_EVENT, cb
)
logger.info(
f"Successfully started an observation with ID={self.observation_id}."
)
def shutdown(self):
# Check if the device has not terminated itself in the meanwhile.
......@@ -99,7 +100,8 @@ class RunningObservation(object):
self._device_proxy.ping()
except DevFailed:
logger.warning(
f"The device for the Observation with ID={self.observation_id} has unexpectedly already disappeared. It is advised to check the logs up to 10s prior to this message to see what happened.")
f"The device for the Observation with ID={self.observation_id} has unexpectedly already disappeared. It is advised to check the logs up to 10s prior to this message to see what happened."
)
else:
# Unsubscribe from the subscribed event.
event_id = self._event_id
......@@ -124,17 +126,21 @@ class RunningObservation(object):
remaining_wait_time = remaining_wait_time - sleep_time
# Check if the observation object is really in OFF state.
if stopped:
logger.info(f"Successfully stopped the observation with ID={self.observation_id}")
logger.info(
f"Successfully stopped the observation with ID={self.observation_id}"
)
else:
logger.warning(
f"Could not shut down the Observation device ( {self.device_name} ) for observation ID={self.observation_id}. This means that there is a chance for a memory leak. Will continue anyway and forcefully delete the Observation object.")
f"Could not shut down the Observation device ( {self.device_name} ) for observation ID={self.observation_id}. This means that there is a chance for a memory leak. Will continue anyway and forcefully delete the Observation object."
)
# Finally remove the device object from the Tango DB.
try:
self._tango_util.delete_device(self.class_name, self.device_name)
except DevFailed:
logger.warning(
f"Something went wrong when the device {self.device_name} was removed from the Tango DB. There is nothing that can be done about this here at this moment but you should check the Tango DB yourself.")
f"Something went wrong when the device {self.device_name} was removed from the Tango DB. There is nothing that can be done about this here at this moment but you should check the Tango DB yourself."
)
class ObservationController(object):
......@@ -166,7 +172,8 @@ class ObservationController(object):
if event.err:
# Something is fishy with this event.
logger.warning(
f"The Observation device {event.device} sent an event but the event signals an error. It is advised to check the logs for any indication that something went wrong in that device. Event data={event}")
f"The Observation device {event.device} sent an event but the event signals an error. It is advised to check the logs for any indication that something went wrong in that device. Event data={event}"
)
return
# Get the Observation ID from the sending device.
......@@ -177,7 +184,8 @@ class ObservationController(object):
if not running_obs:
# No obs is running???
logger.warning(
f"Received an observation_running event for the observation with ID={obs_id}. According to the records in ObservationControl, this observation is not supposed to run. Please check previous logs, especially around the time an observation with this ID was started. Will continue and ignore this event.")
f"Received an observation_running event for the observation with ID={obs_id}. According to the records in ObservationControl, this observation is not supposed to run. Please check previous logs, especially around the time an observation with this ID was started. Will continue and ignore this event."
)
return
if obs_id in running_obs:
......@@ -197,13 +205,14 @@ class ObservationController(object):
else:
# The observation that we are trying to process is not part of the running_obs dictionary
logger.warning(
f"Received an observation_running event for the observation with ID={obs_id}. According to the records in ObservationControl, this observation is not supposed to run. Please check previous logs, especially around the time an observation with this ID was started. Will continue and ignore this event.")
f"Received an observation_running event for the observation with ID={obs_id}. According to the records in ObservationControl, this observation is not supposed to run. Please check previous logs, especially around the time an observation with this ID was started. Will continue and ignore this event."
)
return
def start_observation(self, settings: ObservationSettings):
# Check further properties that cannot be validated through a JSON schema
if settings.stop_time <= datetime.now():
error = f"Cannot start an observation with ID={settings.observation_id} because the parameter stop_time parameter value=\"{settings.stop_time}\" is invalid. Set a stop_time parameter later in time than the start time."
error = f'Cannot start an observation with ID={settings.observation_id} because the parameter stop_time parameter value="{settings.stop_time}" is invalid. Set a stop_time parameter later in time than the start time.'
Except.throw_exception("IllegalCommand", error, __name__)
obs = RunningObservation(self._tango_domain, settings)
......@@ -227,7 +236,9 @@ class ObservationController(object):
except DevFailed as ex:
self._tango_util.delete_device(obs.class_name, obs.device_name)
error_string = "Cannot access the Observation device instance for observation ID=%s with device class name=%s and device instance name=%s. This means that the observation cannot be controlled and/or forcefully be stopped."
logger.exception(error_string, obs.observation_id, obs.class_name, obs.device_name)
logger.exception(
error_string, obs.observation_id, obs.class_name, obs.device_name
)
Except.re_throw_exception(ex, "DevFailed", error_string, __name__)
def stop_observation(self, obs_id):
......
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from tango import DevState
# The Device states in which we consider our device operational,
......
# -*- coding: utf-8 -*-
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from tango.utils import is_seq
......
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR2.0 Station Control project.
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from .observation_settings import ObservationSettings
from .pointing import Pointing
from .sap import Sap
__all__ = ['ObservationSettings', 'Pointing', 'Sap', ]
__all__ = [
"ObservationSettings",
"Pointing",
"Sap",
]
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from typing import Type
from jsonschema.exceptions import ValidationError
......@@ -13,6 +8,7 @@ from jsonschema.exceptions import ValidationError
def _from_json_hook_t(primary: Type):
from tangostationcontrol.configuration import Pointing, Sap, ObservationSettings
def actual_hook(json_dct):
primary_ex = None
for t in [Pointing, Sap, ObservationSettings]:
......
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR 2.0 Station Software
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
import json
import re
......@@ -17,10 +11,9 @@ import jsonschema
import requests
from jsonschema import Draft7Validator, FormatChecker, ValidationError
from jsonschema.validators import RefResolver
from tangostationcontrol.configuration._json_parser import _from_json_hook_t
T = TypeVar('T')
T = TypeVar("T")
def _fetch_url(url):
......@@ -39,7 +32,6 @@ def _fetch_url(url):
class RetryHttpRefResolver(RefResolver):
def resolve_remote(self, uri):
result = _fetch_url(uri)
......@@ -52,8 +44,11 @@ def _is_object(_, instance):
return isinstance(instance, dict) or issubclass(type(instance), _ConfigurationBase)
jsonschema.validators.Draft7Validator.TYPE_CHECKER = Draft7Validator.TYPE_CHECKER.redefine(
"object", _is_object,
jsonschema.validators.Draft7Validator.TYPE_CHECKER = (
Draft7Validator.TYPE_CHECKER.redefine(
"object",
_is_object,
)
)
......@@ -62,15 +57,19 @@ class _ConfigurationBase(ABC):
@staticmethod
def _class_to_url(cls_name):
return re.sub(r'(?<!^)(?=[A-Z])', '-', cls_name).lower()
return re.sub(r"(?<!^)(?=[A-Z])", "-", cls_name).lower()
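The regex above turns a CamelCase class name into the kebab-case schema file name, which get_validator (below) appends to BASE_URL with a .json suffix; for example:

import re

re.sub(r"(?<!^)(?=[A-Z])", "-", "ObservationSettings").lower()  # -> "observation-settings"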
@classmethod
def get_validator(cls):
name = cls.__name__
url = f"{_ConfigurationBase.BASE_URL}{_ConfigurationBase._class_to_url(name)}.json"
resolver = RetryHttpRefResolver(base_uri=_ConfigurationBase.BASE_URL, referrer=url)
resolver = RetryHttpRefResolver(
base_uri=_ConfigurationBase.BASE_URL, referrer=url
)
_, resolved = resolver.resolve(url)
return Draft7Validator(resolved, format_checker=FormatChecker(), resolver=resolver)
return Draft7Validator(
resolved, format_checker=FormatChecker(), resolver=resolver
)
@abstractmethod
def __iter__(self):
......@@ -102,5 +101,7 @@ class _ConfigurationBase(ABC):
def from_json(cls: Type[T], data: str) -> T:
s = json.loads(data, object_hook=_from_json_hook_t(cls))
if not isinstance(s, cls):
raise ValidationError(f"Unexpected type: expected <{cls.__class__.__name__}>, got <{type(s).__name__}>")
raise ValidationError(
f"Unexpected type: expected <{cls.__class__.__name__}>, got <{type(s).__name__}>"
)
return s
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR2.0 Station Control project.
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from datetime import datetime
from typing import Sequence
......@@ -14,9 +10,16 @@ from tangostationcontrol.configuration.sap import Sap
class ObservationSettings(_ConfigurationBase):
def __init__(self, observation_id: int, stop_time: datetime, antenna_mask: Sequence[int], filter: str,
def __init__(
self,
observation_id: int,
stop_time: datetime,
antenna_mask: Sequence[int],
filter: str,
SAPs: Sequence[Sap],
tile_beam: Pointing = None, first_beamlet: int = 0):
tile_beam: Pointing = None,
first_beamlet: int = 0,
):
self.observation_id = observation_id
self.stop_time = stop_time
self.antenna_mask = antenna_mask
......@@ -31,16 +34,20 @@ class ObservationSettings(_ConfigurationBase):
"stop_time": self.stop_time.isoformat(),
"antenna_mask": self.antenna_mask,
"filter": self.filter,
"SAPs" : [dict(s) for s in self.SAPs]
"SAPs": [dict(s) for s in self.SAPs],
}.items()
if self.tile_beam:
yield "tile_beam", dict(self.tile_beam)
yield "first_beamlet", self.first_beamlet
@staticmethod
def to_object(json_dct) -> 'ObservationSettings':
return ObservationSettings(json_dct['observation_id'], datetime.fromisoformat(json_dct['stop_time']),
json_dct['antenna_mask'],
json_dct['filter'], json_dct['SAPs'],
json_dct['tile_beam'] if 'tile_beam' in json_dct else None,
json_dct['first_beamlet'] if 'first_beamlet' in json_dct else 0)
def to_object(json_dct) -> "ObservationSettings":
return ObservationSettings(
json_dct["observation_id"],
datetime.fromisoformat(json_dct["stop_time"]),
json_dct["antenna_mask"],
json_dct["filter"],
json_dct["SAPs"],
json_dct["tile_beam"] if "tile_beam" in json_dct else None,
json_dct["first_beamlet"] if "first_beamlet" in json_dct else 0,
)
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR2.0 Station Control project.
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from tangostationcontrol.configuration.configuration_base import _ConfigurationBase
......@@ -11,7 +7,12 @@ from tangostationcontrol.configuration.configuration_base import _ConfigurationB
class Pointing(_ConfigurationBase):
VALIDATOR = None
def __init__(self, angle1=0.6624317181687094, angle2=1.5579526427549426, direction_type="J2000"):
def __init__(
self,
angle1=0.6624317181687094,
angle2=1.5579526427549426,
direction_type="J2000",
):
self.angle1 = angle1
self.angle2 = angle2
self.direction_type = direction_type
......@@ -20,9 +21,11 @@ class Pointing(_ConfigurationBase):
yield from {
"angle1": self.angle1,
"angle2": self.angle2,
"direction_type": self.direction_type
"direction_type": self.direction_type,
}.items()
@staticmethod
def to_object(json_dct) -> 'Pointing':
return Pointing(json_dct['angle1'], json_dct['angle2'], json_dct['direction_type'])
def to_object(json_dct) -> "Pointing":
return Pointing(
json_dct["angle1"], json_dct["angle2"], json_dct["direction_type"]
)
# -*- coding: utf-8 -*-
#
# This file is part of the LOFAR2.0 Station Control project.
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from tangostationcontrol.configuration.configuration_base import _ConfigurationBase
from tangostationcontrol.configuration.pointing import Pointing
......@@ -15,11 +11,8 @@ class Sap(_ConfigurationBase):
self.pointing = pointing
def __iter__(self):
yield from {
"subbands": self.subbands,
"pointing": dict(self.pointing)
}.items()
yield from {"subbands": self.subbands, "pointing": dict(self.pointing)}.items()
@staticmethod
def to_object(json_dct) -> 'Sap':
return Sap(json_dct['subbands'], json_dct['pointing'])
def to_object(json_dct) -> "Sap":
return Sap(json_dct["subbands"], json_dct["pointing"])
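A small construction and serialisation sketch with the configuration classes above. Sap's constructor signature is only partially visible in this diff and is assumed to be (subbands, pointing); the observation_id and filter values are illustrative.

import json
from datetime import datetime

pointing = Pointing()                    # J2000 defaults shown above
sap = Sap([102], pointing)               # assumed signature; 102 is DEFAULT_SUBBAND
settings = ObservationSettings(
    observation_id=12345,
    stop_time=datetime(2030, 1, 1),
    antenna_mask=[0, 1],
    filter="HBA_110_190",
    SAPs=[sap],
)
print(json.dumps(dict(settings)))        # __iter__ yields key/value pairs, so dict() works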
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
# -*- coding: utf-8 -*-
#
# This file is part of the RECV project
#
#
#
# Distributed under the terms of the APACHE license.
# See LICENSE.txt for more info.
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
""" APSCT Device Server for LOFAR2.0
"""
import logging
import numpy
from tango import AttrWriteType
# PyTango imports
from tango import DebugIt
from tango.server import command, attribute, device_property
from tango import AttrWriteType
import numpy
# Additional import
from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
from tangostationcontrol.common.entrypoint import entry
from tangostationcontrol.common.lofar_logging import device_logging_to_python
from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
from tangostationcontrol.devices.device_decorators import only_in_states
from tangostationcontrol.devices.opcua_device import OPCUADevice
import logging
# Additional import
logger = logging.getLogger()
__all__ = ["APSCT", "main"]
......@@ -39,53 +35,103 @@ class APSCT(OPCUADevice):
# -----------------
APSCTTR_monitor_rate_RW_default = device_property(
dtype='DevLong64',
mandatory=False,
default_value=1
dtype="DevLong64", mandatory=False, default_value=1
)
# ----- Timing values
APSCT_On_Off_timeout = device_property(
doc='Maximum amount of time to wait after turning APSCT on or off',
dtype='DevFloat',
doc="Maximum amount of time to wait after turning APSCT on or off",
dtype="DevFloat",
mandatory=False,
default_value=10.0
default_value=10.0,
)
TRANSLATOR_DEFAULT_SETTINGS = [
'APSCTTR_monitor_rate_RW'
]
TRANSLATOR_DEFAULT_SETTINGS = ["APSCTTR_monitor_rate_RW"]
# ----------
# Attributes
# ----------
APSCTTR_I2C_error_R = AttributeWrapper(comms_annotation=["APSCTTR_I2C_error_R" ],datatype=numpy.int64 )
APSCTTR_monitor_rate_RW = AttributeWrapper(comms_annotation=["APSCTTR_monitor_rate_RW" ],datatype=numpy.int64 , access=AttrWriteType.READ_WRITE)
APSCTTR_translator_busy_R = AttributeWrapper(comms_annotation=["APSCTTR_translator_busy_R" ],datatype=bool)
APSCT_INPUT_10MHz_good_R = AttributeWrapper(comms_annotation=["APSCT_INPUT_10MHz_good_R" ],datatype=bool)
APSCT_INPUT_PPS_good_R = AttributeWrapper(comms_annotation=["APSCT_INPUT_PPS_good_R" ],datatype=bool)
APSCT_PCB_ID_R = AttributeWrapper(comms_annotation=["APSCT_PCB_ID_R" ],datatype=numpy.int64 )
APSCT_PCB_number_R = AttributeWrapper(comms_annotation=["APSCT_PCB_number_R" ],datatype=str )
APSCT_PCB_version_R = AttributeWrapper(comms_annotation=["APSCT_PCB_version_R" ],datatype=str )
APSCT_PLL_160MHz_error_R = AttributeWrapper(comms_annotation=["APSCT_PLL_160MHz_error_R" ],datatype=bool)
APSCT_PLL_160MHz_locked_R = AttributeWrapper(comms_annotation=["APSCT_PLL_160MHz_locked_R" ],datatype=bool)
APSCT_PLL_200MHz_error_R = AttributeWrapper(comms_annotation=["APSCT_PLL_200MHz_error_R" ],datatype=bool)
APSCT_PLL_200MHz_locked_R = AttributeWrapper(comms_annotation=["APSCT_PLL_200MHz_locked_R" ],datatype=bool)
APSCT_PPS_ignore_R = AttributeWrapper(comms_annotation=["APSCT_PPS_ignore_R" ],datatype=bool)
APSCT_PPS_ignore_RW = AttributeWrapper(comms_annotation=["APSCT_PPS_ignore_RW" ],datatype=bool, access=AttrWriteType.READ_WRITE)
APSCT_PWR_CLKDIST1_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_CLKDIST1_3V3_R" ],datatype=numpy.float64)
APSCT_PWR_CLKDIST2_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_CLKDIST2_3V3_R" ],datatype=numpy.float64)
APSCT_PWR_CTRL_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_CTRL_3V3_R" ],datatype=numpy.float64)
APSCT_PWR_INPUT_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_INPUT_3V3_R" ],datatype=numpy.float64)
APSCT_PWR_on_R = AttributeWrapper(comms_annotation=["APSCT_PWR_on_R" ],datatype=bool)
APSCT_PWR_PLL_160MHz_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_160MHz_3V3_R"],datatype=numpy.float64)
APSCT_PWR_PLL_160MHz_on_R = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_160MHz_on_R" ],datatype=bool)
APSCT_PWR_PLL_200MHz_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_200MHz_3V3_R"],datatype=numpy.float64)
APSCT_PWR_PLL_200MHz_on_R = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_200MHz_on_R" ],datatype=bool)
APSCT_PWR_PPSDIST_3V3_R = AttributeWrapper(comms_annotation=["APSCT_PWR_PPSDIST_3V3_R" ],datatype=numpy.float64)
APSCT_TEMP_R = AttributeWrapper(comms_annotation=["APSCT_TEMP_R" ],datatype=numpy.float64)
APSCTTR_I2C_error_R = AttributeWrapper(
comms_annotation=["APSCTTR_I2C_error_R"], datatype=numpy.int64
)
APSCTTR_monitor_rate_RW = AttributeWrapper(
comms_annotation=["APSCTTR_monitor_rate_RW"],
datatype=numpy.int64,
access=AttrWriteType.READ_WRITE,
)
APSCTTR_translator_busy_R = AttributeWrapper(
comms_annotation=["APSCTTR_translator_busy_R"], datatype=bool
)
APSCT_INPUT_10MHz_good_R = AttributeWrapper(
comms_annotation=["APSCT_INPUT_10MHz_good_R"], datatype=bool
)
APSCT_INPUT_PPS_good_R = AttributeWrapper(
comms_annotation=["APSCT_INPUT_PPS_good_R"], datatype=bool
)
APSCT_PCB_ID_R = AttributeWrapper(
comms_annotation=["APSCT_PCB_ID_R"], datatype=numpy.int64
)
APSCT_PCB_number_R = AttributeWrapper(
comms_annotation=["APSCT_PCB_number_R"], datatype=str
)
APSCT_PCB_version_R = AttributeWrapper(
comms_annotation=["APSCT_PCB_version_R"], datatype=str
)
APSCT_PLL_160MHz_error_R = AttributeWrapper(
comms_annotation=["APSCT_PLL_160MHz_error_R"], datatype=bool
)
APSCT_PLL_160MHz_locked_R = AttributeWrapper(
comms_annotation=["APSCT_PLL_160MHz_locked_R"], datatype=bool
)
APSCT_PLL_200MHz_error_R = AttributeWrapper(
comms_annotation=["APSCT_PLL_200MHz_error_R"], datatype=bool
)
APSCT_PLL_200MHz_locked_R = AttributeWrapper(
comms_annotation=["APSCT_PLL_200MHz_locked_R"], datatype=bool
)
APSCT_PPS_ignore_R = AttributeWrapper(
comms_annotation=["APSCT_PPS_ignore_R"], datatype=bool
)
APSCT_PPS_ignore_RW = AttributeWrapper(
comms_annotation=["APSCT_PPS_ignore_RW"],
datatype=bool,
access=AttrWriteType.READ_WRITE,
)
APSCT_PWR_CLKDIST1_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_CLKDIST1_3V3_R"], datatype=numpy.float64
)
APSCT_PWR_CLKDIST2_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_CLKDIST2_3V3_R"], datatype=numpy.float64
)
APSCT_PWR_CTRL_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_CTRL_3V3_R"], datatype=numpy.float64
)
APSCT_PWR_INPUT_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_INPUT_3V3_R"], datatype=numpy.float64
)
APSCT_PWR_on_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_on_R"], datatype=bool
)
APSCT_PWR_PLL_160MHz_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_PLL_160MHz_3V3_R"], datatype=numpy.float64
)
APSCT_PWR_PLL_160MHz_on_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_PLL_160MHz_on_R"], datatype=bool
)
APSCT_PWR_PLL_200MHz_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_PLL_200MHz_3V3_R"], datatype=numpy.float64
)
APSCT_PWR_PLL_200MHz_on_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_PLL_200MHz_on_R"], datatype=bool
)
APSCT_PWR_PPSDIST_3V3_R = AttributeWrapper(
comms_annotation=["APSCT_PWR_PPSDIST_3V3_R"], datatype=numpy.float64
)
APSCT_TEMP_R = AttributeWrapper(
comms_annotation=["APSCT_TEMP_R"], datatype=numpy.float64
)
# ----------
# Summarising Attributes
......@@ -93,29 +139,46 @@ class APSCT(OPCUADevice):
APSCT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
def read_APSCT_error_R(self):
errors = [self.read_attribute("APSCTTR_I2C_error_R") > 0,
errors = [
self.read_attribute("APSCTTR_I2C_error_R") > 0,
self.alarm_val("APSCT_PCB_ID_R"),
not self.read_attribute("APSCT_INPUT_10MHz_good_R"),
not self.read_attribute("APSCT_INPUT_PPS_good_R") and not self.read_attribute("APSCT_PPS_ignore_R"),
not self.read_attribute("APSCT_PLL_160MHz_locked_R") and not self.read_attribute("APSCT_PLL_200MHz_locked_R"),
self.read_attribute("APSCT_PLL_200MHz_locked_R") and self.read_attribute("APSCT_PLL_200MHz_error_R"),
self.read_attribute("APSCT_PLL_160MHz_locked_R") and self.read_attribute("APSCT_PLL_160MHz_error_R")]
not self.read_attribute("APSCT_INPUT_PPS_good_R")
and not self.read_attribute("APSCT_PPS_ignore_R"),
not self.read_attribute("APSCT_PLL_160MHz_locked_R")
and not self.read_attribute("APSCT_PLL_200MHz_locked_R"),
self.read_attribute("APSCT_PLL_200MHz_locked_R")
and self.read_attribute("APSCT_PLL_200MHz_error_R"),
self.read_attribute("APSCT_PLL_160MHz_locked_R")
and self.read_attribute("APSCT_PLL_160MHz_error_R"),
]
return any(errors)
APSCT_TEMP_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed", polling_period=DEFAULT_POLLING_PERIOD)
APSCT_TEMP_error_R = attribute(
dtype=bool,
fisallowed="is_attribute_access_allowed",
polling_period=DEFAULT_POLLING_PERIOD,
)
APSCT_VOUT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
def read_APSCT_TEMP_error_R(self):
return (self.alarm_val("APSCT_TEMP_R"))
return self.alarm_val("APSCT_TEMP_R")
def read_APSCT_VOUT_error_R(self):
return ( self.alarm_val("APSCT_PWR_PPSDIST_3V3_R")
return (
self.alarm_val("APSCT_PWR_PPSDIST_3V3_R")
or self.alarm_val("APSCT_PWR_CLKDIST1_3V3_R")
or self.alarm_val("APSCT_PWR_CLKDIST2_3V3_R")
or self.alarm_val("APSCT_PWR_CTRL_3V3_R")
or self.alarm_val("APSCT_PWR_INPUT_3V3_R")
or (self.read_attribute("APSCT_PWR_PLL_160MHz_on_R") and self.alarm_val("APSCT_PWR_PLL_160MHz_3V3_R"))
or (self.read_attribute("APSCT_PWR_PLL_200MHz_on_R") and self.alarm_val("APSCT_PWR_PLL_200MHz_3V3_R"))
or (
self.read_attribute("APSCT_PWR_PLL_160MHz_on_R")
and self.alarm_val("APSCT_PWR_PLL_160MHz_3V3_R")
)
or (
self.read_attribute("APSCT_PWR_PLL_200MHz_on_R")
and self.alarm_val("APSCT_PWR_PLL_200MHz_3V3_R")
)
or (not self.read_attribute("APSCT_PWR_on_R"))
)
......@@ -128,22 +191,32 @@ class APSCT(OPCUADevice):
# Cycle clock
self.APSCT_off()
self.wait_attribute("APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout)
self.wait_attribute(
"APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout
)
self.APSCT_200MHz_on()
self.wait_attribute("APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout)
self.wait_attribute(
"APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout
)
if not self.read_attribute("APSCT_PLL_200MHz_locked_R"):
if self.read_attribute("APSCTTR_I2C_error_R"):
raise Exception("I2C is not working. Maybe power cycle subrack to restart CLK board and translator?")
raise Exception(
"I2C is not working. Maybe power cycle subrack to restart CLK board and translator?"
)
else:
raise Exception("200MHz signal is not locked. The subrack probably do not receive clock input or the CLK PCB is broken?")
raise Exception(
"200MHz signal is not locked. The subrack probably do not receive clock input or the CLK PCB is broken?"
)
def _disable_hardware(self):
"""Disable the APSCT hardware."""
# Turn off the APSCT
self.APSCT_off()
self.wait_attribute("APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout)
self.wait_attribute(
"APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout
)
# --------
# Commands
......