Commit b44aa656 authored by Jan David Mol

Merge branch 'L2SS-461_replace_percentage-S_with_f_in_logging' into 'master'

Resolve L2SS-461 "Replace percentage s with f in logging"

Closes L2SS-461

See merge request !188
parents ec6354fb ff5f26a6
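The change itself is mechanical: %-style (and str.format) interpolation in log and exception messages is rewritten as f-strings. A minimal sketch of the pattern, using hypothetical names rather than code from this repository:

import logging

logger = logging.getLogger(__name__)

def connect(host, port):
    # before: the message was built with %-interpolation, e.g.
    #   logger.debug("Connecting to host: %s, port: %s", host, port)
    # after: an f-string interpolates the values directly
    logger.debug(f"Connecting to host: {host}, port: {port}")

    if port <= 0:
        # exception messages follow the same pattern, keeping any "raise ... from e" chaining
        raise ValueError(f"Invalid port: {port}")

The hunks below apply this rewrite across the attribute wrapper, the OPC-UA and SNMP clients, the statistics client and collectors, and the test utilities.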
Showing with 76 additions and 130 deletions
@@ -29,7 +29,7 @@ class attribute_wrapper(attribute):
        # see also https://pytango.readthedocs.io/en/stable/server_api/server.html?highlight=devlong#module-tango.server for
        # more details about type conversion Python/numpy -> PyTango
        if "numpy" not in str(datatype) and datatype != str:
-            raise ValueError("Attribute needs to be a Tango-supported numpy or str type, but has type \"%s\"" % (datatype,))
+            raise ValueError(f"Attribute needs to be a Tango-supported numpy or str type, but has type {datatype}")
        """
        Numpy has a depracated string type called numpy.str_.
@@ -82,8 +82,7 @@ class attribute_wrapper(attribute):
            try:
                return device.value_dict[self]
            except Exception as e:
-                raise Exception("Attribute read_RW function error, attempted to read value_dict with key: `%s`, are you sure this exists?",
-                                self) from e
+                raise Exception(f"Attribute read_RW function error, attempted to read value_dict with key: `{self}`, are you sure this exists?") from e

        @only_in_states([DevState.STANDBY, DevState.ON], log=False)
        @fault_on_error()
@@ -106,7 +106,7 @@ class OPCUAConnection(AsyncCommClient):
            # so cannot be used here. see https://reference.opcfoundation.org/v104/Core/docs/Part6/7.1.3/
            _ = await self.client.get_namespace_array()
        except Exception as e:
-            raise IOError("Lost connection to server %s: %s", self._servername(), e)
+            raise IOError(f"Lost connection to server {self._servername()}: {e}")

    def get_node_path(self, annotation):
        """
@@ -116,13 +116,13 @@ class OPCUAConnection(AsyncCommClient):
        if isinstance(annotation, dict):
            # check if required path inarg is present
            if annotation.get('path') is None:
-                raise Exception("OPC-ua mapping requires a path argument in the annotation, was given: %s", annotation)
+                raise Exception(f"OPC-ua mapping requires a path argument in the annotation, was given: {annotation}")

            path = annotation.get("path") # required
        elif isinstance(annotation, list):
            path = annotation
        else:
-            raise Exception("OPC-ua mapping requires either a list of the path or dict with the path. Was given %s type containing: %s", type(annotation), annotation)
+            raise Exception(f"OPC-ua mapping requires either a list of the path or dict with the path. Was given {type(annotation)} type containing: {annotation}")

        # add path prefix
@@ -169,8 +169,8 @@ class OPCUAConnection(AsyncCommClient):
        try:
            node = await self.get_node(path)
        except Exception as e:
-            logger.exception("Could not get node: %s on server %s", path, self._servername())
-            raise Exception("Could not get node: %s on server %s", path, self._servername()) from e
+            logger.exception(f"Could not get node: {path} on server {self._servername()}")
+            raise Exception(f"Could not get node: {path} on server {self._servername()}") from e

        # get all the necessary data to set up the read/write functions from the attribute_wrapper
        dim_x = attribute.dim_x
@@ -109,7 +109,7 @@ class StatisticsClient(AsyncCommClient):
                def read_function():
                    return numpy.uint64(self._queue_fill_percentage(self.tcp.queue))
            else:
-                raise ValueError("Unknown queue parameter requested: %s" % parameter)
+                raise ValueError(f"Unknown queue parameter requested: {parameter}")
        elif annotation["type"] == "replicator":
            if parameter == "clients":
                def read_function():
@@ -124,7 +124,7 @@ class StatisticsClient(AsyncCommClient):
                def read_function():
                    return numpy.uint64(self.tcp.nof_tasks_pending)
            else:
-                raise ValueError("Unknown replicator parameter requested: %s" % parameter)
+                raise ValueError(f"Unknown replicator parameter requested: {parameter}")

        def write_function(value):
            """
@@ -346,7 +346,7 @@ class TCPReplicator(Thread, StatisticsClientThread):
    def clients(self):
        """ Return the list of connected clients. """
-        return ["%s:%s" % client.transport.get_extra_info('peername') for client in self._connected_clients]
+        return [f"{client.transport.get_extra_info('peername')}" for client in self._connected_clients]

    @property
    def nof_tasks_pending(self):
@@ -208,7 +208,7 @@ def log_exceptions(logger: logging.Logger=None):
            try:
                return func(self, *args, **kwargs)
            except Exception as e:
-                (logger or logging.getLogger()).exception("Unhandled exception: %s: %s", e.__class__.__name__, e)
+                (logger or logging.getLogger()).exception(f"Unhandled exception: {e.__class__.__name__}: {e}")

                # we can log but we cannot hide
                raise
@@ -19,9 +19,9 @@ def only_in_states(allowed_states, log=True):
                return func(self, *args, **kwargs)

            if log:
-                logger.warning("Illegal command: Function %s can only be called in states %s. Current state: %s" % (func.__name__, allowed_states, self.get_state()))
+                logger.warning(f"Illegal command: Function {func.__name__} can only be called in states {allowed_states}. Current state: {self.get_state()}")

-            Except.throw_exception("IllegalCommand", "Function can only be called in states %s. Current state: %s" % (allowed_states, self.get_state()), func.__name__)
+            Except.throw_exception(f"IllegalCommand: Function {func.__name__} can only be called in states {allowed_states}. Current state: {self.get_state()}")

        return state_check_wrapper
@@ -97,7 +97,8 @@ class SST(Statistics):
    nof_payload_errors_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(SSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
    # latest SSTs
    sst_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "sst_values"}, dims=(SSTCollector.MAX_SUBBANDS, SSTCollector.MAX_INPUTS), datatype=numpy.uint64)
-    # reported timestamp for each row in the latest SSTs
+    # reported timestamp
+    # for each row in the latest SSTs
    sst_timestamp_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "sst_timestamps"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.uint64)
    # integration interval for each row in the latest SSTs
    integration_interval_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "integration_intervals"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.float32)
@@ -158,11 +158,11 @@ class XSTCollector(StatisticsCollector):
        for antenna in (0,1):
            if fields.first_baseline[antenna] + fields.nof_signal_inputs >= self.MAX_INPUTS:
                # packet describes an input that is out of bounds for us
-                raise ValueError("Packet describes {0} x {0} baselines starting at {1}, but we are limited to describing MAX_INPUTS={2}".format(fields.nof_signal_inputs, fields.first_baseline, self.MAX_INPUTS))
+                raise ValueError(f"Packet describes {fields.nof_signal_inputs} x {fields.nof_signal_inputs} baselines starting at {fields.first_baseline}, but we are limited to describing MAX_INPUTS={self.MAX_INPUTS}")

            # the blocks of baselines need to be tightly packed, and thus be provided at exact intervals
            if fields.first_baseline[antenna] % self.BLOCK_LENGTH != 0:
-                raise ValueError("Packet describes baselines starting at %s, but we require a multiple of BLOCK_LENGTH=%d" % (fields.first_baseline, self.MAX_INPUTS))
+                raise ValueError(f"Packet describes baselines starting at {fields.first_baseline}, but we require a multiple of BLOCK_LENGTH={self.MAX_INPUTS}")

        # Make sure we always have a baseline (a,b) with a>=b. If not, we swap the indices and mark that the data must be conjugated and transposed when processed.
        first_baseline = fields.first_baseline
@@ -60,7 +60,7 @@ class SNMP_client(CommClient):
        """
        Try to connect to the client
        """
-        logger.debug("Connecting to community: %s, host: %s", self.community, self.host)
+        logger.debug(f"Connecting to community: {self.community}, host: {self.host}")

        self.connected = True
        return True
@@ -28,7 +28,7 @@ def timeit(method):
        sizeMb = process.memory_info().rss / 1024 / 1024
        sizeMbStr = "{0:,}".format(round(sizeMb, 2))
-        logger.debug('Time taken = %s, %s ,size = %s MB' % (e - s, method.__name__, sizeMbStr))
+        logger.debug(f'Time taken = {e - s}, {method.__name__} ,size = {sizeMbStr} MB')

        return RESULT
    return timed
@@ -20,7 +20,7 @@ class UDP_Server:
        server_address = (self.ip, self.port)
        s.bind(server_address)
        print("Do Ctrl+c to exit the program !!")
-        print("\n\n####### Server is listening on %s - port %s #######" % (self.ip,self.port))
+        print(f"\n\n####### Server is listening on {self.ip} - port {self.port} #######")

        while True:
@@ -7,8 +7,8 @@ UDP_IP = "127.0.0.1"
UDP_PORT = 5001
MESSAGE = "{}".format(i)

-print("UDP target IP: %s" % UDP_IP)
-print("UDP target port: %s" % UDP_PORT)
+print(f"UDP target IP: {UDP_IP}")
+print(f"UDP target port: {UDP_PORT}")

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # create UDP socket