Commit cbeaacef authored by Thomas Juerges

Merge branch 'master' into 2021-04-02T17.33.12-branched_from_master-Adjustments_for_attribute_wrapper
parents c48194b0 f215048e
1 merge request: !12 Adjustments and fixes for attribute_wrapper merge
Showing changed files with 205 additions and 8 deletions
@@ -26,9 +26,11 @@ import numpy
from wrappers import only_in_states, only_when_on, fault_on_error
from opcua_connection import OPCUAConnection
from lofar_logging import device_logging_to_python, log_exceptions
__all__ = ["SDP", "main"]
@device_logging_to_python({"device": "SDP"})
class SDP(Device):
"""
@@ -192,6 +194,8 @@ class SDP(Device):
self.info_stream("Mapping OPC-UA MP/CP to attributes done.")
@log_exceptions()
@DebugIt()
def init_device(self):
""" Instantiates the device in the OFF state. """
@@ -200,6 +204,7 @@ class SDP(Device):
self.set_state(DevState.OFF)
@log_exceptions()
def initialise(self):
"""Initialises the attributes and properties of the SDP."""
@@ -475,7 +480,6 @@ class SDP(Device):
"""
self.set_state(DevState.FAULT)
# ----------
# Run server
# ----------
@@ -17,6 +17,7 @@ from tango import DevState, DebugIt
# Additional import
from src.attribute_wrapper import *
from src.lofar_logging import device_logging_to_python, log_exceptions
__all__ = ["hardware_device"]
@@ -24,6 +25,7 @@ __all__ = ["hardware_device"]
from src.wrappers import only_in_states
@device_logging_to_python({"device": "SDP"})
class hardware_device(Device):
"""
@@ -59,6 +61,7 @@ class hardware_device(Device):
self.value_dict = {i: i.initial_value() for i in self.attr_list()}
@log_exceptions()
def init_device(self):
""" Instantiates the device in the OFF state. """
@@ -162,6 +165,7 @@ class hardware_device(Device):
"""Method always executed before any TANGO command is executed."""
pass
@log_exceptions()
def delete_device(self):
"""Hook to delete resources allocated in init_device.
import logging
from functools import wraps
# Always also log the hostname because it makes the origin of the log clear.
import socket
hostname = socket.gethostname()
def configure_logger(logger: logging.Logger, log_extra=None):
logger.setLevel(logging.DEBUG)
try:
from logstash_async.handler import AsynchronousLogstashHandler, LogstashFormatter
# log to the tcp_input of logstash in our ELK stack
handler = AsynchronousLogstashHandler("elk", 5959, database_path='pending_log_messages.db')
# configure log messages
formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"])
handler.setFormatter(formatter)
# install the handler
logger.addHandler(handler)
# for now, also log to stderr
# Set up logging in a way that it can be understood by a human reader, be
# easily grep'ed, be parsed with a couple of shell commands and
# easily fed into a Kibana/Elasticsearch system.
handler = logging.StreamHandler()
formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" PID="%(process)d" TNAME="%(threadName)s" TID="%(thread)d" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S')
handler.setFormatter(formatter)
logger.addHandler(handler)
except Exception:
logger.exception("Cannot import or configure logstash_async module, not forwarding logs to ELK stack.")
return logger
def device_logging_to_python(log_extra: dict = None):
""" Call this on a Tango Device instance or class to have your Tango Device log to python instead. """
def inner(cls):
# Create a logger that logs to ELK, dedicated for this class
logger = logging.getLogger(cls.__name__)
configure_logger(logger, log_extra)
# Monkey patch the python logger to replace the tango logger
cls.debug_stream = logger.debug
cls.info_stream = logger.info
cls.warn_stream = logger.warning
cls.warning_stream = logger.warning
cls.error_stream = logger.error
cls.fatal_stream = logger.fatal
cls.critical_stream = logger.critical
return cls
return inner
def log_exceptions():
""" Decorator that logs all exceptions that the function raises. """
def wrapper(func):
@wraps(func)
def inner(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
except Exception as e:
self.error_stream("Caught exception: %s: %s", e.__class__.__name__, e, exc_info=1)
raise e
return inner
return wrapper
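For orientation, a minimal usage sketch (not part of this commit) of how a Tango device combines the two decorators above, mirroring the SDP changes earlier in this diff; the device name "Example" and the log message are illustrative only:

from tango.server import Device
from lofar_logging import device_logging_to_python, log_exceptions

# Route all of this device's *_stream calls to a Python logger (and thus to ELK).
@device_logging_to_python({"device": "Example"})
class Example(Device):

    # Any exception escaping init_device is logged before being re-raised.
    @log_exceptions()
    def init_device(self):
        super().init_device()
        self.info_stream("init_device finished")  # handled by the patched logger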
@@ -82,7 +82,7 @@ endif
DOCKER_COMPOSE_ARGS := DISPLAY=$(DISPLAY) XAUTHORITY=$(XAUTHORITY) TANGO_HOST=$(TANGO_HOST) \
NETWORK_MODE=$(NETWORK_MODE) XAUTHORITY_MOUNT=$(XAUTHORITY_MOUNT) TANGO_SKA_CONTAINER_MOUNT=$(TANGO_SKA_CONTAINER_MOUNT) TANGO_LOFAR_CONTAINER_MOUNT=$(TANGO_LOFAR_CONTAINER_MOUNT) TANGO_LOFAR_CONTAINER_DIR=${TANGO_LOFAR_CONTAINER_DIR} MYSQL_HOST=$(MYSQL_HOST) \
-    CONTAINER_NAME_PREFIX=$(CONTAINER_NAME_PREFIX) COMPOSE_IGNORE_ORPHANS=true
+    CONTAINER_NAME_PREFIX=$(CONTAINER_NAME_PREFIX) COMPOSE_IGNORE_ORPHANS=true CONTAINER_EXECUTION_UID=$(shell id -u)
.PHONY: up down minimal start stop status clean pull help
@@ -67,4 +67,5 @@ services:
- ${TANGO_LOFAR_CONTAINER_MOUNT}
- ${HOME}:/hosthome
- ../docker/tango/tango-archiver:/tango-archiver
+    restart: on-failure
@@ -21,11 +21,12 @@ services:
- ${TANGO_LOFAR_CONTAINER_MOUNT}
environment:
- TANGO_HOST=${TANGO_HOST}
+    user: ${CONTAINER_EXECUTION_UID}
entrypoint:
- /usr/local/bin/wait-for-it.sh
- ${TANGO_HOST}
- --timeout=30
- --strict
- --
-      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/PCC/PCC LTS -v
+      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/devices/PCC.py LTS -v
restart: on-failure
@@ -21,11 +21,12 @@ services:
- ${TANGO_LOFAR_CONTAINER_MOUNT}
environment:
- TANGO_HOST=${TANGO_HOST}
+    user: ${CONTAINER_EXECUTION_UID}
entrypoint:
- /usr/local/bin/wait-for-it.sh
- ${TANGO_HOST}
- --timeout=30
- --strict
- --
-      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/SDP/SDP LTS -v
+      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/devices/SDP.py LTS -v
restart: on-failure
FROM ubuntu:20.04
# Expand VM settings as required by ELK
CMD bash -c "if [ -w /proc/sys/vm/max_map_count ] && [ $(cat /proc/sys/vm/max_map_count) -lt 262144 ]; then echo 262144 > /proc/sys/vm/max_map_count; fi"
#
# Docker compose file that launches an ELK stack.
# See https://elk-docker.readthedocs.io/
#
# Defines:
# - elk-configure-host: Configures the host's kernel to be able to use the ELK stack
# - elk: ELK stack
#
version: '2'
volumes:
elk-data: {}
services:
elk-configure-host:
image: elk-configure-host
build:
context: elk-configure-host
container_name: ${CONTAINER_NAME_PREFIX}elk-configure-host
network_mode: ${NETWORK_MODE}
privileged: true
elk:
image: elk
build:
context: elk
container_name: ${CONTAINER_NAME_PREFIX}elk
network_mode: ${NETWORK_MODE}
volumes:
- elk-data:/var/lib/elasticsearch
ports:
- "5601:5601" # kibana
- "9200:9200" # elasticsearch
- "5044:5044" # logstash beats input
- "1514:1514" # logstash syslog input
- "5959:5959" # logstash tcp json input
depends_on:
- elk-configure-host
FROM sebp/elk
# Give Elasticsearch more time to start up on our poor dev laptops
ENV ES_CONNECT_RETRY=60
# Provide our logstash config
ADD logstash /etc/logstash/
# Provide our kibana config
#
# For now:
# 1. Start the container,
# 2. Make sure there are log messages in the database (to populate their patterns),
# 3. Run in this directory
# $ curl -X POST http://localhost:5601/api/saved_objects/_import -H "kbn-xsrf: true" --form file=@kibana/default-objects.ndjson
#
# See also https://www.elastic.co/guide/en/kibana/7.x/saved-objects-api-import.html
# and https://github.com/Bitergia/archimedes
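The same import can be scripted; as a rough Python equivalent of the curl call above (assuming the requests package is available and Kibana is reachable on localhost:5601, as in the comment):

import requests

# Import the saved objects shipped in this directory into Kibana.
with open("kibana/default-objects.ndjson", "rb") as f:
    response = requests.post(
        "http://localhost:5601/api/saved_objects/_import",
        headers={"kbn-xsrf": "true"},  # Kibana rejects API requests without this header
        files={"file": ("default-objects.ndjson", f)},
    )
response.raise_for_status()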
{"attributes":{"fieldAttrs":"{\"level\":{\"count\":1},\"message\":{\"count\":1},\"program\":{\"count\":2}}","fields":"[]","timeFieldName":"@timestamp","title":"logstash-*"},"id":"5340ad20-923d-11eb-9cc8-2fc0b321a697","migrationVersion":{"index-pattern":"7.11.0"},"references":[],"type":"index-pattern","updated_at":"2021-03-31T18:16:04.716Z","version":"WzE0NiwxXQ=="}
{"attributes":{"columns":["extra.device","level","message"],"description":"","hits":0,"kibanaSavedObjectMeta":{"searchSourceJSON":"{\"highlightAll\":true,\"version\":true,\"query\":{\"query\":\"\",\"language\":\"kuery\"},\"filter\":[],\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\"}"},"sort":[],"title":"Last hour","version":1},"id":"cf4947d0-924f-11eb-9cc8-2fc0b321a697","migrationVersion":{"search":"7.9.3"},"references":[{"id":"5340ad20-923d-11eb-9cc8-2fc0b321a697","name":"kibanaSavedObjectMeta.searchSourceJSON.index","type":"index-pattern"}],"type":"search","updated_at":"2021-03-31T18:35:04.269Z","version":"WzE2NywxXQ=="}
{"exportedCount":2,"missingRefCount":0,"missingReferences":[]}
\ No newline at end of file
input {
beats {
port => 5044
ssl => true
ssl_certificate => "/etc/pki/tls/certs/logstash-beats.crt"
ssl_key => "/etc/pki/tls/private/logstash-beats.key"
}
}
input {
syslog {
port => 1514
}
}
input {
tcp {
port => 5959
codec => json
}
}
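A quick way to exercise this input is to push one newline-terminated JSON record into port 5959 by hand; a throwaway test sketch (the host name "elk" matches the handler in lofar_logging.py, all field values are illustrative):

import json
import socket

record = {"level": "INFO", "message": "logstash smoke test", "extra": {"device": "SDP"}}

# The json codec decodes the payload into a single logstash event.
with socket.create_connection(("elk", 5959)) as conn:
    conn.sendall((json.dumps(record) + "\n").encode("utf-8"))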
filter {
if [type] == "syslog" {
grok {
match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
syslog_pri { }
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
}
}
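For reference, a made-up syslog line like the following would match the grok pattern above, yielding syslog_timestamp, syslog_hostname, syslog_program, syslog_pid and syslog_message:

Apr  2 17:33:12 lofar-host device-server[1234]: OPC-UA connection established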
filter {
if [type] == "nginx-access" {
grok {
match => { "message" => "%{NGINXACCESS}" }
}
}
}
output {
elasticsearch {
hosts => ["localhost"]
manage_template => false
index => "logstash-%{+YYYY.MM.dd}"
}
}
@@ -28,6 +28,7 @@ services:
- TANGO_HOST=${TANGO_HOST}
- XAUTHORITY=${XAUTHORITY}
- DISPLAY=${DISPLAY}
+      user: ${CONTAINER_EXECUTION_UID}
stdin_open: true
tty: true
entrypoint:
@@ -8,8 +8,8 @@ RUN sudo apt-get -y update && \
sudo apt-get -y install apt-file apt-transport-https apt-utils aptitude && \
sudo aptitude -y install htop iftop iproute2 mc most net-tools tcpdump telnet tmux traceroute vim xterm && \
sudo aptitude clean && \
-    sudo aptitude autoclean && \
-    sudo pip3 install "opcua >= 0.98.9" asyncua astropy && \
-    sudo pip3 uninstall -y jedi parso && \
-    sudo pip3 install "parso == 0.7.1" "jedi == 0.17.2"
+    sudo aptitude autoclean
+COPY lofar-requirements.txt /lofar-requirements.txt
+RUN sudo pip3 install -r /lofar-requirements.txt && \
+    sudo pip3 cache purge
parso == 0.7.1
jedi == 0.17.2
opcua >= 0.98.9
asyncua
astropy