Commit f215048e authored by Thomas Juerges

Merge branch '2021-03-16-Branched_from_master-ELK-stack' into 'master'

Resolve #2021 "03 16 branched from master elk stack"

See merge request !7
parents f2c573c0 52d916bf
Showing 227 additions and 3 deletions
@@ -26,9 +26,11 @@ import numpy
from wrappers import only_in_states, only_when_on, fault_on_error
from opcua_connection import OPCUAConnection
from lofar_logging import device_logging_to_python, log_exceptions

__all__ = ["SDP", "main"]

@device_logging_to_python({"device": "SDP"})
class SDP(Device):
    """
@@ -192,6 +194,8 @@ class SDP(Device):
        self.info_stream("Mapping OPC-UA MP/CP to attributes done.")

    @log_exceptions()
    @DebugIt()
    def init_device(self):
        """ Instantiates the device in the OFF state. """
@@ -200,6 +204,7 @@ class SDP(Device):
        self.set_state(DevState.OFF)

    @log_exceptions()
    def initialise(self):
        """Initialises the attributes and properties of the SDP."""
@@ -475,7 +480,6 @@ class SDP(Device):
        """
        self.set_state(DevState.FAULT)

# ----------
# Run server
# ----------
...
@@ -17,6 +17,7 @@ from tango import DevState, DebugIt
# Additional import
from src.attribute_wrapper import *
from src.lofar_logging import device_logging_to_python, log_exceptions

__all__ = ["hardware_device"]
@@ -24,6 +25,7 @@ __all__ = ["hardware_device"]
from src.wrappers import only_in_states

@device_logging_to_python({"device": "SDP"})
class hardware_device(Device):
    """
@@ -59,6 +61,7 @@ class hardware_device(Device):
        self.value_dict = {i: i.initial_value() for i in self.attr_list()}

    @log_exceptions()
    def init_device(self):
        """ Instantiates the device in the OFF state. """
@@ -162,6 +165,7 @@ class hardware_device(Device):
        """Method always executed before any TANGO command is executed."""
        pass

    @log_exceptions()
    def delete_device(self):
        """Hook to delete resources allocated in init_device.
...
import logging
from functools import wraps
# Always also log the hostname because it makes the origin of the log clear.
import socket
hostname = socket.gethostname()
def configure_logger(logger: logging.Logger, log_extra=None):
    logger.setLevel(logging.DEBUG)

    # Always log to stderr as well, in a format that can be understood by a
    # human reader, easily grep'ed, parsed with a couple of shell commands,
    # and fed into a Kibana/Elasticsearch system.
    handler = logging.StreamHandler()
    formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" PID="%(process)d" TNAME="%(threadName)s" TID="%(thread)d" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(hostname), datefmt='%Y-%m-%dT%H:%M:%S')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    try:
        from logstash_async.handler import AsynchronousLogstashHandler
        from logstash_async.formatter import LogstashFormatter

        # log to the tcp input of logstash in our ELK stack
        handler = AsynchronousLogstashHandler("elk", 5959, database_path='pending_log_messages.db')

        # configure the log messages
        formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"])
        handler.setFormatter(formatter)

        # install the handler
        logger.addHandler(handler)
    except Exception:
        logger.exception("Cannot import or configure the logstash_async module, not forwarding logs to the ELK stack.")

    return logger

def device_logging_to_python(log_extra: dict = None):
    """ Class decorator that redirects a Tango Device's log streams to the Python logging framework. """

    def inner(cls):
        # Create a logger dedicated to this class, which also logs to ELK
        logger = logging.getLogger(cls.__name__)
        configure_logger(logger, log_extra)

        # Monkey-patch the Tango log streams to forward to the Python logger
        cls.debug_stream = logger.debug
        cls.info_stream = logger.info
        cls.warn_stream = logger.warning
        cls.warning_stream = logger.warning
        cls.error_stream = logger.error
        cls.fatal_stream = logger.fatal
        cls.critical_stream = logger.critical

        return cls

    return inner

def log_exceptions():
    """ Decorator factory: the returned decorator logs any exception the wrapped method raises, then re-raises it. """

    def wrapper(func):
        @wraps(func)
        def inner(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except Exception as e:
                # assumes error_stream forwards to a Python logger that accepts
                # exc_info, as set up by device_logging_to_python above
                self.error_stream("Caught exception: %s: %s", e.__class__.__name__, e, exc_info=True)
                raise

        return inner

    return wrapper
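
Taken together, a device class opts in to this logging as follows — a minimal sketch, assuming a PyTango environment with the module above on the path; the device name is only an illustration:

from tango.server import Device
from lofar_logging import device_logging_to_python, log_exceptions

@device_logging_to_python({"device": "ExampleDevice"})  # hypothetical device name
class ExampleDevice(Device):

    @log_exceptions()
    def init_device(self):
        # logs to stderr, and to the ELK stack if logstash_async is available
        self.info_stream("Initialising ExampleDevice.")
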
FROM ubuntu:20.04
# Raise the kernel's vm.max_map_count, as required by the ELK stack's Elasticsearch
CMD bash -c "if [ -w /proc/sys/vm/max_map_count ] && [ $(cat /proc/sys/vm/max_map_count) -lt 262144 ]; then echo 262144 > /proc/sys/vm/max_map_count; fi"
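
The same check can be expressed in Python — a sketch of the logic in the CMD above, assuming a Linux host; 262144 is the documented Elasticsearch minimum:

REQUIRED = 262144  # minimum vm.max_map_count that Elasticsearch needs

with open("/proc/sys/vm/max_map_count") as f:
    current = int(f.read())

if current < REQUIRED:
    # writing requires privileges, hence the privileged container below
    with open("/proc/sys/vm/max_map_count", "w") as f:
        f.write(str(REQUIRED))
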
#
# Docker compose file that launches an ELK stack.
# See https://elk-docker.readthedocs.io/
#
# Defines:
# - elk-configure-host: Configures the host's kernel to be able to run the ELK stack
# - elk: ELK stack
#
version: '2'

volumes:
  elk-data: {}

services:
  elk-configure-host:
    image: elk-configure-host
    build:
      context: elk-configure-host
    container_name: ${CONTAINER_NAME_PREFIX}elk-configure-host
    network_mode: ${NETWORK_MODE}
    privileged: true

  elk:
    image: elk
    build:
      context: elk
    container_name: ${CONTAINER_NAME_PREFIX}elk
    network_mode: ${NETWORK_MODE}
    volumes:
      - elk-data:/var/lib/elasticsearch
    ports:
      - "5601:5601"  # kibana
      - "9200:9200"  # elasticsearch
      - "5044:5044"  # logstash beats input
      - "1514:1514"  # logstash syslog input
      - "5959:5959"  # logstash tcp json input
    depends_on:
      - elk-configure-host
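
Once the stack is up, the published ports can be probed from the host — a minimal sketch, assuming a network mode that exposes the ports on localhost:

import socket

# the service ports published by the elk container above
PORTS = {
    5601: "kibana",
    9200: "elasticsearch",
    5044: "logstash beats input",
    1514: "logstash syslog input",
    5959: "logstash tcp json input",
}

for port, name in PORTS.items():
    try:
        socket.create_connection(("localhost", port), timeout=2).close()
        print(f"{name} ({port}): reachable")
    except OSError as exc:
        print(f"{name} ({port}): not reachable ({exc})")
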
FROM sebp/elk
# Give Elasticsearch more time to start up on our poor dev laptops
ENV ES_CONNECT_RETRY=60
# Provide our logstash config
ADD logstash /etc/logstash/
# Provide our kibana config
#
# For now:
# 1. Start the container,
# 2. Make sure there are log messages in Elasticsearch (so the index patterns can be populated),
# 3. Run in this directory
# $ curl -X POST http://localhost:5601/api/saved_objects/_import -H "kbn-xsrf: true" --form file=@kibana/default-objects.ndjson
#
# See also https://www.elastic.co/guide/en/kibana/7.x/saved-objects-api-import.html
# and https://github.com/Bitergia/archimedes
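
The same import can be scripted instead of using curl — a minimal sketch with the requests library, assuming Kibana is reachable on localhost:5601:

import requests

# import the saved objects (index pattern and search) into Kibana
with open("kibana/default-objects.ndjson", "rb") as f:
    response = requests.post(
        "http://localhost:5601/api/saved_objects/_import",
        headers={"kbn-xsrf": "true"},  # Kibana requires this header on API writes
        files={"file": ("default-objects.ndjson", f)},
    )

response.raise_for_status()
print(response.json())
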
{"attributes":{"fieldAttrs":"{\"level\":{\"count\":1},\"message\":{\"count\":1},\"program\":{\"count\":2}}","fields":"[]","timeFieldName":"@timestamp","title":"logstash-*"},"id":"5340ad20-923d-11eb-9cc8-2fc0b321a697","migrationVersion":{"index-pattern":"7.11.0"},"references":[],"type":"index-pattern","updated_at":"2021-03-31T18:16:04.716Z","version":"WzE0NiwxXQ=="}
{"attributes":{"columns":["extra.device","level","message"],"description":"","hits":0,"kibanaSavedObjectMeta":{"searchSourceJSON":"{\"highlightAll\":true,\"version\":true,\"query\":{\"query\":\"\",\"language\":\"kuery\"},\"filter\":[],\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\"}"},"sort":[],"title":"Last hour","version":1},"id":"cf4947d0-924f-11eb-9cc8-2fc0b321a697","migrationVersion":{"search":"7.9.3"},"references":[{"id":"5340ad20-923d-11eb-9cc8-2fc0b321a697","name":"kibanaSavedObjectMeta.searchSourceJSON.index","type":"index-pattern"}],"type":"search","updated_at":"2021-03-31T18:35:04.269Z","version":"WzE2NywxXQ=="}
{"exportedCount":2,"missingRefCount":0,"missingReferences":[]}
\ No newline at end of file
input {
  beats {
    port => 5044
    ssl => true
    ssl_certificate => "/etc/pki/tls/certs/logstash-beats.crt"
    ssl_key => "/etc/pki/tls/private/logstash-beats.key"
  }
}

input {
  syslog {
    port => 1514
  }
}

input {
  tcp {
    port => 5959
    codec => json
  }
}

filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}

filter {
  if [type] == "nginx-access" {
    grok {
      match => { "message" => "%{NGINXACCESS}" }
    }
  }
}

output {
  elasticsearch {
    hosts => ["localhost"]
    manage_template => false
    index => "logstash-%{+YYYY.MM.dd}"
  }
}
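
To verify the pipeline end to end, a JSON event can be written straight to the tcp input — a minimal sketch, assuming the stack runs locally with port 5959 mapped as in the compose file; the field names are illustrative:

import json
import socket
from datetime import datetime, timezone

# one JSON document; the tcp input above decodes it with the json codec
event = {
    "@timestamp": datetime.now(timezone.utc).isoformat(),
    "message": "test message for the tcp json input",
    "level": "INFO",
}

with socket.create_connection(("localhost", 5959)) as conn:
    conn.sendall((json.dumps(event) + "\n").encode("utf-8"))
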
@@ -34,5 +34,5 @@ services:
      - --timeout=30
      - --strict
      - --
-     - /usr/bin/tini -- jupyter notebook --port=8888 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
+     - /usr/bin/tini -- /usr/local/bin/jupyter-notebook --port=8888 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
    restart: on-failure
@@ -16,6 +16,10 @@ COPY ipython-profiles /opt/ipython-profiles/
RUN sudo chown tango.tango -R /opt/ipython-profiles
COPY jupyter-kernels /usr/local/share/jupyter/kernels/

# Install the patched jupyter executable
RUN sudo pip3 install python-logstash-async
COPY jupyter-notebook /usr/local/bin/jupyter-notebook

# Add Tini. Tini operates as a process subreaper for jupyter. This prevents kernel crashes.
ENV TINI_VERSION v0.6.0
ENV JUPYTER_RUNTIME_DIR=/tmp
...
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# A patched version of the `jupyter-notebook' executable, adjusted to:
# - log to the ELK stack
#
# We go straight for the notebook executable here, because the "jupyter"
# command execvp's into the requested notebook subcommand, erasing all
# configuration we set here.
import re
import sys
from notebook.notebookapp import main
from logstash_async.handler import AsynchronousLogstashHandler
import logging
if __name__ == '__main__':
    # log to the tcp input of logstash in our ELK stack
    handler = AsynchronousLogstashHandler("elk", 5959, database_path='/tmp/pending_log_messages.db')

    # Attach it to the logger of the Jupyter traitlets Application. Since that
    # logger is configured not to propagate messages upward, we have to
    # configure it directly.
    logger = logging.getLogger("NotebookApp")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -2,3 +2,6 @@ FROM nexus.engageska-portugal.pt/ska-docker/tango-itango:latest
COPY lofar-requirements.txt /lofar-requirements.txt
RUN sudo pip3 install -r /lofar-requirements.txt

ENV TANGO_LOG_PATH=/var/log/tango
RUN sudo mkdir -p /var/log/tango && sudo chmod a+rwx /var/log/tango
opcua >= 0.98.9
astropy
python-logstash-async