Commit 19a22235 authored by Stefano Di Frischia

Merge branch 'master' into L2SS-970-add-loki-instance

parents ed189cf4 6c960fa9
1 merge request: !447 Resolve L2SS-970 "Add loki instance"
Showing changed files with 763 additions and 1277 deletions
[submodule "tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python"]
path = tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python
url = https://gitlab.com/tango-controls/hdbpp/libhdbpp-python.git
[submodule "docker-compose/grafana/dashboards"]
path = docker-compose/grafana/dashboards
url = https://git.astron.nl/lofar2.0/grafana-station-dashboards.git
@@ -117,7 +117,7 @@
"24",
"24"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.1195951054", "-0.7919544517", "0.5987530018",
" 0.9928227484", "-0.0954186800", "0.0720990002",
" 0.0000330969", " 0.6030782884", "0.7976820024"
@@ -229,7 +229,7 @@
"24",
"24"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.1195951054", "-0.7919544517", "0.5987530018",
" 0.9928227484", "-0.0954186800", "0.0720990002",
" 0.0000330969", " 0.6030782884", "0.7976820024"
@@ -341,7 +341,7 @@
"24",
"24"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.1195951054", "-0.7919544517", "0.5987530018",
" 0.9928227484", "-0.0954186800", "0.0720990002",
" 0.0000330969", " 0.6030782884", "0.7976820024"
@@ -453,7 +453,7 @@
"24",
"24"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.1195951054", "-0.7919544517", "0.5987530018",
" 0.9928227484", "-0.0954186800", "0.0720990002",
" 0.0000330969", " 0.6030782884", "0.7976820024"
......
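The PQR_to_ETRS_rotation_matrix properties renamed above rotate antenna offsets from the station-local PQR frame into ETRS offsets. As an illustration only (not code from this commit), a minimal sketch of how such a matrix is conventionally applied, assuming the offsets are metres in the PQR frame and reusing the Antenna_Field_Reference_ETRS values that appear later in this diff:

import numpy as np

# Rotation matrix as listed in the property above (strings there, floats here).
pqr_to_etrs = np.array([
    [-0.1195951054, -0.7919544517, 0.5987530018],
    [ 0.9928227484, -0.0954186800, 0.0720990002],
    [ 0.0000330969,  0.6030782884, 0.7976820024],
])

# Hypothetical antenna offset in the local PQR frame (metres).
pqr_offset = np.array([1.25, -0.75, 0.0])

# Rotate into ETRS deltas and add the field reference position
# (values taken from an Antenna_Field_Reference_ETRS property further below).
reference_etrs = np.array([3826923.942, 460915.117, 5064643.229])
antenna_etrs = reference_etrs + pqr_to_etrs @ pqr_offset
print(antenna_etrs)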
@@ -188,10 +188,20 @@
"H36", "H37", "H38", "H39", "H40", "H41",
"H42", "H43", "H44", "H45", "H46", "H47"
],
"Antenna_Cables": [
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "115m"
],
"Control_to_RECV_mapping": [
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "1", "25", "1", "27", "1", "29", "0", "-1",
"0", "-1", "0", "-1", "1", "31", "1", "33", "1", "35", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
@@ -200,8 +210,8 @@
],
"Power_to_RECV_mapping": [
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "1", "24", "1", "26", "1", "28", "0", "-1",
"0", "-1", "0", "-1", "1", "30", "1", "32", "1", "34", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
@@ -210,8 +220,8 @@
],
"Antenna_to_SDP_Mapping": [
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "3", "0", "3", "1", "3", "2", "0", "-1",
"0", "-1", "0", "-1", "3", "3", "3", "4", "3", "5", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
@@ -314,59 +324,77 @@
"L84", "L85", "L86", "L87", "L88", "L89",
"L90", "L91", "L92", "L93", "L94", "L95"
],
"Antenna_Cables": [
"80m", "80m", "80m", "80m", "80m", "80m",
"80m", "80m", "80m", "80m", "80m", "80m",
"115m", "80m", "80m", "80m", "80m", "80m",
"80m", "80m", "80m", "80m", "80m", "80m",
"115m", "115m", "115m", "80m", "80m", "80m",
"80m", "80m", "80m", "80m", "80m", "80m",
"80m", "80m", "80m", "80m", "115m", "115m",
"115m", "80m", "80m", "80m", "80m", "80m",
"80m", "80m", "115m", "50m", "50m", "80m",
"80m", "80m", "115m", "115m", "115m", "115m",
"80m", "80m", "80m", "50m", "50m", "50m",
"80m", "80m", "80m", "115m", "115m", "115m",
"115m", "115m", "80m", "80m", "50m", "50m",
"50m", "50m", "80m", "80m", "115m", "115m",
"115m", "115m", "115m", "115m", "115m", "80m",
"50m", "50m", "80m", "80m", "115m", "115m"
],
"Control_to_RECV_mapping": [
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "1", "3", "1", "5", "0", "-1",
"1", "7", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"1", "9", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "11",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "13", "1", "15",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1"
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "17", "0", "-1"
],
"Power_to_RECV_mapping": [
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "0", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "1", "2", "1", "4", "0", "-1",
"1", "6", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"1", "8", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "10",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "12", "1", "14",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1"
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "16", "0", "-1"
],
"Antenna_to_SDP_Mapping": [
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "0", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "1", "0", "2", "0", "-1",
"0", "3", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "4", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "5",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "6", "1", "7",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1"
"0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "8", "0", "-1"
],
"Antenna_Field_Reference_ETRS": [
"3826923.942", "460915.117", "5064643.229"
......
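The Control_to_RECV_mapping, Power_to_RECV_mapping and Antenna_to_SDP_Mapping properties above are flat lists of pairs. The sketch below decodes such a list under the assumption, not stated in this diff, that each pair encodes (device or FPGA number, input number) and that an input of -1 marks an unconnected antenna:

# Illustrative only: the pair semantics are an assumption, not taken from this diff.
def decode_mapping(flat):
    """Group a flat ["dev", "input", ...] string list into (device, input) integer pairs."""
    values = [int(v) for v in flat]
    return list(zip(values[0::2], values[1::2]))

antenna_to_sdp = ["0", "-1", "3", "0", "3", "1"]  # hypothetical excerpt
for antenna, (fpga, fpga_input) in enumerate(decode_mapping(antenna_to_sdp)):
    connected = fpga_input >= 0
    print(f"antenna {antenna}: fpga={fpga} input={fpga_input} connected={connected}")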
@@ -181,7 +181,7 @@
"45.73",
"54.40"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.11660087", "-0.79095632", "0.60065992",
" 0.99317077", "-0.09529842", "0.06730545",
" 0.00400627", " 0.60440575", "0.79666658"
......
@@ -213,7 +213,7 @@
"45.73",
"54.40"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.11660087","-0.79095632","0.60065992",
" 0.99317077","-0.09529842","0.06730545",
" 0.00400627"," 0.60440575","0.79666658"
......
@@ -208,7 +208,7 @@
"24",
"24"
],
"HBAT_PQR_to_ETRS_rotation_matrix": [
"PQR_to_ETRS_rotation_matrix": [
"-0.1195951054", "-0.7919544517", "0.5987530018",
" 0.9928227484", "-0.0954186800", "0.0720990002",
" 0.0000330969", " 0.6030782884", "0.7976820024"
......
FROM grafana/grafana
FROM git.astron.nl:5000/lofar2.0/grafana-station-dashboards:latest
# Install some plugins
RUN grafana-cli plugins install briangann-datatable-panel
RUN grafana-cli plugins install ae3e-plotly-panel
RUN grafana-cli plugins install yesoreyeram-infinity-datasource
RUN grafana-cli plugins install aceiot-svg-panel
RUN grafana-cli plugins install yesoreyeram-boomtable-panel
RUN grafana-cli plugins install orchestracities-map-panel
RUN wget https://algenty.github.io/flowcharting-repository/archives/agenty-flowcharting-panel-1.0.0b-SNAPSHOT.zip -O /tmp/agenty-flowcharting-panel.zip
RUN cd /var/lib/grafana/plugins/ && unzip /tmp/agenty-flowcharting-panel.zip && mv grafana-flowcharting agenty-flowcharting-panel
COPY grafana.ini /etc/grafana/
# Add default configuration through provisioning (see https://grafana.com/docs/grafana/latest/administration/provisioning)
COPY datasources /etc/grafana/provisioning/datasources/
COPY dashboards /var/lib/grafana/dashboards/
COPY stationcontrol-dashboards.yaml /etc/grafana/provisioning/dashboards/
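The datasources and dashboards copied above are picked up by Grafana's provisioning at startup. A minimal sketch for checking what was provisioned, via the Grafana HTTP API; the admin/admin credentials are an assumption (Grafana's default) and may be overridden by the grafana.ini copied above:

import requests

# List provisioned datasources through the Grafana HTTP API.
# Credentials are an assumption (Grafana's default admin/admin).
response = requests.get("http://localhost:3000/api/datasources", auth=("admin", "admin"))
for datasource in response.json():
    print(datasource["name"], datasource["type"], datasource["url"])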
{
"template_files": {},
"alertmanager_config": {
"route": {
"receiver": "Alerta",
"repeat_interval": "10m"
},
"templates": null,
"receivers": [
{
"name": "Alerta",
"grafana_managed_receiver_configs": [
{
"name": "Alerta",
"type": "webhook",
"disableResolveMessage": false,
"settings": {
"url": "http://alerta-server:8080/api/webhooks/prometheus?api-key=demo-key"
},
"secureFields": {}
}
]
}
]
}
}
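This alertmanager configuration routes all Grafana-managed alerts to the Alerta webhook receiver. A sketch of pushing such a configuration by hand, mirroring the curl command generated by the import script further down in this diff; the file name and API key are placeholders:

import json
import requests

# Push an alertmanager configuration to Grafana's alerting API.
# "alerting.json" and the bearer token are placeholders, not names from this diff.
with open("alerting.json") as f:
    config = json.load(f)

requests.post(
    "http://localhost:3000/api/alertmanager/grafana/config/api/v1/alerts",
    json=config,
    headers={"Authorization": "Bearer <editor-api-key>"},
)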
Subproject commit faf5cbb2fc7981ca4430a9e341145ce66d304851
apiVersion: 1
datasources:
# <string, required> name of the datasource. Required
- name: Alerta UI
# <string, required> datasource type. Required
type: yesoreyeram-infinity-datasource
# <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
access: proxy
# <int> org id. will default to orgId 1 if not specified
orgId: 1
# <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
uid: alertaui
# <string> url
url: http://alerta-server:8080/api
# <string> Deprecated, use secureJsonData.password
password:
# <string> database user, if used
user: postgres
# <string> database name, if used
database: hdb
# <bool> enable/disable basic auth
basicAuth: false
# <string> basic auth username
basicAuthUser:
# <string> Deprecated, use secureJsonData.basicAuthPassword
basicAuthPassword:
# <bool> enable/disable with credentials headers
withCredentials:
# <bool> mark as default datasource. Max one per org
isDefault: false
# <map> fields that will be converted to json and stored in jsonData
jsonData:
secureQueryName1: "api-key"
# <string> json object of data that will be encrypted.
secureJsonData:
secureQueryValue1: "demo-key"
version: 1
# <bool> allow users to edit datasources from the UI.
editable: false
apiVersion: 1
datasources:
# <string, required> name of the datasource. Required
- name: Grafana API
# <string, required> datasource type. Required
type: yesoreyeram-infinity-datasource
# <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
access: proxy
# <int> org id. will default to orgId 1 if not specified
orgId: 1
# <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
uid: grafanaapi
# <string> url
url: http://localhost:3000/api
# <string> Deprecated, use secureJsonData.password
password:
# <string> database user, if used
user: postgres
# <string> database name, if used
database: hdb
# <bool> enable/disable basic auth
basicAuth: false
# <string> basic auth username
basicAuthUser:
# <string> Deprecated, use secureJsonData.basicAuthPassword
basicAuthPassword:
# <bool> enable/disable with credentials headers
withCredentials:
# <bool> mark as default datasource. Max one per org
isDefault: false
# <map> fields that will be converted to json and stored in jsonData
version: 1
# <bool> allow users to edit datasources from the UI.
editable: false
apiVersion: 1
datasources:
# <string, required> name of the datasource. Required
- name: Prometheus
# <string, required> datasource type. Required
type: prometheus
# <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
access: proxy
# <int> org id. will default to orgId 1 if not specified
orgId: 1
# <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
uid: prometheus
# <string> url
url: http://prometheus:9090
# <string> Deprecated, use secureJsonData.password
password:
# <string> database user, if used
user:
# <string> database name, if used
database:
# <bool> enable/disable basic auth
basicAuth: false
# <string> basic auth username
basicAuthUser:
# <string> Deprecated, use secureJsonData.basicAuthPassword
basicAuthPassword:
# <bool> enable/disable with credentials headers
withCredentials:
# <bool> mark as default datasource. Max one per org
isDefault: true
# <map> fields that will be converted to json and stored in jsonData
jsonData:
httpMethod: POST
# <string> json object of data that will be encrypted.
secureJsonData:
version: 1
# <bool> allow users to edit datasources from the UI.
editable: false
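The Prometheus datasource above proxies Grafana queries to http://prometheus:9090. For reference, an equivalent direct query against the Prometheus HTTP API (the "up" metric is just an example):

import requests

# Query the Prometheus HTTP API directly; the metric name is only an example.
response = requests.get(
    "http://prometheus:9090/api/v1/query",
    params={"query": "up"},
)
for result in response.json()["data"]["result"]:
    print(result["metric"], result["value"])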
This diff is collapsed.
#!/usr/bin/python3
import json
import os
import argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=
"""
Generate rule import files and script for Grafana.
This script expands a given rules.json file into individual rules and
prints the bash commands to import them in Grafana.
To export rules from Grafana, use
curl <grafana>/api/ruler/grafana/api/v1/rules > rules.json
""")
parser.add_argument(
'-c', '--alert-config-file', type=str, required=False, help="Input alertmanager configuration JSON to parse, output of 'curl <grafana>/api/alertmanager/grafana/config/api/v1/alerts' [%(default)s]")
parser.add_argument(
'-r', '--rules-file', type=str, required=True, help="Input rules JSON to parse, output of 'curl <grafana>/api/ruler/grafana/api/v1/rules' [%(default)s]")
parser.add_argument(
'-o', '--output-dir', type=str, default="rules", help="Directory to store the output [%(default)s]")
parser.add_argument(
'-B', '--authorization-bearer', type=str, default="abcdefghijklmnopqrstuvwxyz", help="Authorization bearer from the Grafana 'editor' API key [%(default)s]")
parser.add_argument(
'-g', '--grafana_url', type=str, default="http://localhost:3000", help="Base URL of Grafana [%(default)s]")
parser.add_argument(
'-u', '--update', default=False, action='store_true', help="Update existing alerts, instead of creating new ones [%(default)s]")
args = parser.parse_args()
if args.alert_config_file:
print(f"echo Importing alert configuration file {args.alert_config_file}")
print(f"curl -X POST {args.grafana_url}/api/alertmanager/grafana/config/api/v1/alerts -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'Authorization: Bearer {args.authorization_bearer}' -d '@{args.alert_config_file}'")
print(f"echo ''")
with open(args.rules_file) as f:
data=json.load(f)
try:
os.mkdir(args.output_dir)
except FileExistsError as e:
pass
# the rules are of format {"folder": [{alert}, {alert}] }
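# For example, the rules.json included further down in this diff has the shape:
#   {"station": [{"name": "FPGA processing error", "interval": "10s", "rules": [...]}]}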
for folder, rules in data.items():
try:
os.mkdir(f"{args.output_dir}/{folder}")
except FileExistsError as e:
pass
# print command to create folder
payload = json.dumps({"title": folder})
print(f"echo Creating folder {folder}")
print(f"curl -X POST {args.grafana_url}/api/folders -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'Authorization: Bearer {args.authorization_bearer}' -d '{payload}'")
print(f"echo ''")
for rule in rules:
rule_filename = f"{args.output_dir}/{folder}/{rule['name']}.json"
if not args.update:
# strip rule UIDs
for subrule in rule["rules"]:
del subrule["grafana_alert"]["uid"]
# dump this rule
with open(rule_filename, "w") as rule_file:
json.dump(rule, rule_file)
# print import statement for this rule
print(f"echo Processing rule {folder}/{rule['name']}")
print(f"curl -X POST {args.grafana_url}/api/ruler/grafana/api/v1/rules/{folder} -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'Authorization: Bearer {args.authorization_bearer}' -d '@{rule_filename}'")
print(f"echo ''")
{"station":[{"name":"FPGA processing error","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"One or more FPGAs are unusable."},"grafana_alert":{"id":1,"orgId":1,"title":"FPGA processing error","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"timescaledb","model":{"format":"time_series","group":[],"hide":false,"intervalMs":1000,"maxDataPoints":43200,"metricColumn":"none","rawQuery":true,"rawSql":"SELECT\n $__timeGroup(data_time, $__interval),\n x::text,\n device,\n name,\n value\nFROM lofar_array_boolean\nWHERE\n $__timeFilter(data_time) AND\n name = 'fpga_error_r'\nORDER BY 1,2","refId":"A","select":[[{"params":["value_r"],"type":"column"}]],"table":"att_scalar_devdouble","timeColumn":"data_time","timeColumnType":"timestamp","where":[{"name":"$__timeFilter","params":[],"type":"macro"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0],"type":"gt"},"operator":{"type":"and"},"query":{"params":["A"]},"reducer":{"params":[],"type":"last"},"type":"query"}],"datasource":{"type":"__expr__","uid":"-100"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T18:01:53Z","intervalSeconds":10,"version":3,"uid":"kujybCynk","namespace_uid":"R_jsbCynz","namespace_id":6,"rule_group":"FPGA processing error","no_data_state":"NoData","exec_err_state":"Alerting"}}]}]}
\ No newline at end of file
#
# Docker compose file that launches Jupyter Lab for interactive iTango sessions over HTTP.
#
# Connect by surfing to http://localhost:8889/
# View logs through 'docker logs -f -t jupyter-lab'
#
# Defines:
# - jupyter-lab: Jupyter Lab with iTango support
#
version: '2.1'
services:
jupyter-lab:
build:
context: jupyterlab
args:
CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
container_name: ${CONTAINER_NAME_PREFIX}jupyter-lab
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "10"
networks:
- control
volumes:
- ..:/opt/lofar/tango:rw
- ../jupyter-notebooks:/jupyter-notebooks:rw
- ${HOME}:/hosthome
- ${SCRATCH}:/scratch:rw
environment:
- TANGO_HOST=${TANGO_HOST}
ports:
- "8889:8889"
user: ${CONTAINER_EXECUTION_UID}
working_dir: /jupyter-notebooks
entrypoint:
- /opt/lofar/tango/bin/start-ds.sh
- jupyter lab --port=8889 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
restart: unless-stopped
ARG SOURCE_IMAGE
FROM ${SOURCE_IMAGE}
# UID of the user that this container will run under. This is needed to give the directories
# used for temporary storage the proper owner and access rights.
ARG CONTAINER_EXECUTION_UID=1000
# Create a new user with this uid, but only if the uid is not already in use
RUN sudo adduser --disabled-password --system --uid ${CONTAINER_EXECUTION_UID} --no-create-home --home ${HOME} user || exit 0
RUN sudo chown ${CONTAINER_EXECUTION_UID} -R ${HOME}
# Add compiler to install python packages which come with C++ code
RUN sudo apt-get update -y
RUN sudo apt-get install -y g++ gcc python3-dev
# Install git to install pip requirements from git
RUN sudo apt-get install -y git
# Install dependencies of our scripts (bin/start-ds.sh)
RUN sudo apt-get install -y rsync
COPY requirements.txt ./
RUN sudo pip3 install -r requirements.txt
# Install some version of the casacore measures tables, to allow basic delay computation analysis in the notebooks
RUN sudo apt-get install -y casacore-data
# see https://github.com/jupyter/nbconvert/issues/1434
RUN sudo bash -c "echo DEFAULT_ARGS += [\\\"--no-sandbox\\\"] >> /usr/local/lib/python3.7/dist-packages/pyppeteer/launcher.py"
RUN sudo apt-get update -y
RUN sudo apt-get install -y git gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget libcairo-gobject2 libxinerama1 libgtk2.0-0 libpangoft2-1.0-0 libthai0 libpixman-1-0 libxcb-render0 libharfbuzz0b libdatrie1 libgraphite2-3 libgbm1
# Allow Download as -> PDF via LaTeX
RUN sudo apt-get install -y texlive-xetex texlive-fonts-recommended texlive-latex-recommended cm-super
# Configure jupyter_bokeh
RUN sudo mkdir -p /usr/share/jupyter /usr/etc
RUN sudo chmod a+rwx /usr/share/jupyter /usr/etc
RUN sudo jupyter nbextension install --sys-prefix --symlink --py jupyter_bokeh
RUN sudo jupyter nbextension enable jupyter_bokeh --py --sys-prefix
# Install profiles for ipython & jupyter
COPY ipython-profiles /opt/ipython-profiles/
RUN sudo chown ${CONTAINER_EXECUTION_UID} -R /opt/ipython-profiles
COPY jupyter-kernels /usr/local/share/jupyter/kernels/
# Install patched jupyter executable
COPY jupyter-notebook /usr/local/bin/jupyter-notebook
# Add Tini. Tini operates as a process subreaper for jupyter. This prevents kernel crashes.
ENV TINI_VERSION v0.6.0
ENV JUPYTER_RUNTIME_DIR=/tmp
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini
RUN sudo chmod +x /usr/bin/tini
USER ${CONTAINER_EXECUTION_UID}
# pyppeteer-install installs in the homedir, so run it as the user that will execute the notebook
RUN pyppeteer-install
# Enable Jupyter lab
ENV JUPYTER_ENABLE_LAB=yes
from tango import *
# Create shortcuts for our devices
apsct = DeviceProxy("STAT/APSCT/1")
ccd = DeviceProxy("STAT/CCD/1")
apspu = DeviceProxy("STAT/APSPU/1")
recv = DeviceProxy("STAT/RECV/1")
sdp = DeviceProxy("STAT/SDP/1")
bst = DeviceProxy("STAT/BST/1")
sst = DeviceProxy("STAT/SST/1")
xst = DeviceProxy("STAT/XST/1")
unb2 = DeviceProxy("STAT/UNB2/1")
boot = DeviceProxy("STAT/Boot/1")
tilebeam = DeviceProxy("STAT/TileBeam/1")
psoc = DeviceProxy("STAT/PSOC/1")
beamlet = DeviceProxy("STAT/Beamlet/1")
digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
antennafield = DeviceProxy("STAT/AntennaField/1")
docker = DeviceProxy("STAT/Docker/1")
temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
# Put them in a list in case one wants to iterate
devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker]
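Since these proxies are created in the iTango startup profile, they are available directly in the session. A minimal sketch of iterating the devices list defined above, using only standard DeviceProxy calls (DevFailed is available through the wildcard tango import at the top of the file):

for device in devices:
    try:
        # name() is resolved locally; state() contacts the running device server.
        print(f"{device.name()}: {device.state()}")
    except DevFailed:
        print(f"{device.name()}: unreachable")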