Commit 15fb91f9 authored by Stefano Di Frischia

Merge branch 'master' into L2SS-406-grafana-archiver

parents 028ae6c8 88c9f7db
Merge request !190: Resolve L2SS-406 "Grafana archiver"
@@ -5,4 +5,5 @@ COPY resources/01_admin.sql docker-entrypoint-initdb.d/002_admin.sql
 COPY resources/02_hdb_schema.sql docker-entrypoint-initdb.d/003_hdb_schema.sql
 COPY resources/03_hdb_roles.sql docker-entrypoint-initdb.d/004_hdb_roles.sql
 COPY resources/04_hdb_ext_aggregates.sql docker-entrypoint-initdb.d/005_hdb_ext_aggregates.sql
-COPY resources/05_cleanup.sql docker-entrypoint-initdb.d/006_cleanup.sql
+COPY resources/05_lofar_views.sql docker-entrypoint-initdb.d/006_lofar_views.sql
+COPY resources/06_cleanup.sql docker-entrypoint-initdb.d/007_cleanup.sql
@@ -16,8 +16,3 @@ The next ones have been pulled from the official Tango repository in order to cr
 - hdb_ext_aggregates.sql creates the continuous aggregate views (https://docs.timescale.com/timescaledb/latest/how-to-guides/continuous-aggregates/)
 - cleanup.sql strips the SUPERUSER trait from hdb_admin
 Last updates on these scripts are dated to August 2021 (more info can be found at https://github.com/tango-controls-hdbpp/hdbpp-timescale-project/blob/master/doc/db-schema-config.md)
-There is an older version of these Tango-Archiving generating scripts in the directory 'resources_v2'. These files have been pulled from Tango-LibHdbpp-Timescale (https://github.com/tango-controls-hdbpp/libhdbpp-timescale/tree/master/db-schema)
-- LOFAR 2.0 CUSTOMIZED VIEWS
\c hdb

-- SDP FPGA Temperature
CREATE OR REPLACE VIEW "sdp_fpga_temp" AS
SELECT
    ac.att_name AS "attribute",
    aad.data_time AS "time",
    aad.value_r
FROM att_array_devdouble aad
JOIN att_conf ac ON aad.att_conf_id = ac.att_conf_id
WHERE aad.value_r IS NOT NULL
  AND ac."domain" = 'stat' AND ac."family" = 'sdp' AND ac."member" = '1'
ORDER BY aad.data_time;
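
-- The value_r column above holds one array per sample (one element per FPGA). A sketch
-- of how it could be expanded into one row per FPGA for plotting, assuming a 0-based
-- FPGA index is wanted (not part of this commit):
--
--   SELECT f."time", u.idx - 1 AS fpga, u.temp
--   FROM sdp_fpga_temp f, unnest(f.value_r) WITH ORDINALITY AS u(temp, idx);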

-- SDP FPGA Mask
CREATE OR REPLACE VIEW "sdp_tr_fpga_mask" AS
SELECT
    ac.att_name AS "attribute",
    aab.data_time AS "time",
    aab.value_r
FROM att_array_devboolean aab
JOIN att_conf ac ON aab.att_conf_id = ac.att_conf_id
WHERE aab.value_r IS NOT NULL
  AND ac."domain" = 'stat' AND ac."family" = 'sdp' AND ac."member" = '1'
ORDER BY aab.data_time;

-- SDP Masked values (rounded to 1 second)
CREATE OR REPLACE VIEW "sdp_masked_temp_values" AS
SELECT
    time_bucket('1 second', t.time) AS "temp_time",
    time_bucket('1 second', m.time) AS "mask_time",
    t.value_r AS "temperature",
    m.value_r AS "mask"
FROM sdp_fpga_temp AS t
INNER JOIN sdp_tr_fpga_mask AS m
    ON time_bucket('1 second', t.time) = time_bucket('1 second', m.time)
/* Replace if possible with SQL loop */
WHERE m.value_r[1] = true AND
      m.value_r[2] = true AND
      m.value_r[3] = true AND
      m.value_r[4] = true AND
      m.value_r[5] = true AND
      m.value_r[6] = true AND
      m.value_r[7] = true AND
      m.value_r[8] = true AND
      m.value_r[9] = true AND
      m.value_r[10] = true AND
      m.value_r[11] = true AND
      m.value_r[12] = true AND
      m.value_r[13] = true AND
      m.value_r[14] = true AND
      m.value_r[15] = true AND
      m.value_r[16] = true
ORDER BY t."time";
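
-- Regarding the /* Replace if possible with SQL loop */ note above: the 16 explicit
-- element comparisons could likely be collapsed into a single predicate using
-- PostgreSQL's ALL() array operator (a sketch, assuming value_r never contains NULL
-- elements; not part of this commit):
--
--   WHERE true = ALL(m.value_r)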
ALTER TABLE att_scalar_devboolean CLUSTER ON att_scalar_devboolean_att_conf_id_data_time_idx;
ALTER TABLE att_array_devboolean CLUSTER ON att_array_devboolean_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devuchar CLUSTER ON att_scalar_devuchar_att_conf_id_data_time_idx;
ALTER TABLE att_array_devuchar CLUSTER ON att_array_devuchar_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devshort CLUSTER ON att_scalar_devshort_att_conf_id_data_time_idx;
ALTER TABLE att_array_devshort CLUSTER ON att_array_devshort_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devushort CLUSTER ON att_scalar_devushort_att_conf_id_data_time_idx;
ALTER TABLE att_array_devushort CLUSTER ON att_array_devushort_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devlong CLUSTER ON att_scalar_devlong_att_conf_id_data_time_idx;
ALTER TABLE att_array_devlong CLUSTER ON att_array_devlong_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devulong CLUSTER ON att_scalar_devulong_att_conf_id_data_time_idx;
ALTER TABLE att_array_devulong CLUSTER ON att_array_devulong_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devlong64 CLUSTER ON att_scalar_devlong64_att_conf_id_data_time_idx;
ALTER TABLE att_array_devlong64 CLUSTER ON att_array_devlong64_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devulong64 CLUSTER ON att_scalar_devulong64_att_conf_id_data_time_idx;
ALTER TABLE att_array_devulong64 CLUSTER ON att_array_devulong64_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devfloat CLUSTER ON att_scalar_devfloat_att_conf_id_data_time_idx;
ALTER TABLE att_array_devfloat CLUSTER ON att_array_devfloat_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devdouble CLUSTER ON att_scalar_devdouble_att_conf_id_data_time_idx;
ALTER TABLE att_array_devdouble CLUSTER ON att_array_devdouble_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devstring CLUSTER ON att_scalar_devstring_att_conf_id_data_time_idx;
ALTER TABLE att_array_devstring CLUSTER ON att_array_devstring_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devstate CLUSTER ON att_scalar_devstate_att_conf_id_data_time_idx;
ALTER TABLE att_array_devstate CLUSTER ON att_array_devstate_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devencoded CLUSTER ON att_scalar_devencoded_att_conf_id_data_time_idx;
ALTER TABLE att_array_devencoded CLUSTER ON att_array_devencoded_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devenum CLUSTER ON att_scalar_devenum_att_conf_id_data_time_idx;
ALTER TABLE att_array_devenum CLUSTER ON att_array_devenum_att_conf_id_data_time_idx;
CLUSTER att_scalar_devboolean;
CLUSTER att_array_devboolean;
CLUSTER att_scalar_devuchar;
CLUSTER att_array_devuchar;
CLUSTER att_scalar_devshort;
CLUSTER att_array_devshort;
CLUSTER att_scalar_devushort;
CLUSTER att_array_devushort;
CLUSTER att_scalar_devlong;
CLUSTER att_array_devlong;
CLUSTER att_scalar_devulong;
CLUSTER att_array_devulong;
CLUSTER att_scalar_devlong64;
CLUSTER att_array_devlong64;
CLUSTER att_scalar_devulong64;
CLUSTER att_array_devulong64;
CLUSTER att_scalar_devfloat;
CLUSTER att_array_devfloat;
CLUSTER att_scalar_devdouble;
CLUSTER att_array_devdouble;
CLUSTER att_scalar_devstring;
CLUSTER att_array_devstring;
CLUSTER att_scalar_devstate;
CLUSTER att_array_devstate;
CLUSTER att_scalar_devencoded;
CLUSTER att_array_devencoded;
CLUSTER att_scalar_devenum;
CLUSTER att_array_devenum;
-- Roles
CREATE ROLE readonly;
CREATE ROLE readwrite;
-- Permissions - readonly
GRANT CONNECT ON DATABASE hdb TO readonly;
GRANT USAGE ON SCHEMA public TO readonly;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO readonly;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO readonly;
-- Permissions - readwrite
GRANT CONNECT ON DATABASE hdb TO readwrite;
GRANT USAGE ON SCHEMA public TO readwrite;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO readwrite;
GRANT USAGE ON ALL SEQUENCES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE ON SEQUENCES TO readwrite;
GRANT ALL ON SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO readwrite;
-- Users
CREATE ROLE hdb_cfg_man WITH LOGIN PASSWORD 'hdbpp';
GRANT readwrite TO hdb_cfg_man;
CREATE ROLE hdb_event_sub WITH LOGIN PASSWORD 'hdbpp';
GRANT readwrite TO hdb_event_sub;
CREATE ROLE hdb_java_reporter WITH LOGIN PASSWORD 'hdbpp';
GRANT readonly TO hdb_java_reporter;
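
-- A possible extra login for read-only consumers such as a Grafana data source,
-- following the same pattern as the users above (a sketch only; the role name and
-- password are hypothetical and not part of this commit):
--
--   CREATE ROLE grafana_reader WITH LOGIN PASSWORD 'change_me';
--   GRANT readonly TO grafana_reader;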
%% Cell type:markdown id:6bdc7054 tags:
# Test LRx.y: Test Name
This notebook documents test output.
Instructions:
1. *PRESS File -> Make a Copy*,
1. Rename the notebook to the name of the test,
1. Update the title at the top of this section,
1. Fill in the sections with empty blocks with code and text,
1. Execute the provided initialisation code,
1. Run the tests,
1. *Rerun everything top to bottom to make sure the notebook is correct*,
1. Fill in the results & verdict.
1. Export the Jupyter Notebook to PDF and upload the file to the Polarion test results page.
1. ...and you're done!
%% Cell type:markdown id:63785004 tags:
## Author
Describe who ran this test, and who helped (if applicable):
%% Cell type:markdown id:3c720d4b tags:
(your name)
%% Cell type:markdown id:ff837bcb tags:
## Timestamp
This test was executed at:
%% Cell type:code id:00418ee4 tags:
``` python
# Run this code
import datetime
print(f"Test was executed at {datetime.datetime.isoformat(datetime.datetime.now(), ' ')}")
```
%% Cell type:markdown id:0b4a59a5 tags:
## Purpose
Describe the purpose and context of this notebook, possibly including any links to external references, e.g. the Polarion reference number:
%% Cell type:markdown id:09d4d0a1 tags:
(purpose)
%% Cell type:markdown id:4c6489f3 tags:
## Methodology
Provide a summary of how we are going to prove compliance:
%% Cell type:markdown id:ceae21d4 tags:
(methodology)
%% Cell type:markdown id:9033f262 tags:
## Initialisation
The following sections contain boilerplate code to get the station to a well-defined state. If this is not applicable or broken, just note that here:
%% Cell type:markdown id:00d4e336 tags:
%% Cell type:markdown id:7532d05e tags:
### Hot reboot
Makes sure the software and hardware are all in a known state.
%% Cell type:code id:c7a3effa tags:
``` python
# Restart boot device
boot.off()
boot.initialise()
boot.on()
```
%% Cell type:code id:b4dd21b1 tags:
``` python
# Reboot the station
boot.boot()
```
%% Cell type:code id:78a4db84 tags:
``` python
# Wait for reboot to complete
import time
while boot.booting_R:
    time.sleep(2)
    print(f"Initialisation at {boot.progress_R}%: {boot.status_R}")

assert boot.progress_R == 100, f"Failed to fully initialise station: {boot.status_R}"

if boot.uninitialised_devices_R:
    print(f"Warning! Did not initialise {boot.uninitialised_devices_R}. This might be inconsequential for this test.")
```
%% Cell type:markdown id:9bc072af tags:
### Active versions
List the versions currently running on the station.
%% Cell type:code id:35b815d4 tags:
``` python
def summarise(l: list) -> list:
    return [f"{idx}: {version}" for idx, version in enumerate(l) if version] or ["no versions reported"]

versions = {
    "SC": {dev.name(): dev.version_R for dev in devices},
    "SDP": {
        "FPGA firmware": summarise(sdp.FPGA_firmware_version_R),
        "FPGA hardware": summarise(sdp.FPGA_hardware_version_R),
        "SDPTR": sdp.TR_software_version_R,
    },
    "RECV": {
        "PCB": summarise(recv.RCU_PCB_version_R),
    },
    "APSCT": {
        "PCB": apsct.APSCT_PCB_version_R,
    },
    "APSPU": {
        "PCB": apspu.APSPU_PCB_version_R,
    },
    "UNB2": {
        "PCB": summarise(unb2.UNB2_PCB_version_R),
    },
}

from pprint import pprint
pprint(versions, width=120)
```
%% Cell type:markdown id:e51a06b7 tags:
## Test setup
Set up the hardware for the test:
%% Cell type:code id:e72dc2df tags:
``` python
# Your code to configure the station for this test
```
%% Cell type:markdown id:772dff7c tags:
## Run test
%% Cell type:code id:26570aea tags:
``` python
# Your code that triggers the actual test (if this is an explicit step)
```
%% Cell type:markdown id:6c604116 tags:
## Test results
%% Cell type:code id:d290d8dd tags:
``` python
# Your code to plot the resulting statistics (e.g. the SSTs) after the test
```
%% Cell type:markdown id:d3cdb620 tags:
## Discuss results
How should the results be interpreted? Are there any remaining concerns or to-dos based on this result?
%% Cell type:markdown id:6e082c7c tags:
(Explain results, and caveats)
%% Cell type:markdown id:a95fbf48 tags:
## Verdict
The test passed/did not pass:
%% Cell type:markdown id:9a2a7a97 tags:
(Explain whether the result is good enough, or what needs to be done to improve)
@@ -58,6 +58,9 @@ class OPCUAConnection(AsyncCommClient):
         # prefix path to all nodes with this. this allows the user to switch trees more easily.
         self.node_path_prefix = []
 
+        # cache of looked up child node lists for each comma-separated parent path
+        self._node_cache = {}
+
         super().__init__(fault_func, event_loop)
 
     def _servername(self):
@@ -130,13 +133,41 @@ class OPCUAConnection(AsyncCommClient):
 
         return path
 
+    async def get_node(self, path):
+        """ Retrieve an OPC-UA node from either the cache or the server. """
+
+        if not path:
+            return self.obj
+
+        cache_key = ",".join(path)
+
+        # lookup in cache
+        if cache_key in self._node_cache:
+            return self._node_cache[cache_key]
+
+        # cache it and all of its siblings to save us the round trips for them later on.
+        parent_path = path[:-1]
+        parent_node = await self.obj.get_child(parent_path) if parent_path else self.obj
+        child_nodes = await parent_node.get_children_descriptions()
+
+        for child_node in child_nodes:
+            # add node to the cache
+            child_path = parent_path + [f"{self.name_space_index}:{child_node.DisplayName.Text}"]
+            self._node_cache[",".join(child_path)] = self.client.get_node(child_node.NodeId)
+
+        # lookup in cache again. if the name is valid, it should be in there.
+        if cache_key in self._node_cache:
+            return self._node_cache[cache_key]
+
+        # we couldn't find the requested child, so ask the server directly to get the appropriate error
+        return await self.obj.get_child(path)
+
     async def setup_protocol_attribute(self, annotation, attribute):
         # process the annotation
         path = self.get_node_path(annotation)
 
         try:
-            node = await self.obj.get_child(path)
+            node = await self.get_node(path)
         except Exception as e:
             logger.exception("Could not get node: %s on server %s", path, self._servername())
             raise Exception("Could not get node: %s on server %s", path, self._servername()) from e
@@ -180,7 +211,7 @@ class OPCUAConnection(AsyncCommClient):
 
         try:
             # call method in its parent node
-            node = await self.obj.get_child(method_path[:-1]) if len(method_path) > 1 else self.obj
+            node = await self.get_node(method_path[:-1])
             result = await node.call_method(method_path[-1], *args)
         except Exception as e:
             raise Exception(f"Calling method {method_path} failed") from e
@@ -86,6 +86,7 @@ class TestOPCua(base.AsyncTestCase):
         m_opc_client_members.send_hello = asynctest.asynctest.CoroutineMock()
 
         m_objects_node = asynctest.Mock()
         m_objects_node.get_child = asynctest.asynctest.CoroutineMock()
+        m_objects_node.get_children_descriptions = asynctest.asynctest.CoroutineMock()
         m_opc_client_members.get_objects_node = asynctest.Mock(return_value=m_objects_node)
         m_opc_client.return_value = m_opc_client_members
@@ -108,7 +109,7 @@ class TestOPCua(base.AsyncTestCase):
             m_attribute = mock_attr(i.numpy_type, dim_x, dim_y)
 
             # pretend like there is a running OPCua server with a node that has this name
-            m_annotation = ["2:PCC", f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"]
+            m_annotation = [f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"]
 
             test_client = OPCUAConnection("opc.tcp://localhost:4874/freeopcua/server/", "http://lofar.eu", 5, mock.Mock(), self.loop)
             try: