Commit 3c14863b authored by Stefano Di Frischia

L2SS-398: merge with master and fix conflicts

parents 8e4c969f 2ec8cc96
Merge request !161: Resolve L2SS-398 "Archiver multi es"
Showing 2463 additions and 11 deletions
# TODO(Corne): Update this image to use our own registry once building
# images is in place.
image: artefact.skao.int/ska-tango-images-tango-itango:9.3.5
image: artefact.skao.int/ska-tango-images-tango-itango:9.3.7
variables:
GIT_SUBMODULE_STRATEGY: recursive
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
......
@@ -4,12 +4,12 @@ LOCAL_DOCKER_REGISTRY_HOST=git.astron.nl:5000
LOCAL_DOCKER_REGISTRY_USER=lofar2.0/tango
TANGO_ARCHIVER_VERSION=2021-05-28
TANGO_CPP_VERSION=9.3.5
TANGO_DB_VERSION=10.4.11
TANGO_DSCONFIG_VERSION=1.5.1
TANGO_CPP_VERSION=9.3.9
TANGO_DB_VERSION=10.4.14
TANGO_DSCONFIG_VERSION=1.5.3
TANGO_HDBPP_VIEWER_VERSION=2021-05-28
TANGO_ITANGO_VERSION=9.3.7
TANGO_JAVA_VERSION=9.3.4
TANGO_POGO_VERSION=9.6.32
TANGO_REST_VERSION=1.14.2
TANGO_JAVA_VERSION=9.3.6
TANGO_POGO_VERSION=9.6.34
TANGO_REST_VERSION=1.14.6
TANGO_STARTER_VERSION=2021-05-28
version: '2'
services:
archiver-timescale:
image: timescaledb
build:
context: timescaledb
container_name: ${CONTAINER_NAME_PREFIX}archiver-timescale
networks:
- control
ports:
- "5432:5432/tcp"
depends_on:
- databaseds
environment:
- POSTGRES_PASSWORD=password
- TANGO_HOST=${TANGO_HOST}
logging:
driver: syslog
options:
syslog-address: udp://${LOG_HOSTNAME}:1514
syslog-format: rfc3164
tag: "{{.Name}}"
restart: unless-stopped
hdbppts-cm:
image: hdbppts-cm
build:
context: ../docker/tango/tango-archiver-ts
networks:
- control
container_name: ${CONTAINER_NAME_PREFIX}hdbppts-cm
depends_on:
- databaseds
- dsconfig
- archiver-timescale
environment:
- TANGO_HOST=${TANGO_HOST}
- HdbManager=archiving/hdbppts/confmanager01
command: >
/bin/bash -c "
wait-for-it.sh archiver-timescale:5432 --timeout=30 --strict --
wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict --
hdbppcm-srv 02"
logging:
driver: syslog
options:
syslog-address: udp://${LOG_HOSTNAME}:1514
syslog-format: rfc3164
tag: "{{.Name}}"
hdbppts-es:
image: hdbppts-es
build:
context: ../docker/tango/tango-archiver-ts
networks:
- control
container_name: ${CONTAINER_NAME_PREFIX}hdbppts-es
depends_on:
- databaseds
- dsconfig
- archiver-timescale
environment:
- TANGO_HOST=${TANGO_HOST}
- HdbManager=archiving/hdbppts/confmanager01
command: >
/bin/bash -c "
wait-for-it.sh archiver-timescale:5432 --timeout=30 --strict --
wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict --
hdbppes-srv 02"
logging:
driver: syslog
options:
syslog-address: udp://${LOG_HOSTNAME}:1514
syslog-format: rfc3164
tag: "{{.Name}}"
restart: unless-stopped
@@ -24,7 +24,7 @@ services:
syslog-format: rfc3164
tag: "{{.Name}}"
restart: unless-stopped
hdbpp-es:
image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-archiver:2021-05-28
networks:
......
FROM timescale/timescaledb:latest-pg12
COPY resources/01_admin.sql docker-entrypoint-initdb.d/002_admin.sql
COPY resources/02_hdb_schema.sql docker-entrypoint-initdb.d/003_hdb_schema.sql
COPY resources/03_hdb_roles.sql docker-entrypoint-initdb.d/004_hdb_roles.sql
COPY resources/04_hdb_ext_aggregates.sql docker-entrypoint-initdb.d/005_hdb_ext_aggregates.sql
COPY resources/05_lofar_views.sql docker-entrypoint-initdb.d/006_lofar_views.sql
COPY resources/06_cleanup.sql docker-entrypoint-initdb.d/007_cleanup.sql
# TimescaleDB Docker Image
The Dockerfile in this directory creates a container with a PostgreSQL/TimescaleDB DBMS (https://www.timescale.com/) and then initialises it with the DB schema required by the Tango Archiving framework.
The base image builds on the official PostgreSQL image from Docker Hub (https://hub.docker.com/_/postgres), which offers several features for customising and extending it.
## Initialization scripts
If you would like to do additional initialization in an image derived from the Postgres official one, add one or more *.sql, *.sql.gz, or *.sh scripts under /docker-entrypoint-initdb.d (creating the directory if necessary). After the entrypoint calls initdb to create the default postgres user and database, it will run any *.sql files, run any executable *.sh scripts, and source any non-executable *.sh scripts found in that directory to do further initialization before starting the service.
The script files in /docker-entrypoint-initdb.d are executed sequentially, following the numeric prefix in their filenames. The first two (000_install_timescaledb.sh and 001_timescaledb_tune.sh) are provided by the base image.
The remaining scripts create the desired DB schema. They are kept in the 'resources' directory and have been pulled from the Tango-Hdbpp_Timescale_Project repository (https://github.com/tango-controls-hdbpp/hdbpp-timescale-project/tree/master/resources/schema):
- admin.sql creates the admin user that will create the tables
- hdb_schema.sql creates the standard Tango Archiving DB (This is the only MANDATORY script)
- hdb_roles.sql creates additional roles
- hdb_ext_aggregates.sql creates the continuous aggregate views (https://docs.timescale.com/timescaledb/latest/how-to-guides/continuous-aggregates/)
- cleanup.sql strips the SUPERUSER trait from hdb_admin
These scripts were last updated in August 2021 (more info can be found at https://github.com/tango-controls-hdbpp/hdbpp-timescale-project/blob/master/doc/db-schema-config.md).
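
As a quick sanity check once the container has initialised, a query along the following lines can be run against the hdb database. This is a minimal sketch: it assumes the standard hdb++ schema created above and only covers attributes stored in att_array_devdouble; the connection details (user postgres on host archiver-timescale) come from the docker-compose configuration and may differ elsewhere.

```sql
-- List the attributes with archived array-of-double data and the time of their latest sample.
-- att_conf and att_array_devdouble are standard tables of the hdb++ TimescaleDB schema.
SELECT ac.att_name,
       max(aad.data_time) AS last_sample
  FROM att_conf ac
  JOIN att_array_devdouble aad ON aad.att_conf_id = ac.att_conf_id
 GROUP BY ac.att_name
 ORDER BY last_sample DESC;
```

The LOFAR-specific views defined in 05_lofar_views.sql (e.g. sdp_fpga_temp) can be queried in the same way once SDP attributes are being archived.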
CREATE ROLE hdb_admin WITH LOGIN PASSWORD 'hdbpp';
ALTER USER hdb_admin CREATEDB;
ALTER USER hdb_admin CREATEROLE;
ALTER USER hdb_admin SUPERUSER;
-- -----------------------------------------------------------------------------
-- This file is part of the hdbpp-timescale-project
--
-- Copyright (C) : 2014-2019
-- European Synchrotron Radiation Facility
-- BP 220, Grenoble 38043, FRANCE
--
-- libhdb++timescale is free software: you can redistribute it and/or modify
-- it under the terms of the Lesser GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- libhdb++timescale is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Lesser
-- GNU General Public License for more details.
--
-- You should have received a copy of the Lesser GNU General Public License
-- along with libhdb++timescale. If not, see <http://www.gnu.org/licenses/>.
-- -----------------------------------------------------------------------------
-- Setup roles to access the hdb database
CREATE ROLE readonly;
CREATE ROLE readwrite;
-- Permissions - readonly
GRANT CONNECT ON DATABASE hdb TO readonly;
GRANT USAGE ON SCHEMA public TO readonly;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO readonly;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO readonly;
-- Permissions - readwrite
GRANT CONNECT ON DATABASE hdb TO readwrite;
GRANT USAGE ON SCHEMA public TO readwrite;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO readwrite;
GRANT USAGE ON ALL SEQUENCES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE ON SEQUENCES TO readwrite;
GRANT ALL ON SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO readwrite;
-- LOFAR 2.0 CUSTOMIZED VIEWS
\c hdb
-- SDP FPGA Temperature
create or replace view "sdp_fpga_temp" as
select
ac.att_name as "attribute",
aad.data_time AS "time",
aad.value_r
FROM att_array_devdouble aad join att_conf ac
on aad.att_conf_id = ac.att_conf_id
where aad.value_R is not null
and ac."domain" ='stat' and ac."family" ='sdp' and ac."member" ='1'
ORDER BY aad.data_time;
-- SDP FPGA Mask
create or replace view "sdp_tr_fpga_mask" as
select
ac.att_name as "attribute",
aab.data_time AS "time",
aab.value_r
FROM att_array_devboolean aab join att_conf ac
on aab.att_conf_id = ac.att_conf_id
where aab.value_R is not null
and ac."domain" ='stat' and ac."family" ='sdp' and ac."member" ='1'
ORDER BY aab.data_time;
-- SDP Masked values (rounded to 1 second)
create or replace view "sdp_masked_temp_values" as
select time_bucket('1 second',t.time) as "temp_time",
time_bucket('1 second',m.time) as "mask_time",
t.value_r as "temperature",
m.value_r as "mask"
from sdp_fpga_temp as t
inner join sdp_tr_fpga_mask as m
on time_bucket('1 second',t.time) = time_bucket('1 second',m.time)
/* Replace if possible with SQL loop; see the note after this view */
where m.value_r[1]=true and
m.value_r[2]=true and
m.value_r[3]=true and
m.value_r[4]=true and
m.value_r[5]=true and
m.value_r[6]=true and
m.value_r[7]=true and
m.value_r[8]=true and
m.value_r[9]=true and
m.value_r[10]=true and
m.value_r[11]=true and
m.value_r[12]=true and
m.value_r[13]=true and
m.value_r[14]=true and
m.value_r[15]=true and
m.value_r[16]=true
order by t."time" ;
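-- Note: a possible simplification of the 16 element-wise checks above (a sketch only,
-- assuming value_r always holds exactly 16 non-null booleans, one per FPGA) is
-- PostgreSQL's ALL() array comparison:
--
--   where true = ALL(m.value_r)
--
-- which is true only when every element of the mask array equals true.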
ALTER USER hdb_admin NOSUPERUSER;
#ARG DOCKER_REGISTRY_USER
#ARG DOCKER_REGISTRY_HOST
#FROM ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}/tango-cpp:latest
FROM artefact.skao.int/ska-tango-images-tango-cpp:9.3.9
USER root
RUN apt-get update && \
apt-get install -y ca-certificates
RUN echo "deb http://deb.debian.org/debian buster-backports main contrib non-free" >> /etc/apt/sources.list && \
more /etc/apt/sources.list && \
apt-get update && \
apt-get install -y \
checkinstall \
git \
cmake \
make \
g++ \
libomniorb4-dev \
libzmq3-dev \
libcos4-dev \
mariadb-server \
libmariadb-dev-compat libmariadb-dev \
libmariadbclient-dev \
postgresql \
postgresql-contrib \
libpq5 \
libpqxx-6.2 \
libpq-dev \
libpqxx-dev
RUN git clone -b v2.0.0 https://github.com/Diego91RA/libhdbpp.git
RUN cd libhdbpp \
&& mkdir build \
&& cd build \
&& cmake .. -DCMAKE_INCLUDE_PATH=/usr/local/include/tango \
&& make -j4
RUN cd libhdbpp/build \
&& checkinstall \
--install=yes \
--fstrans=no \
--showinstall=no \
--backup=no \
--type=debian \
--pkgsource="https://github.com/tango-controls-hdbpp/libhdbpp" \
--pkglicense="LGPLv3" \
--deldesc=no \
--nodoc \
--strip \
--stripso \
--maintainer="tango" \
--pkgarch=$(dpkg --print-architecture) \
--pkgversion="2.0.0" \
--pkgrelease="SNAPSHOT" \
--pkgname="libhdbpp" \
--requires="libzmq5,libomniorb4-2,libcos4-2,libomnithread4" \
make install
RUN git clone -b master --recurse-submodules https://github.com/Diego91RA/libhdbpp-timescale.git
RUN cd libhdbpp-timescale && git checkout 85c03657d4518e876869b322d6a50c46875a1300
RUN cd libhdbpp-timescale \
&& mkdir -p build \
&& cd build \
&& cmake .. -DCMAKE_PREFIX_PATH=/usr/local/include/tango -DPostgreSQL_TYPE_INCLUDE_DIR=/usr/local/include/postgresql \
&& make -j4
RUN cd libhdbpp-timescale/build \
&& checkinstall \
--install=yes \
--fstrans=no \
--showinstall=no \
--backup=no \
--type=debian \
--pkgsource="https://github.com/tango-controls-hdbpp/libhdbpp-timescale" \
--pkglicense="LGPLv3" \
--deldesc=no \
--nodoc \
--strip \
--stripso \
--maintainer="tango" \
--pkgarch=$(dpkg --print-architecture) \
--pkgversion="2.0.0" \
--pkgrelease="SNAPSHOT" \
--pkgname="libhdbpp-timescale" \
--requires="libpq5" \
make install
RUN git clone -b v2.0.0 https://github.com/Diego91RA/hdbpp-cm.git
RUN cd hdbpp-cm \
&& mkdir -p build \
&& cd build \
&& cmake .. -DCMAKE_PREFIX_PATH=/usr/local/include/tango \
&& make -j4
RUN cd hdbpp-cm/build \
&& checkinstall \
--install=yes \
--fstrans=no \
--showinstall=no \
--backup=no \
--type=debian \
--pkgsource="https://github.com/tango-controls-hdbpp/hdbpp-cm" \
--pkglicense="GPLv3" \
--deldesc=no \
--nodoc \
--strip \
--stripso \
--maintainer="tango" \
--pkgarch=$(dpkg --print-architecture) \
--pkgversion="2.0.0" \
--pkgrelease="SNAPSHOT" \
--pkgname="hdbpp-cm" \
--requires="libzmq5,libomniorb4-2,libcos4-2,libomnithread4" \
make install
RUN git clone -b master https://github.com/Diego91RA/hdbpp-es.git
RUN cd hdbpp-es && git checkout 473cbfbd7af76851bbf6ca2fcf5e4880e9f8e437
RUN cd hdbpp-es \
&& mkdir -p build \
&& cd build \
&& cmake .. -DCMAKE_PREFIX_PATH=/usr/local/include/tango -DFETCH_LIBHDBPP=OFF -DLIBHDBPP_BACKEND=timescale -DPostgreSQL_TYPE_INCLUDE_DIR=/usr/local/include/postgresql \
&& make -j4
RUN cd hdbpp-es/build \
&& checkinstall \
--install=yes \
--fstrans=no \
--showinstall=no \
--backup=no \
--type=debian \
--pkgsource="https://github.com/tango-controls-hdbpp/hdbpp-es" \
--pkglicense="GPLv3" \
--deldesc=no \
--nodoc \
--strip \
--stripso \
--maintainer="tango" \
--pkgarch=$(dpkg --print-architecture) \
--pkgversion="2.0.0" \
--pkgrelease="SNAPSHOT" \
--pkgname="hdbpp-es" \
--requires="libzmq5,libomniorb4-2,libcos4-2,libomnithread4" \
make install
RUN apt-get update && \
apt-get install -y \
build-essential && \
apt-get clean
RUN dpkg -i /libhdbpp/build/libhdbpp_2.0.0-SNAPSHOT_amd64.deb
RUN dpkg -i /libhdbpp-timescale/build/libhdbpp-timescale_2.0.0-SNAPSHOT_amd64.deb
RUN dpkg -i /hdbpp-cm/build/hdbpp-cm_2.0.0-SNAPSHOT_amd64.deb
RUN dpkg -i /hdbpp-es/build/hdbpp-es_2.0.0-SNAPSHOT_amd64.deb
RUN ldconfig
RUN mv /usr/local/bin/hdb++cm-srv /usr/local/bin/hdbppcm-srv
RUN mv /usr/local/bin/hdb++es-srv /usr/local/bin/hdbppes-srv
@@ -14,6 +14,19 @@
}
}
},
"02": {
"HdbEventSubscriber": {
"archiving/hdbppts/eventsubscriber01": {
"attribute_properties": {},
"properties": {
"CheckPeriodicTimeoutDelay": ["5"],
"PollingThreadPeriod": ["3"],
"LibConfiguration": ["connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb","host=archiver-timescale","libname=libhdb++timescale.so","dbname=hdb","port=5432", "user=postgres", "password=password"],
"polled_attr": []
}
}
}
},
"03": {
"HdbEventSubscriber": {
"archiving/hdbpp/eventsubscriber02": {
@@ -41,6 +54,19 @@
}
}
}
},
"02": {
"HdbConfigurationManager": {
"archiving/hdbppts/confmanager01": {
"attribute_properties": {},
"properties": {
"ArchiverList": ["archiving/hdbppts/eventsubscriber01"],
"MaxSearchSize": ["1000"],
"LibConfiguration": ["connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb","host=archiver-timescale","libname=libhdb++timescale.so","dbname=hdb","port=5432", "user=postgres", "password=password"],
"polled_attr": []
}
}
}
}
}
}
......
@@ -58,6 +58,9 @@ class OPCUAConnection(AsyncCommClient):
# prefix path to all nodes with this. this allows the user to switch trees more easily.
self.node_path_prefix = []
# cache of resolved nodes, keyed by their comma-separated path (filled per parent as paths are looked up)
self._node_cache = {}
super().__init__(fault_func, event_loop)
def _servername(self):
@@ -130,13 +133,41 @@ class OPCUAConnection(AsyncCommClient):
return path
async def get_node(self, path):
""" Retrieve an OPC-UA node from either the cache, or the server. """
if not path:
return self.obj
cache_key = ",".join(path)
# lookup in cache
if cache_key in self._node_cache:
return self._node_cache[cache_key]
# cache it and all of its siblings to save us the round trips for them later on.
parent_path = path[:-1]
parent_node = await self.obj.get_child(parent_path) if parent_path else self.obj
child_nodes = await parent_node.get_children_descriptions()
for child_node in child_nodes:
# add node to the cache
child_path = parent_path + [f"{self.name_space_index}:{child_node.DisplayName.Text}"]
self._node_cache[",".join(child_path)] = self.client.get_node(child_node.NodeId)
# lookup in cache again. if the name is valid, it should be in there.
if cache_key in self._node_cache:
return self._node_cache[cache_key]
# we couldn't find the requested child; ask the server directly to obtain the appropriate error
return await self.obj.get_child(path)
async def setup_protocol_attribute(self, annotation, attribute):
# process the annotation
path = self.get_node_path(annotation)
try:
node = await self.obj.get_child(path)
node = await self.get_node(path)
except Exception as e:
logger.exception("Could not get node: %s on server %s", path, self._servername())
raise Exception("Could not get node: %s on server %s", path, self._servername()) from e
@@ -180,7 +211,7 @@ class OPCUAConnection(AsyncCommClient):
try:
# call method in its parent node
node = await self.obj.get_child(method_path[:-1]) if len(method_path) > 1 else self.obj
node = await self.get_node(method_path[:-1])
result = await node.call_method(method_path[-1], *args)
except Exception as e:
raise Exception(f"Calling method {method_path} failed") from e
......
@@ -161,6 +161,10 @@ class SDP(opcua_device):
FPGA_bsn_monitor_input_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_bsn_monitor_input_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,))
FPGA_bsn_monitor_input_nof_err_R = attribute_wrapper(comms_annotation=["FPGA_bsn_monitor_input_nof_err_R"], datatype=numpy.int32, dims=(N_pn,))
FPGA_signal_input_samples_delay_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_samples_delay_R"], datatype=numpy.uint32, dims=(S_pn, N_pn))
FPGA_signal_input_samples_delay_RW = attribute_wrapper(comms_annotation=["FPGA_signal_input_samples_delay_RW"], datatype=numpy.uint32, dims=(S_pn, N_pn), access=AttrWriteType.READ_WRITE)
# --------
# overloaded functions
# --------
......
@@ -86,6 +86,7 @@ class TestOPCua(base.AsyncTestCase):
m_opc_client_members.send_hello = asynctest.asynctest.CoroutineMock()
m_objects_node = asynctest.Mock()
m_objects_node.get_child = asynctest.asynctest.CoroutineMock()
m_objects_node.get_children_descriptions = asynctest.asynctest.CoroutineMock()
m_opc_client_members.get_objects_node = asynctest.Mock(return_value=m_objects_node)
m_opc_client.return_value = m_opc_client_members
@@ -108,7 +109,7 @@
m_attribute = mock_attr(i.numpy_type, dim_x, dim_y)
# pretend like there is a running OPCua server with a node that has this name
m_annotation = ["2:PCC", f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"]
m_annotation = [f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"]
test_client = OPCUAConnection("opc.tcp://localhost:4874/freeopcua/server/", "http://lofar.eu", 5, mock.Mock(), self.loop)
try:
......