diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4f376e71b9b06dcca9451962aafb178efb7f8368..aeb55ae73e83cf70c385418425cb68d5dfdef7d5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -139,10 +139,6 @@ docker_build_image_all:
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-bst latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbpp latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es latest
 
 # Build and push custom images on merge request if relevant files changed
 docker_build_image_lofar_device_base:
@@ -530,50 +526,6 @@ docker_build_image_device_temperature_manager:
   script:
     #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager $tag
-docker_build_image_archiver_timescale:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
-      - docker-compose/archiver-timescale.yml
-      - docker-compose/timescaledb/*
-  script:
-    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale $tag
-docker_build_image_hdbpp:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
-      - docker-compose/archiver-timescale.yml
-      - docker-compose/hdbpp/*
-  script:
-    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbpp $tag
-docker_build_image_hdbppts_cm:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
-      - docker-compose/archiver-timescale.yml
-      - docker-compose/hdbppts-cm/*
-  script:
-    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm $tag
-docker_build_image_hdbppts_es:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
-      - docker-compose/archiver-timescale.yml
-      - docker-compose/hdbppts-es/*
-  script:
-    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es $tag
 newline_at_eof:
   stage: linting
   before_script:
@@ -699,7 +651,6 @@ integration_test_docker:
         echo "Saving log for container $container"
         docker logs "${container}" >& "log/${container}.log"
       done
-      PGPASSWORD=password pg_dump --host=docker --username=postgres hdb 2>log/archiver-timescale-dump.log | gzip > log/archiver-timescale-dump.txt.gz
   artifacts:
     when: always
     paths:
diff --git a/CDB/README.md b/CDB/README.md
index 52a0b14ada80973a6b1dfc28eb217f832a384119..4ad24de4f8425a304082d41801dd3fdecab0d6b0 100644
--- a/CDB/README.md
+++ b/CDB/README.md
@@ -12,7 +12,6 @@ The following files are provided:
 | File                                       | Description                                                 | Usage               |
 |--------------------------------------------|-------------------------------------------------------------|---------------------|
 | `LOFAR_ConfigDb.json`                      | Generic base configuration, registering all of the devices. | Always              |
-| `tango-archiver-data/archiver-devices.json`| Archiver configuration for TimescaleDB                      | Always              |
 | `test_environment_ConfigDb.json`           | Base delta for the unit- and integration test suites.       | Tests & development |
 | `stations/simulators_ConfigDb.json`        | A "station" configuration that points to our simulators.    | Tests & development |
 | `stations/dummy_positions_ConfigDb.json`   | An antenna configuration, just to have one (it's CS001).    | Tests & development |
diff --git a/CDB/tango-archiver-data/archiver-devices.json b/CDB/tango-archiver-data/archiver-devices.json
deleted file mode 100644
index f726543c6916327352719fa50732988533b6d3fd..0000000000000000000000000000000000000000
--- a/CDB/tango-archiver-data/archiver-devices.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
-    "servers": {
-        "hdbppes-srv": {
-            "01": {
-                "HdbEventSubscriber": {
-                    "archiving/hdbppts/eventsubscriber01": {
-                        "attribute_properties": {},
-                        "properties": {
-                            "CheckPeriodicTimeoutDelay": ["5"],
-                            "PollingThreadPeriod": ["3"],
-                            "LibConfiguration": ["connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb","host=archiver-timescale","libname=libhdb++timescale.so","dbname=hdb","port=5432", "user=postgres", "password=password"],
-                            "polled_attr": []
-                        }
-                    }
-                }
-            }
-        },
-        "hdbppcm-srv": {
-            "01": {
-                "HdbConfigurationManager": {
-                    "archiving/hdbppts/confmanager01": {
-                        "attribute_properties": {},
-                        "properties": {
-                            "ArchiverList": ["archiving/hdbppts/eventsubscriber01"],
-                            "MaxSearchSize": ["1000"],
-                            "LibConfiguration": ["connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb","host=archiver-timescale","libname=libhdb++timescale.so","dbname=hdb","port=5432", "user=postgres", "password=password"],
-                            "polled_attr": []
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
diff --git a/README.md b/README.md
index 99c756984e8dfe60fd5ae159dd6a04898163e469..74e24faa1cb855ddcf36d27a1e824e7327f8e2d7 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,6 @@ Station Control software related to Tango devices. \
     * [Bootstrap](#bootstrap)
 * [User documentation (ReadTheDocs (Sphinx / ReStructuredText))](tangostationcontrol/docs/README.md)
 * [Docker compose & station services documentation](docker-compose/README.md)
-    * [Timescaledb](docker-compose/timescaledb/README.md)
     * [Jupyter startup files](docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/README.md)
     * [Tango Prometheus exporter](https://git.astron.nl/lofar2.0/ska-tango-grafana-exporter)
 * [Developer Documentation](#development)
@@ -24,7 +23,6 @@ Station Control software related to Tango devices. \
     * [Versioning](#versioning)
 * Source code documentation
     * [Attribute wrapper documentation](tangostationcontrol/tangostationcontrol/clients/README.md)
-    * [Archiver documentation](tangostationcontrol/tangostationcontrol/toolkit/README.md)
     * [Adding a new tango device](tangostationcontrol/tangostationcontrol/devices/README.md)
     * [HDF5 statistics](tangostationcontrol/tangostationcontrol/statistics/README.md)
 * [Unit tests](tangostationcontrol/tangostationcontrol/test/README.md)
@@ -117,6 +115,7 @@ Next change the version in the following places:
 
 # Release Notes
 
+* 0.13.0 Remove all `archiver-timescale`, `hdbpp`, `hdbppts-cm`, and `hdbppts-es` functionality
 * 0.12.1 Add `AbstractHierarchy` and `AbstractHierarchyDevice` classes and
          functionality
 * 0.12.0 Add `Calibration_SDP_Subband_Weights_<XXX>MHz_R` attributes to implement HDF5 calibration tables
diff --git a/docker-compose/.env b/docker-compose/.env
index cf3bb93f44613f5716a64ba7b167cc952a3d921c..9fa405a50f3d8185f628bfd44ed2573c9ab6eb3f 100644
--- a/docker-compose/.env
+++ b/docker-compose/.env
@@ -15,9 +15,6 @@ TANGO_POGO_VERSION=9.6.35
 TANGO_REST_VERSION=1.14.7
 TANGO_STARTER_VERSION=2021-05-28
 
-PG_TIMESCALEDB_VERSION=latest-pg12
-PG_SUPERUSER_PASSWORD=password
-PG_HDB_PASSWORD=hdbpp
 MYSQL_ROOT_PASSWORD=secret
 MYSQL_PASSWORD=tango
 
diff --git a/docker-compose/Makefile b/docker-compose/Makefile
index 9ce582c3b1f62e01d83ea0b42b38fe5f6f65d228..37b2315b436d55c2ce10efe64198add8dcb0d9b2 100644
--- a/docker-compose/Makefile
+++ b/docker-compose/Makefile
@@ -206,7 +206,6 @@ bootstrap: pull build # first start, initialise from scratch
 	$(MAKE) start dsconfig # boot up containers to load configurations
 	sleep 5 # wait for dsconfig container to come up
 	../sbin/update_ConfigDb.sh ../CDB/LOFAR_ConfigDb.json # load default configuration
-	../sbin/update_ConfigDb.sh ../CDB/tango-archiver-data/archiver-devices.json # load default archive configuration
 	../sbin/update_ConfigDb.sh ../CDB/stations/simulators_ConfigDb.json # by default, use simulators
 
 start: up ## start a service (usage: make start <servicename>)
diff --git a/docker-compose/README.md b/docker-compose/README.md
index 964f08cca268e049b6173efdc1ebb898f358efd3..aa29afbe860399ced7694db1c65a61409995cdda 100644
--- a/docker-compose/README.md
+++ b/docker-compose/README.md
@@ -38,11 +38,6 @@ are used in production.
 - Services
   - databases
     - dsconfig
-    - [timescaledb](timescaledb/README.md)
-      - archiver-timescale
-      - hbdpp
-      - hbdpp-cm (ConfigurationManager)
-      - hbdpp-es (EventSubscriber)
     - prometheus
   - webservers / user interfaces
     - jupyterlab
@@ -78,31 +73,6 @@ registry and uploaded to our own using matching tags.
 
 Services, same mechanism as devices.
 
-### HDB++ image updates
-
-The hdbpp Docker image is used as a base image for the `hdbppts-cm`
-(ConfigurationManager) and `hdbppts-es` (EventSubscriber)
-images. If one is developing on a branch and any updates is made
-in hdbpp/Dockerfile, those won't be automatically picked up from `hdbppts-cm`
-and `hdbppts-es`, because the argument `SOURCE_IMAGE` in the docker-compose
-yml file always refers to the remote `hdbpp` image in the repository.
-
-A temporary workaround for locally testing on these archiving containers
-is the following:
-
-- Stop and remove any running `hdbpp*` container
-- In the archiver-timescale.yml file, replace the `hdbppts-cm` and `hdbppts-es`
-  `SOURCE_IMAGE` tag 'latest' with the branch name
-  (e.g. `SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:l2ss-new-branch`)
-- Rebuild all the hdbpp* container (`make build hdbpp hdbppts-cm hdbppts-es`),
-  and then start them (`make start hdbpp hdbppts-cm hdbppts-es`)
-- Test the new features
-
-After the branch has been correctly developed, tested, the merge has been
-approved, and the new images have been built on the repository:
-- Put back 'latest' tag on the `archiver-timescale.yml` file, replacing the branch name
-- Merge on master
-
 ## Gitlab CI/CD
 
 1. [Image tagging and change detection](#image-tagging-and-change-detection)
diff --git a/docker-compose/archiver-timescale.yml b/docker-compose/archiver-timescale.yml
deleted file mode 100644
index 4626e5863564f049bd03273ae4505acba67b2882..0000000000000000000000000000000000000000
--- a/docker-compose/archiver-timescale.yml
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-version: '2.1'
-
-volumes:
-  archiver-timescale-data: { }
-
-services:
-  archiver-timescale:
-    image: timescaledb
-    build:
-      context: timescaledb
-      args:
-        SOURCE_IMAGE: timescale/timescaledb:${PG_TIMESCALEDB_VERSION}
-    container_name: archiver-timescale
-    networks:
-      - control
-    ports:
-      - "5432:5432/tcp"
-    extra_hosts:
-      - "host.docker.internal:host-gateway"
-    volumes:
-      - archiver-timescale-data:/var/lib/postgresql/data
-    depends_on:
-      - databaseds
-    environment:
-      - POSTGRES_PASSWORD=${PG_SUPERUSER_PASSWORD}
-      - PG_HDB_PASSWORD=${PG_HDB_PASSWORD}
-      - TANGO_HOST=${TANGO_HOST}
-    healthcheck:
-      test: nc -z -v localhost 5432
-      interval: 1m
-      timeout: 30s
-      retries: 3
-      start_period: 30s
-    logging:
-      driver: syslog
-      options:
-        syslog-address: udp://${LOG_HOSTNAME}:1514
-        syslog-format: rfc3164
-        tag: "{{.Name}}"
-    restart: unless-stopped
-
-  hdbpp:
-    image: hdbpp
-    build:
-      context: hdbpp
-      args:
-        SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-cpp:${TANGO_CPP_VERSION}
-    container_name: hdbpp
-    networks:
-      - control
-    depends_on:
-      - databaseds
-      - dsconfig
-      - archiver-timescale
-    extra_hosts:
-      - "host.docker.internal:host-gateway"
-    environment:
-      - TANGO_HOST=${TANGO_HOST}
-
-  hdbppts-cm:
-    image: hdbppts-cm
-    build:
-      context: hdbppts-cm
-      args:
-        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:latest
-    container_name: hdbppts-cm
-    networks:
-      - control
-    depends_on:
-      - databaseds
-      - dsconfig
-      - archiver-timescale
-    extra_hosts:
-      - "host.docker.internal:host-gateway"
-    environment:
-      - TANGO_HOST=${TANGO_HOST}
-      - HdbManager=archiving/hdbppts/confmanager01
-    command: >
-      /bin/bash -c "
-      wait-for-it.sh archiver-timescale:5432 --timeout=30 --strict --
-      wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict --
-            hdbppcm-srv 01"
-    logging:
-      driver: syslog
-      options:
-        syslog-address: udp://${LOG_HOSTNAME}:1514
-        syslog-format: rfc3164
-        tag: "{{.Name}}"
-
-  hdbppts-es:
-    image: hdbppts-es
-    build:
-      context: hdbppts-es
-      args:
-        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:latest
-    container_name: hdbppts-es
-    networks:
-      - control
-    depends_on:
-      - hdbppts-cm
-      - databaseds
-      - dsconfig
-      - archiver-timescale
-    extra_hosts:
-      - "host.docker.internal:host-gateway"
-    environment:
-      - TANGO_HOST=${TANGO_HOST}
-      - HdbManager=archiving/hdbppts/confmanager01
-    command: >
-      /bin/bash -c "
-      wait-for-it.sh archiver-timescale:5432 --timeout=30 --strict --
-      wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict --
-            hdbppes-srv 01"
-    logging:
-      driver: syslog
-      options:
-        syslog-address: udp://${LOG_HOSTNAME}:1514
-        syslog-format: rfc3164
-        tag: "{{.Name}}"
-    restart: unless-stopped
diff --git a/docker-compose/grafana/datasources/archiver-timescale.yaml b/docker-compose/grafana/datasources/archiver-timescale.yaml
deleted file mode 100644
index af3a3abdebd6f0e9c9a15edca39dd2f53ee76d27..0000000000000000000000000000000000000000
--- a/docker-compose/grafana/datasources/archiver-timescale.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-apiVersion: 1
-
-datasources:
-  # <string, required> name of the datasource. Required
-  - name: TimescaleDB
-    # <string, required> datasource type. Required
-    type: postgres
-    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
-    access: proxy
-    # <int> org id. will default to orgId 1 if not specified
-    orgId: 1
-    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
-    uid: timescaledb
-    # <string> url
-    url: archiver-timescale
-    # <string> Deprecated, use secureJsonData.password
-    password:
-    # <string> database user, if used
-    user: postgres
-    # <string> database name, if used
-    database: hdb
-    # <bool> enable/disable basic auth
-    basicAuth: false
-    # <string> basic auth username
-    basicAuthUser:
-    # <string> Deprecated, use secureJsonData.basicAuthPassword
-    basicAuthPassword:
-    # <bool> enable/disable with credentials headers
-    withCredentials:
-    # <bool> mark as default datasource. Max one per org
-    isDefault: false
-    # <map> fields that will be converted to json and stored in jsonData
-    jsonData:
-      # <string> determines whether or with what priority a secure TLS/SSL TCP/IP connection will be negotiated with the server.
-      sslmode: "disable"
-      # <bool> enable TimescaleDB
-      timescaledb: true
-    # <string> json object of data that will be encrypted.
-    secureJsonData:
-      # <string> database password, if used
-      password: password
-    version: 1
-    # <bool> allow users to edit datasources from the UI.
-    editable: false
-    
diff --git a/docker-compose/hdbpp/Dockerfile b/docker-compose/hdbpp/Dockerfile
deleted file mode 100644
index 16fe7d772997d3911015d1816aa5be375c95573c..0000000000000000000000000000000000000000
--- a/docker-compose/hdbpp/Dockerfile
+++ /dev/null
@@ -1,104 +0,0 @@
-ARG SOURCE_IMAGE
-FROM ${SOURCE_IMAGE}
-
-USER root
-
-RUN apt-get update && \
-    apt-get install -y ca-certificates
-
-RUN echo "deb http://deb.debian.org/debian buster-backports main contrib non-free" >> /etc/apt/sources.list && \
-    more /etc/apt/sources.list && \
-    apt-get update && \
-    apt-get install -y \
-        checkinstall \
-        git \
-        cmake \
-        make \
-        g++ \
-        libomniorb4-dev \
-        libzmq3-dev \
-        libcos4-dev \
-        mariadb-server \
-        libmariadb-dev-compat libmariadb-dev \
-        libmariadbclient-dev \
-        postgresql \
-        postgresql-contrib \
-        libpq5 \
-        libpqxx-6.2 \
-        libpq-dev \
-        libpqxx-dev       
-
-
-# ----------- LIBHDB++ -----------------------------
-
-RUN git clone https://gitlab.com/tango-controls/hdbpp/libhdbpp.git
-
-RUN cd libhdbpp \
- && mkdir build \
- && cd build \
- && cmake .. -DCMAKE_INCLUDE_PATH=/usr/local/include/tango \
- && make -j4
-
-RUN cd libhdbpp/build \
- && checkinstall \
-    --install=yes \
-    --fstrans=no \
-    --showinstall=no \
-    --backup=no \
-    --type=debian \
-    --pkgsource="https://gitlab.com/tango-controls/hdbpp/libhdbpp" \
-    --pkglicense="LGPLv3" \
-    --deldesc=no \
-    --nodoc \
-    --strip \
-    --stripso \
-    --maintainer="tango" \
-    --pkgarch=$(dpkg --print-architecture) \
-    --pkgversion="2.0.0" \
-    --pkgrelease="SNAPSHOT" \
-    --pkgname="libhdbpp" \
-    --requires="libzmq5,libomniorb4-2,libcos4-2,libomnithread4" \
-    make install
-
-
-# ----------- LIBHDB++ TIMESCALE ---------------
-
-#RUN git clone -b image_support --recurse-submodules https://github.com/tango-controls-hdbpp/libhdbpp-timescale.git
-RUN git clone -b image_support_lofar_fixes --recurse-submodules https://git.astron.nl/lofar2.0/libhdbpp-timescale.git
-
-RUN cd libhdbpp-timescale \
- && mkdir -p build \
- && cd build \
- && cmake .. -DCMAKE_PREFIX_PATH=/usr/local/include/tango -DPostgreSQL_TYPE_INCLUDE_DIR=/usr/local/include/postgresql \
- && make -j4
- 
-RUN cd libhdbpp-timescale/build \
- && checkinstall \
-    --install=yes \
-    --fstrans=no \
-    --showinstall=no \
-    --backup=no \
-    --type=debian \
-    --pkgsource="https://github.com/tango-controls-hdbpp/libhdbpp-timescale" \
-    --pkglicense="LGPLv3" \
-    --deldesc=no \
-    --nodoc \
-    --strip \
-    --stripso \
-    --maintainer="tango" \
-    --pkgarch=$(dpkg --print-architecture) \
-    --pkgversion="2.0.0" \
-    --pkgrelease="SNAPSHOT" \
-    --pkgname="libhdbpp-timescale" \
-    --requires="libpq5" \
-    make install  
-
-RUN apt-get update && \
-    apt-get install -y \
-    build-essential && \
-    apt-get clean
-
-RUN dpkg -i /libhdbpp/build/libhdbpp_2.0.0-SNAPSHOT_amd64.deb
-RUN dpkg -i /libhdbpp-timescale/build/libhdbpp-timescale_2.0.0-SNAPSHOT_amd64.deb
-
-RUN ldconfig
diff --git a/docker-compose/hdbppts-cm/Dockerfile b/docker-compose/hdbppts-cm/Dockerfile
deleted file mode 100644
index 71afb54d57d0c560c902e5b9ec588d8bb9463c56..0000000000000000000000000000000000000000
--- a/docker-compose/hdbppts-cm/Dockerfile
+++ /dev/null
@@ -1,46 +0,0 @@
-ARG SOURCE_IMAGE
-FROM ${SOURCE_IMAGE}
-
-# ----------- HDB++ CONFIGURATION MANAGER ---------------
-
-RUN git clone https://gitlab.com/tango-controls/hdbpp/hdbpp-cm.git
-
-RUN cd hdbpp-cm \
- && mkdir -p build \
- && cd build \
- && cmake .. -DCMAKE_PREFIX_PATH=/usr/local/include/tango \
- && make -j4
-
-RUN cd hdbpp-cm/build \
- && checkinstall \
-    --install=yes \
-    --fstrans=no \
-    --showinstall=no \
-    --backup=no \
-    --type=debian \
-    --pkgsource="https://gitlab.com/tango-controls/hdbpp/hdbpp-cm" \
-    --pkglicense="GPLv3" \
-    --deldesc=no \
-    --nodoc \
-    --strip \
-    --stripso \
-    --maintainer="tango" \
-    --pkgarch=$(dpkg --print-architecture) \
-    --pkgversion="2.0.0" \
-    --pkgrelease="SNAPSHOT" \
-    --pkgname="hdbpp-cm" \
-    --requires="libzmq5,libomniorb4-2,libcos4-2,libomnithread4" \
-    make install
-
-RUN apt-get update && \
-    apt-get install -y \
-    build-essential && \
-    apt-get clean
-
-RUN dpkg -i /libhdbpp/build/libhdbpp_2.0.0-SNAPSHOT_amd64.deb
-RUN dpkg -i /libhdbpp-timescale/build/libhdbpp-timescale_2.0.0-SNAPSHOT_amd64.deb
-RUN dpkg -i /hdbpp-cm/build/hdbpp-cm_2.0.0-SNAPSHOT_amd64.deb
-
-RUN ldconfig
-
-RUN mv /usr/local/bin/hdb++cm-srv /usr/local/bin/hdbppcm-srv
diff --git a/docker-compose/hdbppts-es/Dockerfile b/docker-compose/hdbppts-es/Dockerfile
deleted file mode 100644
index 231ecb3d7c4da3979628b54ef04848aab212cfd2..0000000000000000000000000000000000000000
--- a/docker-compose/hdbppts-es/Dockerfile
+++ /dev/null
@@ -1,46 +0,0 @@
-ARG SOURCE_IMAGE
-FROM ${SOURCE_IMAGE}
-
-# ----------- HDB++ EVENT SUBSCRIBER ---------------
-
-RUN git clone https://gitlab.com/tango-controls/hdbpp/hdbpp-es.git
-
-RUN cd hdbpp-es \
- && mkdir -p build \
- && cd build \
- && cmake .. -DCMAKE_PREFIX_PATH=/usr/local/include/tango -DFETCH_LIBHDBPP=OFF -DLIBHDBPP_BACKEND=timescale -DPostgreSQL_TYPE_INCLUDE_DIR=/usr/local/include/postgresql \
- && make -j4 
- 
-RUN cd hdbpp-es/build \
- && checkinstall \
-    --install=yes \
-    --fstrans=no \
-    --showinstall=no \
-    --backup=no \
-    --type=debian \
-    --pkgsource="https://gitlab.com/tango-controls/hdbpp/hdbpp-es" \
-    --pkglicense="GPLv3" \
-    --deldesc=no \
-    --nodoc \
-    --strip \
-    --stripso \
-    --maintainer="tango" \
-    --pkgarch=$(dpkg --print-architecture) \
-    --pkgversion="2.0.0" \
-    --pkgrelease="SNAPSHOT" \
-    --pkgname="hdbpp-es" \
-    --requires="libzmq5,libomniorb4-2,libcos4-2,libomnithread4" \
-    make install
-
-RUN apt-get update && \
-    apt-get install -y \
-    build-essential && \
-    apt-get clean
-
-RUN dpkg -i /libhdbpp/build/libhdbpp_2.0.0-SNAPSHOT_amd64.deb
-RUN dpkg -i /libhdbpp-timescale/build/libhdbpp-timescale_2.0.0-SNAPSHOT_amd64.deb
-RUN dpkg -i /hdbpp-es/build/hdbpp-es_2.0.0-SNAPSHOT_amd64.deb
-
-RUN ldconfig
-
-RUN mv /usr/local/bin/hdb++es-srv /usr/local/bin/hdbppes-srv
diff --git a/docker-compose/jupyterlab/requirements.txt b/docker-compose/jupyterlab/requirements.txt
index 52a55ba3aa9987585fe130a34271d9ef840b946e..b84764bf482f2b1ab5d9cf6ddc7208319e69d2c0 100644
--- a/docker-compose/jupyterlab/requirements.txt
+++ b/docker-compose/jupyterlab/requirements.txt
@@ -11,7 +11,6 @@ notebook-as-pdf
 python-logstash-async
 PyMySQL[rsa]
 psycopg2-binary >= 2.9.2 #LGPL
-sqlalchemy
 pyvisa
 pyvisa-py
 opcua
diff --git a/docker-compose/logstash/loki.conf b/docker-compose/logstash/loki.conf
index 0423b33144dbe6d61fb22e583c9385ce99a7dd90..b0b22e26996f82f5dcdb9561183090abb131dbad 100644
--- a/docker-compose/logstash/loki.conf
+++ b/docker-compose/logstash/loki.conf
@@ -97,16 +97,16 @@ filter {
 }
 
 filter {
-  # mark all our mariadb instances
+  # mark our tangodb instances
   grok {
     match => {
-      "program" => [ "archiver-maria-db", "tangodb" ]
+      "program" => ["tangodb" ]
     }
-    add_tag => [ "mariadb" ]
+    add_tag => [ "tangodb" ]
   }
 
-  # parse mariadb output
-  if "mariadb" in [tags] {
+  # parse tangodb output
+  if "tangodb" in [tags] {
     grok {
       match => {
         "message" => [
diff --git a/docker-compose/timescaledb/Dockerfile b/docker-compose/timescaledb/Dockerfile
deleted file mode 100644
index 73932cadd73c5bc0f2ac6d4b0d72d98bcb979d25..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/Dockerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-ARG SOURCE_IMAGE
-FROM ${SOURCE_IMAGE}
-
-# Set admin role to perform DB creation
-COPY resources/01_admin.sh docker-entrypoint-initdb.d/002_admin.sh
-# Create DB schema (tables, indexes, etc.)
-COPY resources/02_hdb_schema.sql docker-entrypoint-initdb.d/003_hdb_schema.sql
-COPY resources/03_hdb_images.sql docker-entrypoint-initdb.d/004_hdb_images.sql
-# Create DB roles
-COPY resources/04_hdb_roles.sql docker-entrypoint-initdb.d/005_hdb_roles.sql
-# Create further roles
-COPY resources/05_hdb_ext_users.sql docker-entrypoint-initdb.d/006_hdb_ext_users.sql
-# Add further functions
-COPY resources/06_hdb_ext_import.sql docker-entrypoint-initdb.d/007_hdb_ext_import.sql
-# Create timescaledb aggregates
-COPY resources/07_hdb_ext_aggregates.sql docker-entrypoint-initdb.d/008_hdb_ext_aggregates.sql
-COPY resources/08_hdb_ext_arrays_aggregates_helper.sql docker-entrypoint-initdb.d/009_hdb_ext_arrays_aggregates_helper.sql
-COPY resources/09_hdb_ext_arrays_aggregates.sql docker-entrypoint-initdb.d/010_hdb_ext_arrays_aggregates.sql
-# Add compress policy
-COPY resources/10_hdb_ext_compress_policy.sql docker-entrypoint-initdb.d/011_hdb_ext_compress_policy.sql
-# Add reorder policy
-COPY resources/11_hdb_ext_reorder_policy.sql docker-entrypoint-initdb.d/012_hdb_ext_reorder_policy.sql
-# Add LOFAR functions and views
-COPY resources/12_lofar_func.sh docker-entrypoint-initdb.d/013_lofar_func.sh
-COPY resources/13_lofar_views.sql docker-entrypoint-initdb.d/014_lofar_views.sql
-# Cleanup admin role
-COPY resources/14_cleanup.sql docker-entrypoint-initdb.d/015_cleanup.sql
diff --git a/docker-compose/timescaledb/README.md b/docker-compose/timescaledb/README.md
deleted file mode 100644
index 1aee7e0eddb909b8902e1442b14cc81a7611c98c..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# TimescaleDB Docker Image
-
-The Dockerfile in this directory allows to create a container with a
-PostrgreSQL-Timescale DBMS (https://www.timescale.com/), and then initialise
-it with the DB schema required by the Tango Archiving framework.
-
-The main image is pulled from the official PostgreSQL repository in the
-Docker Hub (https://hub.docker.com/_/postgres). This image offers several
-features to customize and extend itself.
-
-## Initialization scripts
-
-If you would like to do additional initialization in an image derived from
-the Postgres official one, add one or more *.sql, *.sql.gz, or *.sh scripts
-under /docker-entrypoint-initdb.d (creating the directory if necessary).
-After the entrypoint calls initdb to create the default postgres user and
-database, it will run any *.sql files, run any executable *.sh scripts, and
-source any non-executable *.sh scripts found in that directory to do further
-initialization before starting the service.
-
-The script files in the directory /docker-entrypoint-initdb.d are sequentially
-executed following their preempted number in the filename. Hence, the first
-ones (000_install_timescaledb.sh and 001_timescaledb_tune.sh) are provided by
-default.
-
-The next ones have been pulled from the official Tango repository in order to
-create the desired DB schema. These files are in the 'resources' directory
-and they have been pulled from Tango-Hdbpp_Timescale_Project
-(https://github.com/tango-controls-hdbpp/hdbpp-timescale-project/tree/master/resources/schema):
-- admin.sql creates the admin user that will create the tables
-- hdb_schema.sql creates the standard Tango Archiving DB (This is the only
-  MANDATORY script)
-- hdb_roles.sql creates additional roles
-- hdb_ext_aggregates.sql creates the continuous aggregate views
-  (https://docs.timescale.com/timescaledb/latest/how-to-guides/continuous-aggregates/)
-- cleanup.sql strips the SUPERUSER trait from hdb_admin
-Last updates on these scripts are dated to August 2021 (more info can be found
-  at https://github.com/tango-controls-hdbpp/hdbpp-timescale-project/blob/master/doc/db-schema-config.md)
diff --git a/docker-compose/timescaledb/resources/01_admin.sh b/docker-compose/timescaledb/resources/01_admin.sh
deleted file mode 100644
index 5cf506b30d9a992e47520b98ae23aa8a41dc98a3..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/01_admin.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-psql << EOF
-CREATE ROLE hdb_admin WITH LOGIN PASSWORD '${PG_HDB_PASSWORD}';
-ALTER USER hdb_admin CREATEDB;
-ALTER USER hdb_admin CREATEROLE;
-ALTER USER hdb_admin SUPERUSER;
-EOF
diff --git a/docker-compose/timescaledb/resources/02_hdb_schema.sql b/docker-compose/timescaledb/resources/02_hdb_schema.sql
deleted file mode 100644
index db73bf2a4c8888171dded345bf806a676e6fb6a3..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/02_hdb_schema.sql
+++ /dev/null
@@ -1,730 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
--- Create the hdb database and use it
-CREATE DATABASE hdb;
-\c hdb
-
--- Add the timescaledb extension (Important)
-CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;
-
--------------------------------------------------------------------------------
-DO $$ BEGIN
-    CREATE DOMAIN uchar AS numeric(3) -- ALT smallint
-        CHECK(VALUE >= 0 AND VALUE <= 255);
-EXCEPTION
-    WHEN duplicate_object THEN null;
-END $$;
-
-DO $$ BEGIN
-    CREATE DOMAIN ushort AS numeric(5)  -- ALT integer
-        CHECK(VALUE >= 0 AND VALUE <= 65535);
-EXCEPTION
-    WHEN duplicate_object THEN null;
-END $$;
-
-DO $$ BEGIN
-    CREATE DOMAIN ulong AS numeric(10) -- ALT bigint
-        CHECK(VALUE >= 0 AND VALUE <= 4294967295);
-EXCEPTION
-    WHEN duplicate_object THEN null;
-END $$;
-
-DO $$ BEGIN
-    CREATE DOMAIN ulong64 AS numeric(20)
-        CHECK(VALUE >= 0 AND VALUE <= 18446744073709551615);
-EXCEPTION
-    WHEN duplicate_object THEN null;
-END $$;
--------------------------------------------------------------------------------
-
--- Mappings for ths Tango Data Type (used in att_conf)
-CREATE TABLE IF NOT EXISTS att_conf_type (
-    att_conf_type_id serial NOT NULL,
-    type text NOT NULL UNIQUE,
-    type_num smallint NOT NULL UNIQUE,
-    PRIMARY KEY (att_conf_type_id)
-);
-
-COMMENT ON TABLE att_conf_type is 'Attribute data type';
-
-INSERT INTO att_conf_type (type, type_num) VALUES
-('DEV_BOOLEAN', 1),('DEV_SHORT', 2),('DEV_LONG', 3),('DEV_FLOAT', 4),
-('DEV_DOUBLE', 5),('DEV_USHORT', 6),('DEV_ULONG', 7),('DEV_STRING', 8),
-('DEV_STATE', 19),('DEV_UCHAR',22),('DEV_LONG64', 23),('DEV_ULONG64', 24),
-('DEV_ENCODED', 28),('DEV_ENUM', 30);
-
--- Mappings for ths Tango Data Format Type (used in att_conf)
-CREATE TABLE IF NOT EXISTS att_conf_format (
-    att_conf_format_id serial NOT NULL,
-    format text NOT NULL UNIQUE,
-    format_num smallint NOT NULL UNIQUE,
-    PRIMARY KEY (att_conf_format_id)
-);
-
-COMMENT ON TABLE att_conf_format is 'Attribute format type';
-
-INSERT INTO att_conf_format (format, format_num) VALUES
-('SCALAR', 0),('SPECTRUM', 1),('IMAGE', 2);
-
--- Mappings for the Tango Data Write Type (used in att_conf)
-CREATE TABLE IF NOT EXISTS att_conf_write (
-    att_conf_write_id serial NOT NULL,
-    write text NOT NULL UNIQUE,
-    write_num smallint NOT NULL UNIQUE,
-    PRIMARY KEY (att_conf_write_id)
-);
-
-COMMENT ON TABLE att_conf_write is 'Attribute write type';
-
-INSERT INTO att_conf_write (write, write_num) VALUES
-('READ', 0),('READ_WITH_WRITE', 1),('WRITE', 2),('READ_WRITE', 3);
-
--- The att_conf table contains the primary key for all data tables, the
--- att_conf_id. Expanded on the normal hdb++ tables since we add information
--- about the type.
-CREATE TABLE IF NOT EXISTS att_conf (
-    att_conf_id serial NOT NULL,
-    att_name text NOT NULL,
-    att_conf_type_id smallint NOT NULL,
-    att_conf_format_id smallint NOT NULL,
-    att_conf_write_id smallint NOT NULL,
-    table_name text NOT NULL,
-    cs_name text NOT NULL DEFAULT '',
-    domain text NOT NULL DEFAULT '',
-    family text NOT NULL DEFAULT '',
-    member text NOT NULL DEFAULT '',
-    name text NOT NULL DEFAULT '',
-    ttl int,
-    hide boolean DEFAULT false,
-    PRIMARY KEY (att_conf_id),
-    FOREIGN KEY (att_conf_type_id) REFERENCES att_conf_type (att_conf_type_id),
-    FOREIGN KEY (att_conf_format_id) REFERENCES att_conf_format (att_conf_format_id),
-    FOREIGN KEY (att_conf_write_id) REFERENCES att_conf_write (att_conf_write_id),
-    UNIQUE (att_name)
-);
-
-COMMENT ON TABLE att_conf is 'Attribute Configuration Table';
-
--------------------------------------------------------------------------------
-CREATE TABLE IF NOT EXISTS att_history_event (
-    att_history_event_id serial NOT NULL,
-    event text NOT NULL,
-    PRIMARY KEY (att_history_event_id)
-);
-
-COMMENT ON TABLE att_history_event IS 'Attribute history events description';
-CREATE INDEX IF NOT EXISTS att_history_att_history_event_id_idx ON att_history_event (att_history_event_id);
-
-CREATE TABLE IF NOT EXISTS att_history (
-    att_conf_id integer NOT NULL,
-    att_history_event_id integer NOT NULL,
-    event_time timestamp WITH TIME ZONE,
-    details json,
-    PRIMARY KEY (att_conf_id, event_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_history_event_id) REFERENCES att_history_event (att_history_event_id)
-);
-
-COMMENT ON TABLE att_history is 'Attribute Configuration Events History Table';
-
--------------------------------------------------------------------------------
-CREATE TABLE IF NOT EXISTS att_parameter (
-    att_conf_id integer NOT NULL,
-    recv_time timestamp WITH TIME ZONE NOT NULL,
-    label text NOT NULL DEFAULT '',
-    unit text NOT NULL DEFAULT '',
-    standard_unit text NOT NULL DEFAULT '',
-    display_unit text NOT NULL DEFAULT '',
-    format text NOT NULL DEFAULT '',
-    archive_rel_change text NOT NULL DEFAULT '',
-    archive_abs_change text NOT NULL DEFAULT '',
-    archive_period text NOT NULL DEFAULT '',
-    description text NOT NULL DEFAULT '',
-    details json,
-    enum_labels text[] NOT NULL DEFAULT ARRAY[]::text[],
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id)
-);
-
--- ALTER statement if the table was already created
--- ALTER TABLE att_parameter ADD COLUMN enum_labels text[] NOT NULL DEFAULT ARRAY[]::text[];
-
-COMMENT ON TABLE att_parameter IS 'Attribute configuration parameters';
-CREATE INDEX IF NOT EXISTS att_parameter_recv_time_idx ON att_parameter (recv_time);
-CREATE INDEX IF NOT EXISTS att_parameter_att_conf_id_idx ON  att_parameter (att_conf_id);
-SELECT create_hypertable('att_parameter', 'recv_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
--------------------------------------------------------------------------------
-CREATE TABLE IF NOT EXISTS att_error_desc (
-    att_error_desc_id serial NOT NULL,
-    error_desc text NOT NULL,
-    PRIMARY KEY (att_error_desc_id),
-    UNIQUE (error_desc)
-);
-
-COMMENT ON TABLE att_error_desc IS 'Error Description Table';
-
--------------------------------------------------------------------------------
-CREATE TABLE IF NOT EXISTS att_scalar_devboolean (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r boolean,
-    value_w boolean,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devboolean IS 'Scalar Boolean Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devboolean_att_conf_id_idx ON att_scalar_devboolean (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devboolean_att_conf_id_data_time_idx ON att_scalar_devboolean (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devboolean', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devboolean (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r boolean[],
-    value_w boolean[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devboolean IS 'Array Boolean Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devboolean_att_conf_id_idx ON att_array_devboolean (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devboolean_att_conf_id_data_time_idx ON att_array_devboolean (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devboolean', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devuchar (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r uchar,
-    value_w uchar,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devuchar IS 'Scalar UChar Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devuchar_att_conf_id_idx ON att_scalar_devuchar (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devuchar_att_conf_id_data_time_idx ON att_scalar_devuchar (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devuchar', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devuchar (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r uchar[],
-    value_w uchar[],
-    quality smallint,
-    details json,
-    att_error_desc_id integer,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devuchar IS 'Array UChar Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devuchar_att_conf_id_idx ON att_array_devuchar (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devuchar_att_conf_id_data_time_idx ON att_array_devuchar (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devuchar', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devshort (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r smallint,
-    value_w smallint,
-    quality smallint,
-    details json,
-    att_error_desc_id integer,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devshort IS 'Scalar Short Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devshort_att_conf_id_idx ON att_scalar_devshort (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devshort_att_conf_id_data_time_idx ON att_scalar_devshort (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devshort', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devshort (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r smallint[],
-    value_w smallint[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devshort IS 'Array Short Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devshort_att_conf_id_idx ON att_array_devshort (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devshort_att_conf_id_data_time_idx ON att_array_devshort (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devshort', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devushort (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ushort,
-    value_w ushort,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devushort IS 'Scalar UShort Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devushort_att_conf_id_idx ON att_scalar_devushort (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devushort_att_conf_id_data_time_idx ON att_scalar_devushort (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devushort', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devushort (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ushort[],
-    value_w ushort[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devushort IS 'Array UShort Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devushort_att_conf_id_idx ON att_array_devushort (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devushort_att_conf_id_data_time_idx ON att_array_devushort (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devushort', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devlong (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r integer,
-    value_w integer,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devlong IS 'Scalar Long Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devlong_att_conf_id_idx ON att_scalar_devlong (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devlong_att_conf_id_data_time_idx ON att_scalar_devlong (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devlong', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devlong (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r integer[],
-    value_w integer[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devlong IS 'Array Long Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devlong_att_conf_id_idx ON att_array_devlong (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devlong_att_conf_id_data_time_idx ON att_array_devlong (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devlong', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devulong (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ulong,
-    value_w ulong,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devulong IS 'Scalar ULong Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devulong_att_conf_id_idx ON att_scalar_devulong (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devulong_att_conf_id_data_time_idx ON att_scalar_devulong (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devulong', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devulong (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ulong[],
-    value_w ulong[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devulong IS 'Array ULong Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devulong_att_conf_id_idx ON att_array_devulong (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devulong_att_conf_id_data_time_idx ON att_array_devulong (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devulong', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devlong64 (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r bigint,
-    value_w bigint,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devlong64 IS 'Scalar Long64 Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devlong64_att_conf_id_idx ON att_scalar_devlong64 (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devlong64_att_conf_id_data_time_idx ON att_scalar_devlong64 (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devlong64', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devlong64 (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r bigint[],
-    value_w bigint[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devlong64 IS 'Array Long64 Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devlong64_att_conf_id_idx ON att_array_devlong64 (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devlong64_att_conf_id_data_time_idx ON att_array_devlong64 (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devlong64', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devulong64 (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ulong64,
-    value_w ulong64,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devulong64 IS 'Scalar ULong64 Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devulong64_att_conf_id_idx ON att_scalar_devulong64 (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devulong64_att_conf_id_data_time_idx ON att_scalar_devulong64 (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devulong64', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devulong64 (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ulong64[],
-    value_w ulong64[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devulong64 IS 'Array ULong64 Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devulong64_att_conf_id_idx ON att_array_devulong64 (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devulong64_att_conf_id_data_time_idx ON att_array_devulong64 (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devulong64', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devfloat (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r real,
-    value_w real,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devfloat IS 'Scalar Float Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devfloat_att_conf_id_idx ON att_scalar_devfloat (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devfloat_att_conf_id_data_time_idx ON att_scalar_devfloat (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devfloat', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devfloat (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r real[],
-    value_w real[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devfloat IS 'Array Float Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devfloat_att_conf_id_idx ON att_array_devfloat (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devfloat_att_conf_id_data_time_idx ON att_array_devfloat (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devfloat', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devdouble (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r double precision,
-    value_w double precision,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devdouble IS 'Scalar Double Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devdouble_att_conf_id_idx ON att_scalar_devdouble (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devdouble_att_conf_id_data_time_idx ON att_scalar_devdouble (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devdouble', 'data_time', chunk_time_interval => interval '14 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devdouble (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r double precision[],
-    value_w double precision[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devdouble IS 'Array Double Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devdouble_att_conf_id_idx ON att_array_devdouble (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devdouble_att_conf_id_data_time_idx ON att_array_devdouble (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devdouble', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devstring (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r text,
-    value_w text,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devstring IS 'Scalar String Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devstring_att_conf_id_idx ON att_scalar_devstring (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devstring_att_conf_id_data_time_idx ON att_scalar_devstring (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devstring', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devstring (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r text[],
-    value_w text[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devstring IS 'Array String Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devstring_att_conf_id_idx ON att_array_devstring (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devstring_att_conf_id_data_time_idx ON att_array_devstring (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devstring', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devstate (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r integer,
-    value_w integer,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devstate IS 'Scalar State Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devstate_att_conf_id_idx ON att_scalar_devstate (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devstate_att_conf_id_data_time_idx ON att_scalar_devstate (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devstate', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devstate (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r integer[],
-    value_w integer[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devstate IS 'Array State Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devstate_att_conf_id_idx ON att_array_devstate (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devstate_att_conf_id_data_time_idx ON att_array_devstate (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devstate', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_scalar_devencoded (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r bytea,
-    value_w bytea,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-COMMENT ON TABLE att_scalar_devencoded IS 'Scalar DevEncoded Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devencoded_att_conf_id_idx ON att_scalar_devencoded (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devencoded_att_conf_id_data_time_idx ON att_scalar_devencoded (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devencoded', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_array_devencoded (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r bytea[],
-    value_w bytea[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-COMMENT ON TABLE att_array_devencoded IS 'Array DevEncoded Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devencoded_att_conf_id_idx ON att_array_devencoded (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devencoded_att_conf_id_data_time_idx ON att_array_devencoded (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devencoded', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
--- The Enum tables are unique in that they store a value and text label for 
--- each data point
-CREATE TABLE IF NOT EXISTS att_scalar_devenum (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r_label text,
-    value_r smallint,
-    value_w_label text,
-    value_w smallint,
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_scalar_devenum IS 'Scalar Enum Values Table';
-CREATE INDEX IF NOT EXISTS att_scalar_devenum_att_conf_id_idx ON att_scalar_devenum (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_scalar_devenum_att_conf_id_data_time_idx ON att_scalar_devenum (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_scalar_devenum', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
--- Trigger to set the enum_labels
-CREATE OR REPLACE FUNCTION set_enum_label() RETURNS TRIGGER AS $$
-DECLARE
-BEGIN
-    IF NEW.value_r IS NOT NULL THEN
-        NEW.value_r_label := (SELECT enum_labels[NEW.value_r + 1] FROM att_parameter WHERE att_conf_id=NEW.att_conf_id ORDER BY recv_time DESC LIMIT 1);
-    END IF;
-    IF NEW.value_w IS NOT NULL THEN
-        NEW.value_w_label := (SELECT enum_labels[NEW.value_w + 1] FROM att_parameter WHERE att_conf_id=NEW.att_conf_id ORDER BY recv_time DESC LIMIT 1);
-    END IF;
-    RETURN NEW;
-END
-$$ LANGUAGE plpgsql;
-
-CREATE TRIGGER enum_label_trigger BEFORE INSERT ON att_scalar_devenum FOR EACH ROW EXECUTE PROCEDURE set_enum_label();
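-
--- Example (hypothetical attribute 42 whose latest att_parameter row has
--- enum_labels = '{OFF,ON,FAULT}'): Tango enum values are 0-based while
--- PostgreSQL arrays are 1-indexed, hence the "+ 1" above, so
---   INSERT INTO att_scalar_devenum (att_conf_id, data_time, value_r)
---   VALUES (42, now(), 1);
--- stores value_r_label = 'ON'.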
-
-CREATE TABLE IF NOT EXISTS att_array_devenum (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r_label text[],
-    value_r smallint[],
-    value_w_label text[],
-    value_w smallint[],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_array_devenum IS 'Array Enum Values Table';
-CREATE INDEX IF NOT EXISTS att_array_devenum_att_conf_id_idx ON att_array_devenum (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_array_devenum_att_conf_id_data_time_idx ON att_array_devenum (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_array_devenum', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
--- Trigger to set the enum_labels
-CREATE OR REPLACE FUNCTION set_enum_label_array() RETURNS TRIGGER AS $$
-DECLARE
-BEGIN
-    IF NEW.value_r IS NOT NULL THEN
-        WITH enum_labels AS (
-            SELECT enum_labels FROM att_parameter WHERE att_conf_id=NEW.att_conf_id ORDER BY recv_time DESC LIMIT 1
-        )
-        SELECT array_agg(res) FROM (SELECT enum_labels[UNNEST(NEW.value_r) + 1] FROM enum_labels) AS res INTO NEW.value_r_label;
-    END IF;
-    IF NEW.value_w IS NOT NULL THEN
-        WITH enum_labels AS (
-            SELECT enum_labels FROM att_parameter WHERE att_conf_id=NEW.att_conf_id ORDER BY recv_time DESC LIMIT 1
-        )
-        SELECT array_agg(res) FROM (SELECT enum_labels[UNNEST(NEW.value_w) + 1] FROM enum_labels) AS res INTO NEW.value_w_label;
-    END IF;
-    RETURN NEW;
-END
-$$ LANGUAGE plpgsql;
-
-CREATE TRIGGER enum_label_trigger BEFORE INSERT ON att_array_devenum FOR EACH ROW EXECUTE PROCEDURE set_enum_label_array();
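-
--- Example (same hypothetical labels '{OFF,ON,FAULT}'): the array variant
--- maps every element through UNNEST, so inserting value_r = '{0,2}' is
--- meant to yield value_r_label = '{OFF,FAULT}'.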
diff --git a/docker-compose/timescaledb/resources/03_hdb_images.sql b/docker-compose/timescaledb/resources/03_hdb_images.sql
deleted file mode 100644
index 7eb58950bade5cf9af0551a6484f809b4bf7f3a8..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/03_hdb_images.sql
+++ /dev/null
@@ -1,298 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
-\c hdb
-
-CREATE TABLE IF NOT EXISTS att_image_devboolean (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r boolean[][],
-    value_w boolean[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devboolean IS 'Image Boolean Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devboolean_att_conf_id_idx ON att_image_devboolean (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devboolean_att_conf_id_data_time_idx ON att_image_devboolean (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devboolean', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
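-
--- The image tables differ from the array tables only in dimensionality:
--- values are two-dimensional arrays. A hypothetical 2x2 insert:
---   INSERT INTO att_image_devboolean (att_conf_id, data_time, value_r)
---   VALUES (42, now(), ARRAY[[true, false], [false, true]]);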
-
-CREATE TABLE IF NOT EXISTS att_image_devuchar (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r uchar[][],
-    value_w uchar[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devuchar IS 'Image UChar Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devuchar_att_conf_id_idx ON att_image_devuchar (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devuchar_att_conf_id_data_time_idx ON att_image_devuchar (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devuchar', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devshort (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r smallint[][],
-    value_w smallint[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devshort IS 'Image Short Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devshort_att_conf_id_idx ON att_image_devshort (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devshort_att_conf_id_data_time_idx ON att_image_devshort (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devshort', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devushort (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ushort[][],
-    value_w ushort[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devushort IS 'Image UShort Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devushort_att_conf_id_idx ON att_image_devushort (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devushort_att_conf_id_data_time_idx ON att_image_devushort (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devushort', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devlong (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r integer[][],
-    value_w integer[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devlong IS 'Image Long Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devlong_att_conf_id_idx ON att_image_devlong (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devlong_att_conf_id_data_time_idx ON att_image_devlong (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devlong', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devulong (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ulong[][],
-    value_w ulong[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devulong IS 'Image ULong Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devulong_att_conf_id_idx ON att_image_devulong (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devulong_att_conf_id_data_time_idx ON att_image_devulong (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devulong', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devlong64 (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r bigint[][],
-    value_w bigint[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devlong64 IS 'Image Long64 Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devlong64_att_conf_id_idx ON att_image_devlong64 (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devlong64_att_conf_id_data_time_idx ON att_image_devlong64 (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devlong64', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devulong64 (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r ulong64[][],
-    value_w ulong64[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devulong64 IS 'Image ULong64 Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devulong64_att_conf_id_idx ON att_image_devulong64 (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devulong64_att_conf_id_data_time_idx ON att_image_devulong64 (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devulong64', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devfloat (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r real[][],
-    value_w real[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devfloat IS 'Image Float Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devfloat_att_conf_id_idx ON att_image_devfloat (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devfloat_att_conf_id_data_time_idx ON att_image_devfloat (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devfloat', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devdouble (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r double precision[][],
-    value_w double precision[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devdouble IS 'Image Double Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devdouble_att_conf_id_idx ON att_image_devdouble (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devdouble_att_conf_id_data_time_idx ON att_image_devdouble (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devdouble', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devstring (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r text[][],
-    value_w text[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devstring IS 'Image String Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devstring_att_conf_id_idx ON att_image_devstring (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devstring_att_conf_id_data_time_idx ON att_image_devstring (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devstring', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devstate (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r integer[][],
-    value_w integer[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devstate IS 'Image State Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devstate_att_conf_id_idx ON att_image_devstate (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devstate_att_conf_id_data_time_idx ON att_image_devstate (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devstate', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devencoded (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r bytea[][],
-    value_w bytea[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-COMMENT ON TABLE att_image_devencoded IS 'Image DevEncoded Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devencoded_att_conf_id_idx ON att_image_devencoded (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devencoded_att_conf_id_data_time_idx ON att_image_devencoded (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devencoded', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
-CREATE TABLE IF NOT EXISTS att_image_devenum (
-    att_conf_id integer NOT NULL,
-    data_time timestamp WITH TIME ZONE NOT NULL,
-    value_r_label text[][],
-    value_r smallint[][],
-    value_w_label text[][],
-    value_w smallint[][],
-    quality smallint,
-    att_error_desc_id integer,
-    details json,
-    PRIMARY KEY (att_conf_id, data_time),
-    FOREIGN KEY (att_conf_id) REFERENCES att_conf (att_conf_id),
-    FOREIGN KEY (att_error_desc_id) REFERENCES att_error_desc (att_error_desc_id)
-);
-
-COMMENT ON TABLE att_image_devenum IS 'Image Enum Values Table';
-CREATE INDEX IF NOT EXISTS att_image_devenum_att_conf_id_idx ON att_image_devenum (att_conf_id);
-CREATE INDEX IF NOT EXISTS att_image_devenum_att_conf_id_data_time_idx ON att_image_devenum (att_conf_id,data_time DESC);
-SELECT create_hypertable('att_image_devenum', 'data_time', chunk_time_interval => interval '28 day', create_default_indexes => FALSE);
-
--- Trigger to set the enum_labels
-CREATE OR REPLACE FUNCTION set_enum_label_array() RETURNS TRIGGER AS $$
-DECLARE
-BEGIN
-    IF NEW.value_r IS NOT NULL THEN
-        WITH enum_labels AS (
-            SELECT enum_labels FROM att_parameter WHERE att_conf_id=NEW.att_conf_id ORDER BY recv_time DESC LIMIT 1
-        )
-        SELECT array_agg(res) FROM (SELECT enum_labels[UNNEST(NEW.value_r) + 1] FROM enum_labels) AS res INTO NEW.value_r_label;
-    END IF;
-    IF NEW.value_w IS NOT NULL THEN
-        WITH enum_labels AS (
-            SELECT enum_labels FROM att_parameter WHERE att_conf_id=NEW.att_conf_id ORDER BY recv_time DESC LIMIT 1
-        )
-        SELECT array_agg(res) FROM (SELECT enum_labels[UNNEST(NEW.value_w) + 1] FROM enum_labels) AS res INTO NEW.value_w_label;
-    END IF;
-    RETURN NEW;
-END
-$$ LANGUAGE plpgsql;
-
-CREATE TRIGGER enum_label_trigger BEFORE INSERT ON att_image_devenum FOR EACH ROW EXECUTE PROCEDURE set_enum_label_array();
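-
--- Note: set_enum_label_array() is redefined here with CREATE OR REPLACE; the
--- same body already backs the att_array_devenum trigger in the preceding
--- schema file, so the two definitions must be kept in sync.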
-
diff --git a/docker-compose/timescaledb/resources/04_hdb_roles.sql b/docker-compose/timescaledb/resources/04_hdb_roles.sql
deleted file mode 100644
index 0faa15175600b5411a9095e434ae87f743e095c9..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/04_hdb_roles.sql
+++ /dev/null
@@ -1,41 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
--- Setup roles to access the hdb database
-CREATE ROLE readonly;
-CREATE ROLE readwrite;
-
--- Permissions - readonly
-GRANT CONNECT ON DATABASE hdb TO readonly;
-GRANT USAGE ON SCHEMA public TO readonly;
-GRANT SELECT ON ALL TABLES IN SCHEMA public TO readonly;
-ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO readonly;
-
--- Permissions - readwrite
-GRANT CONNECT ON DATABASE hdb TO readwrite;
-GRANT USAGE ON SCHEMA public TO readwrite;
-GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO readwrite;
-ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO readwrite;
-GRANT USAGE ON ALL SEQUENCES IN SCHEMA public TO readwrite;
-ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE ON SEQUENCES TO readwrite;
-GRANT ALL ON SCHEMA public TO readwrite;
-GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO readwrite;
-GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO readwrite;
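-
--- Example of attaching a new login user to these roles (the name and
--- password are hypothetical; the actual users ship in 05_hdb_ext_users.sql):
---   CREATE ROLE some_analyst WITH LOGIN PASSWORD 'secret';
---   GRANT readonly TO some_analyst;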
diff --git a/docker-compose/timescaledb/resources/05_hdb_ext_users.sql b/docker-compose/timescaledb/resources/05_hdb_ext_users.sql
deleted file mode 100644
index 1ce744e8675d4833f983cdf040107c4e1dd7c346..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/05_hdb_ext_users.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
-\c hdb
-
--- Some useful users for a basic system
-CREATE ROLE hdb_cfg_man WITH LOGIN PASSWORD 'hdbpp';
-GRANT readwrite TO hdb_cfg_man;
-
-CREATE ROLE hdb_event_sub WITH LOGIN PASSWORD 'hdbpp';
-GRANT readwrite TO hdb_event_sub;
-
-CREATE ROLE hdb_data_reporter WITH LOGIN PASSWORD 'hdbpp';
-GRANT readonly TO hdb_data_reporter;
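-
--- Example connection with the read-only reporting account (the host name is
--- a hypothetical example; the password is set above):
---   psql "host=archiver-timescale dbname=hdb user=hdb_data_reporter"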
diff --git a/docker-compose/timescaledb/resources/06_hdb_ext_import.sql b/docker-compose/timescaledb/resources/06_hdb_ext_import.sql
deleted file mode 100644
index f08cc3610df1a96bb207eef24fca29e274c8e5d7..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/06_hdb_ext_import.sql
+++ /dev/null
@@ -1,19 +0,0 @@
-\c hdb
-
-CREATE OR REPLACE FUNCTION expand_name() RETURNS TRIGGER AS $$
-DECLARE
-    len integer;
-BEGIN
-    IF (NEW.cs_name <> '' AND NEW.domain <> '' AND NEW.family <> '' AND NEW.member <> '' AND NEW.name <> '') IS NOT TRUE THEN
-        len = (SELECT cardinality((SELECT regexp_split_to_array(NEW.att_name, E'/'))));
-        NEW.name := (SELECT split_part(NEW.att_name, '/', len));
-        NEW.member := (SELECT split_part(NEW.att_name, '/', len - 1));
-        NEW.family := (SELECT split_part(NEW.att_name, '/', len - 2));
-        NEW.domain := (SELECT split_part(NEW.att_name, '/', len - 3));
-        NEW.cs_name := (SELECT split_part(NEW.att_name, '/', len - 4));
-    END IF;
-    RETURN NEW;
-END
-$$ LANGUAGE plpgsql;
-
-CREATE TRIGGER expand_name_trigger BEFORE INSERT ON att_conf FOR EACH ROW EXECUTE PROCEDURE expand_name();
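-
--- Example: for a hypothetical att_name 'tango://host:10000/dom/fam/mem/attr'
--- the five fields are filled from the last five '/'-separated parts, giving
--- name='attr', member='mem', family='fam', domain='dom' and
--- cs_name='host:10000'.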
diff --git a/docker-compose/timescaledb/resources/07_hdb_ext_aggregates.sql b/docker-compose/timescaledb/resources/07_hdb_ext_aggregates.sql
deleted file mode 100644
index a253ccd04f3ef74728cdacc4884c7fb31806ab17..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/07_hdb_ext_aggregates.sql
+++ /dev/null
@@ -1,1293 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
--- Continuous aggregates views for the attributes.
-\c hdb
--- Double attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devdouble_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devdouble
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
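-
--- The CASE expressions map NaN and +/-infinity to NULL so that avg, min, max
--- and stddev ignore them, while sum(CASE ... THEN 1 ... END) counts them
--- separately as count_nan_r / count_nan_w. Example read from this view, with
--- a hypothetical attribute id:
---   SELECT data_time, mean_r, min_r, max_r FROM cagg_scalar_devdouble_1min
---   WHERE att_conf_id = 42 ORDER BY data_time DESC LIMIT 60;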
-
-CREATE MATERIALIZED VIEW cagg_scalar_devdouble_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devdouble
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devdouble_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devdouble 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devdouble_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devdouble 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devdouble_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devdouble 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
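--- Note: no refresh policies are defined in this file; on TimescaleDB 2.x the
--- views above would be kept current with add_continuous_aggregate_policy,
--- e.g. (the offsets and schedule are hypothetical):
---   SELECT add_continuous_aggregate_policy('cagg_scalar_devdouble_1min',
---       start_offset => interval '1 hour', end_offset => interval '1 minute',
---       schedule_interval => interval '1 minute');
-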
--- Float attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devfloat_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devfloat 
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devfloat_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devfloat 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devfloat_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devfloat 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devfloat_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-		, count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devfloat 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devfloat_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), sum(
-                    CASE 
-                        WHEN value_r='NaN' THEN 1 
-                        WHEN value_r='infinity' THEN 1 
-                        WHEN value_r='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_r='NaN' THEN null 
-                        WHEN value_r='infinity' THEN null 
-                        WHEN value_r='-infinity' THEN null 
-                        ELSE value_r 
-                    END)::numeric)::float8
-		, count(value_w), sum(
-                    CASE 
-                        WHEN value_w='NaN' THEN 1 
-                        WHEN value_w='infinity' THEN 1 
-                        WHEN value_w='-infinity' THEN 1 
-                        ELSE 0 
-                    END)
-                , avg(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-                , min(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , max(
-                    CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)
-                , stddev(
-                    (CASE 
-                        WHEN value_w='NaN' THEN null 
-                        WHEN value_w='infinity' THEN null 
-                        WHEN value_w='-infinity' THEN null 
-                        ELSE value_w 
-                    END)::numeric)::float8
-       	FROM att_scalar_devfloat 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
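--- For reference, the CASE mapping used above sends NaN and +/-infinity to
--- NULL, which avg/min/max/stddev then skip, while the companion
--- sum(CASE ... THEN 1 ELSE 0 END) column counts exactly those non-finite
--- samples. A minimal, self-contained sketch (the three literal values are
--- only an illustration):
--- SELECT avg(CASE WHEN v='NaN' OR v='infinity' OR v='-infinity' THEN null ELSE v END) AS mean_finite,
---        sum(CASE WHEN v='NaN' OR v='infinity' OR v='-infinity' THEN 1 ELSE 0 END) AS count_nonfinite
--- FROM (VALUES (1.0::float8), ('NaN'::float8), (2.0::float8)) AS t(v);
--- -- returns mean_finite = 1.5, count_nonfinite = 1
-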
--- Long attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devlong_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong         
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Long 64 attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devlong64_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong64 
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong64_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong64 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong64_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong64 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong64_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong64 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devlong64_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devlong64 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Short attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devshort_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devshort 
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devshort_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devshort 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devshort_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devshort 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devshort_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devshort 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devshort_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devshort 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Unsigned long attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devulong_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong 
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Unsigned long 64 attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devulong64_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong64 
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong64_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong64 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong64_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong64 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong64_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong64 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devulong64_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devulong64 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Unsigned short attributes
-CREATE MATERIALIZED VIEW cagg_scalar_devushort_1min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 min', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devushort 
-        GROUP BY time_bucket('1 min', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devushort_10min(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('10 mins', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devushort 
-        GROUP BY time_bucket('10 mins', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devushort_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devushort 
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devushort_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devushort 
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_scalar_devushort_1day(
-		att_conf_id, data_time, count_rows, count_errors
-		, count_r, mean_r, min_r, max_r, stddev_r
-		, count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), count(*), count(att_error_desc_id)
-		, count(value_r), avg(value_r), min(value_r), max(value_r), stddev(value_r)
-		, count(value_w), avg(value_w), min(value_w), max(value_w), stddev(value_w)
-       	FROM att_scalar_devushort 
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
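--- The views above carry no explicit refresh policy in this hunk. Assuming
--- TimescaleDB 2.x (1.x instead takes the timescaledb.refresh_lag and
--- timescaledb.refresh_interval view options), each view would get a policy
--- along these lines; the intervals are only an illustration:
--- SELECT add_continuous_aggregate_policy('cagg_scalar_devdouble_1min',
---     start_offset      => INTERVAL '1 hour',
---     end_offset        => INTERVAL '1 minute',
---     schedule_interval => INTERVAL '1 minute');
-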
-
--- Set access
-
-GRANT ALL ON cagg_scalar_devdouble_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devdouble_1min TO readonly;
-GRANT ALL ON cagg_scalar_devdouble_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devdouble_10min TO readonly;
-GRANT ALL ON cagg_scalar_devdouble_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devdouble_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devdouble_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devdouble_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devdouble_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devdouble_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devfloat_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devfloat_1min TO readonly;
-GRANT ALL ON cagg_scalar_devfloat_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devfloat_10min TO readonly;
-GRANT ALL ON cagg_scalar_devfloat_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devfloat_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devfloat_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devfloat_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devfloat_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devfloat_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devlong_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong_1min TO readonly;
-GRANT ALL ON cagg_scalar_devlong_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong_10min TO readonly;
-GRANT ALL ON cagg_scalar_devlong_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devlong_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devlong_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devulong_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong_1min TO readonly;
-GRANT ALL ON cagg_scalar_devulong_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong_10min TO readonly;
-GRANT ALL ON cagg_scalar_devulong_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devulong_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devulong_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devulong64_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong64_1min TO readonly;
-GRANT ALL ON cagg_scalar_devulong64_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong64_10min TO readonly;
-GRANT ALL ON cagg_scalar_devulong64_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong64_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devulong64_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong64_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devulong64_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devulong64_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devlong64_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong64_1min TO readonly;
-GRANT ALL ON cagg_scalar_devlong64_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong64_10min TO readonly;
-GRANT ALL ON cagg_scalar_devlong64_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong64_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devlong64_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong64_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devlong64_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devlong64_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devshort_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devshort_1min TO readonly;
-GRANT ALL ON cagg_scalar_devshort_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devshort_10min TO readonly;
-GRANT ALL ON cagg_scalar_devshort_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devshort_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devshort_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devshort_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devshort_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devshort_1day TO readonly;
-
-GRANT ALL ON cagg_scalar_devushort_1min TO readwrite;
-GRANT SELECT ON cagg_scalar_devushort_1min TO readonly;
-GRANT ALL ON cagg_scalar_devushort_10min TO readwrite;
-GRANT SELECT ON cagg_scalar_devushort_10min TO readonly;
-GRANT ALL ON cagg_scalar_devushort_1hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devushort_1hour TO readonly;
-GRANT ALL ON cagg_scalar_devushort_8hour TO readwrite;
-GRANT SELECT ON cagg_scalar_devushort_8hour TO readonly;
-GRANT ALL ON cagg_scalar_devushort_1day TO readwrite;
-GRANT SELECT ON cagg_scalar_devushort_1day TO readonly;
-
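--- The grants above could also be generated; a minimal sketch, assuming the
--- continuous aggregates are listed in pg_views like ordinary views:
--- DO $$
--- DECLARE v text;
--- BEGIN
---     FOR v IN SELECT viewname FROM pg_views WHERE viewname LIKE 'cagg_scalar_%'
---     LOOP
---         EXECUTE format('GRANT ALL ON %I TO readwrite', v);
---         EXECUTE format('GRANT SELECT ON %I TO readonly', v);
---     END LOOP;
--- END $$;
-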
-
-
--- Drop all the views. They are materialized views (continuous aggregates),
--- so DROP MATERIALIZED VIEW is required rather than DROP VIEW:
--- DROP MATERIALIZED VIEW cagg_scalar_devdouble_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devdouble_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devdouble_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devdouble_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devdouble_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devfloat_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devfloat_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devfloat_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devfloat_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devfloat_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devlong_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devlong64_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong64_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong64_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong64_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devlong64_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devshort_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devshort_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devshort_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devshort_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devshort_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devulong_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devulong64_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong64_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong64_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong64_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devulong64_1day CASCADE;
-
--- DROP MATERIALIZED VIEW cagg_scalar_devushort_1min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devushort_10min CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devushort_1hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devushort_8hour CASCADE;
--- DROP MATERIALIZED VIEW cagg_scalar_devushort_1day CASCADE;
-
diff --git a/docker-compose/timescaledb/resources/08_hdb_ext_arrays_aggregates_helper.sql b/docker-compose/timescaledb/resources/08_hdb_ext_arrays_aggregates_helper.sql
deleted file mode 100644
index 7c67f65366984268f337133c938b9bda909e95fe..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/08_hdb_ext_arrays_aggregates_helper.sql
+++ /dev/null
@@ -1,2336 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
--- Aggregate function helpers for the continuous aggregate views of the array attributes.
-
-\c hdb
-
-/*
-NOT USED
-Kept as an alternative approach; it should be benchmarked for execution speed.
-
--- Special type to be used as input by compute_element_agg
--- It contains the previous aggregate results and the new values
-create type double_agg_input as (
-    value_r double precision,
-    value_w double precision,
-    count_r integer,
-    count_nan_r integer,
-    avg_r decimal,
-    min_r double precision,
-    max_r double precision,
-    stddev_r decimal,
-    count_w integer,
-    count_nan_w integer,
-    avg_w decimal,
-    min_w double precision,
-    max_w double precision,
-    stddev_w decimal
-);
-
--- Function to compute the aggregates from the new values and the previous
--- aggregate results.
--- It computes the result for an array of inputs and returns a table so that
--- it can be used in a FROM clause.
-CREATE OR REPLACE FUNCTION compute_element_agg(inp_arr double_agg_input[]
-    ) RETURNS SETOF RECORD as $$
-
-DECLARE
-    ret RECORD;
-    inp double_agg_input;
-    value_r double precision;
-    value_w double precision;
-    count_r integer;
-    count_nan_r integer;
-    avg_r decimal;
-    min_r double precision;
-    max_r double precision;
-    stddev_r decimal;
-    count_w integer;
-    count_nan_w integer;
-    avg_w decimal;
-    min_w double precision;
-    max_w double precision;
-    stddev_w decimal;
-    n_count_r integer;
-    n_count_nan_r integer;
-    n_avg_r decimal;
-    n_min_r double precision;
-    n_max_r double precision;
-    n_stddev_r decimal;
-    n_count_w integer;
-    n_count_nan_w integer;
-    n_avg_w decimal;
-    n_min_w double precision;
-    n_max_w double precision;
-    n_stddev_w decimal;
-
-BEGIN
-    FOREACH inp IN ARRAY inp_arr
-    LOOP
-
-        value_r := inp.value_r;
-        value_w := inp.value_w;
-        count_r :=  inp.count_r;
-        count_nan_r := inp.count_nan_r;
-        avg_r := inp.avg_r;
-        min_r := inp.min_r;
-        max_r := inp.max_r;
-        stddev_r := inp.stddev_r;
-        count_w := inp.count_w;
-        count_nan_w := inp.count_nan_w;
-        avg_w := inp.avg_w;
-        min_w := inp.min_w;
-        max_w := inp.max_w;
-        stddev_w := inp.stddev_w;
-        
-        IF value_r IS NULL OR value_r='NaN'::float8 OR value_r='Infinity' OR value_r='-Infinity'
-        THEN
-                
-            IF count_r IS NULL
-            THEN
-                n_count_r = 0;
-            ELSE
-                n_count_r = count_r;
-            END IF;
-        
-            IF value_r IS NULL
-            THEN
-                
-                IF count_nan_r IS NULL
-                THEN
-                    n_count_nan_r = 0;
-                ELSE
-                    n_count_nan_r = count_nan_r;
-                END IF;
-        
-            ELSE
-                
-                IF count_nan_r IS NULL
-                THEN
-                    n_count_nan_r = 1;
-                ELSE
-                    n_count_nan_r = count_nan_r + 1;
-                END IF;
-            END IF;
-        
-            n_avg_r = avg_r;
-            n_min_r = min_r;
-            n_max_r = max_r;
-            n_stddev_r = stddev_r;
-    
-        ELSE
-        
-            IF count_nan_r IS NULL
-            THEN
-                n_count_nan_r = 0;
-            ELSE
-                n_count_nan_r = count_nan_r;
-            END IF;
-
-            IF count_r IS NULL
-            THEN
-                n_count_r = 1;
-            ELSE
-                n_count_r = count_r + 1;
-            END IF;
-            
-            IF avg_r IS NULL
-            THEN
-                n_avg_r = value_r;
-            ELSE
-                n_avg_r = avg_r + (value_r-avg_r)/(count_r+1.)::decimal;
-            END IF;
-        
-            n_min_r = LEAST(value_r, min_r);
-            n_max_r = GREATEST(value_r, max_r);
-        
-            IF stddev_r IS NULL
-            THEN
-                n_stddev_r = 0;
-            ELSE
-                n_stddev_r = stddev_r + ((count_r + 0.)/(count_r+1.))*power(value_r - avg_r, 2);
-            END IF;
-        END IF;
-    
-        IF value_w IS NULL OR value_w='NaN'::float8 OR value_w='Infinity' OR value_w='-Infinity'
-        THEN
-        
-            IF count_w IS NULL
-            THEN
-                n_count_w = 0;
-            ELSE
-                n_count_w = count_w;
-            END IF;
-        
-            IF value_w IS NULL
-            THEN
-            
-                IF count_nan_w IS NULL
-                THEN
-                    n_count_nan_w = 0;
-                ELSE
-                    n_count_nan_w = count_nan_w;
-                END IF;
-        
-            ELSE
-            
-                IF count_nan_w IS NULL
-                THEN
-                    n_count_nan_w = 1;
-                ELSE
-                    n_count_nan_w = count_nan_w + 1;
-                END IF;
-            END IF;
-        
-            n_avg_w = avg_w;
-            n_min_w = min_w;
-            n_max_w = max_w;
-            n_stddev_w = stddev_w;
-        
-        ELSE
-        
-            IF count_nan_w IS NULL
-            THEN
-                n_count_nan_w = 0;
-            ELSE
-                n_count_nan_w = count_nan_w;
-            END IF;
-        
-            IF count_w IS NULL
-            THEN
-                n_count_w = 1;
-            ELSE
-                n_count_w = count_w + 1;
-            END IF;
-        
-            IF avg_w IS NULL
-            THEN
-                n_avg_w = value_w;
-            ELSE
-                n_avg_w = avg_w + (value_w-avg_w)/(count_w+1);
-            END IF;
-        
-            n_min_w = LEAST(value_w, min_w);
-            n_max_w = GREATEST(value_w, max_w);
-        
-            IF stddev_w IS NULL
-            THEN
-                n_stddev_w = 0;
-            ELSE
-                n_stddev_w = stddev_w + ((count_w + 0.)/(count_w+1.)*power(value_w - avg_w, 2));
-            END IF;
-        END IF;
-
-        ret := (n_count_r, n_count_nan_r, n_avg_r, n_min_r, n_max_r, n_stddev_r
-            , n_count_w, n_count_nan_w, n_avg_w, n_min_w, n_max_w, n_stddev_w);
-
-        return next ret;
-    END LOOP;
-END;
-$$
-LANGUAGE 'plpgsql';
-*/
-
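--- The per-element update in compute_element_agg above is Welford's
--- algorithm: with n prior finite samples,
---   avg_{n+1} = avg_n + (x - avg_n) / (n + 1)
---   M2_{n+1}  = M2_n + (n / (n + 1)) * (x - avg_n)^2
--- The stddev_* fields therefore accumulate M2, the running sum of squared
--- deviations, not a finished standard deviation; the sample variance is
--- M2 / (n - 1), to be computed when the aggregate is finalised.
-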
-
--- Special types to store the aggregation state during computation
-create type double_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-        count_nan_r integer[],
-	avg_r decimal[],
-        min_r double precision[],
-        max_r double precision[],
-	stddev_r decimal[],
-        count_w integer[],
-        count_nan_w integer[],
-	avg_w decimal[],
-        min_w double precision[],
-        max_w double precision[],
-	stddev_w decimal[]
-);
-
-create type float_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-        count_nan_r integer[],
-	avg_r decimal[],
-        min_r real[],
-        max_r real[],
-	stddev_r decimal[],
-        count_w integer[],
-        count_nan_w integer[],
-	avg_w decimal[],
-        min_w real[],
-        max_w real[],
-	stddev_w decimal[]
-);
-
-create type long_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-	avg_r decimal[],
-        min_r integer[],
-        max_r integer[],
-	stddev_r decimal[],
-        count_w integer[],
-	avg_w decimal[],
-        min_w integer[],
-        max_w integer[],
-	stddev_w decimal[]
-);
-
-create type long64_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-	avg_r decimal[],
-        min_r bigint[],
-        max_r bigint[],
-	stddev_r decimal[],
-        count_w integer[],
-	avg_w decimal[],
-        min_w bigint[],
-        max_w bigint[],
-	stddev_w decimal[]
-);
-
-create type short_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-	avg_r decimal[],
-        min_r smallint[],
-        max_r smallint[],
-	stddev_r decimal[],
-        count_w integer[],
-	avg_w decimal[],
-        min_w smallint[],
-        max_w smallint[],
-	stddev_w decimal[]
-);
-
-create type ulong_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-	avg_r decimal[],
-        min_r ulong[],
-        max_r ulong[],
-	stddev_r decimal[],
-        count_w integer[],
-	avg_w decimal[],
-        min_w ulong[],
-        max_w ulong[],
-	stddev_w decimal[]
-);
-
-create type ulong64_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-	avg_r decimal[],
-        min_r ulong64[],
-        max_r ulong64[],
-	stddev_r decimal[],
-        count_w integer[],
-	avg_w decimal[],
-        min_w ulong64[],
-        max_w ulong64[],
-	stddev_w decimal[]
-);
-
-create type ushort_array_agg_state as (
-	count integer,
-	count_errors integer,
-        count_r integer[],
-	avg_r decimal[],
-        min_r ushort[],
-        max_r ushort[],
-	stddev_r decimal[],
-        count_w integer[],
-	avg_w decimal[],
-        min_w ushort[],
-        max_w ushort[],
-	stddev_w decimal[]
-);
-
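--- These state types serve as the STYPE of custom PostgreSQL aggregates;
--- the fn_*_combine functions below are the matching COMBINEFUNC, which
--- lets the planner merge partial states from parallel workers. A minimal
--- sketch of the wiring (the aggregate name, the argument types and the
--- transition function fn_double_step are hypothetical; they are not shown
--- in this hunk):
--- CREATE AGGREGATE double_array_aggregate(double precision[], double precision[]) (
---     SFUNC = fn_double_step,
---     STYPE = double_array_agg_state,
---     COMBINEFUNC = fn_double_combine,
---     PARALLEL = SAFE
--- );
-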
--- Function to combine two aggregate states into a new one;
--- needed for the aggregate function to support partial aggregation
-CREATE OR REPLACE FUNCTION fn_double_combine(double_array_agg_state, double_array_agg_state)
-    RETURNS double_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result double_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Limit cases. 
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- if there is a discrepancy in the array sizes
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::float8[], ARRAY[]::float8[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::float8[], ARRAY[]::float8[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.count_nan_r) AS count_nan_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.count_nan_w) AS count_nan_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.count_nan_r) AS count_nan_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.count_nan_w) AS count_nan_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2), array_agg(count_nan_r1+count_nan_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2,0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1*count_r2::decimal/NULLIF(count_r1+count_r2,0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2), array_agg(count_nan_w1+count_nan_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2,0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1*count_w2::decimal/NULLIF(count_w1+count_w2,0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
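--- The merge above is the standard parallel (pairwise) update of Chan et
--- al.: for two partial states (n1, avg1, M2_1) and (n2, avg2, M2_2),
---   avg = avg1 + (n2 / (n1 + n2)) * (avg2 - avg1)
---   M2  = M2_1 + M2_2 + (n1 * n2 / (n1 + n2)) * (avg2 - avg1)^2
--- The counts are cast to decimal so the ratios are not truncated by
--- integer division, and NULLIF guards the case of no finite samples
--- (n1 + n2 = 0). The same merge is repeated below for the other element
--- types.
-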
-CREATE OR REPLACE FUNCTION fn_float_combine(float_array_agg_state, float_array_agg_state)
-    RETURNS float_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result float_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Limit cases. 
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- if there is a discrepancy in the array sizes
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::real[], ARRAY[]::real[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::real[], ARRAY[]::real[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.count_nan_r) AS count_nan_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.count_nan_w) AS count_nan_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.count_nan_r) AS count_nan_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.count_nan_w) AS count_nan_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2), array_agg(count_nan_r1+count_nan_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2,0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1*count_r2::decimal/NULLIF(count_r1+count_r2,0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2), array_agg(count_nan_w1+count_nan_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2,0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1*count_w2::decimal/NULLIF(count_w1+count_w2,0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_long_combine(long_array_agg_state, long_array_agg_state)
-    RETURNS long_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result long_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Limit cases. 
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- if there is a discrepancy in the array sizes
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2,0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1*count_r2::decimal/NULLIF(count_r1+count_r2,0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2,0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1*count_w2::decimal/NULLIF(count_w1+count_w2,0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_long64_combine(long64_array_agg_state, long64_array_agg_state)
-    RETURNS long64_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result long64_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Limit cases. 
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- if there is a discrepancy in the array sizes
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::bigint[], ARRAY[]::bigint[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::bigint[], ARRAY[]::bigint[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2,0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1*count_r2::decimal/NULLIF(count_r1+count_r2,0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2,0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1*count_w2::decimal/NULLIF(count_w1+count_w2,0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_short_combine(short_array_agg_state, short_array_agg_state)
-    RETURNS short_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result short_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Limit cases. 
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- if there is a discrepancy in the array sizes
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::smallint[], ARRAY[]::smallint[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::smallint[], ARRAY[]::smallint[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2,0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1*count_r2::decimal/NULLIF(count_r1+count_r2,0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2,0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1*count_w2::decimal/NULLIF(count_w1+count_w2,0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ulong_combine(ulong_array_agg_state, ulong_array_agg_state)
-    RETURNS ulong_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result ulong_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Limit cases. 
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- if there is a discrepancy in the array sizes
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong[], ARRAY[]::ulong[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong[], ARRAY[]::ulong[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2,0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1*count_r2::decimal/NULLIF(count_r1+count_r2,0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2,0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1*count_w2::decimal/NULLIF(count_w1+count_w2,0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ulong64_combine(ulong64_array_agg_state, ulong64_array_agg_state)
-    RETURNS ulong64_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result ulong64_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Edge cases: if one state is NULL, return the other unchanged.
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- If the array sizes differ, reset to an empty state.
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong64[], ARRAY[]::ulong64[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong64[], ARRAY[]::ulong64[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2, 0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1::decimal*count_r2/NULLIF(count_r1+count_r2, 0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2, 0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1::decimal*count_w2/NULLIF(count_w1+count_w2, 0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ushort_combine(ushort_array_agg_state, ushort_array_agg_state)
-    RETURNS ushort_array_agg_state AS $$
-
-DECLARE
-    state1 ALIAS FOR $1;
-    state2 ALIAS FOR $2;
-    count integer;
-    count_errors integer;
-    result ushort_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Edge cases: if one state is NULL, return the other unchanged.
-    IF state1 is NULL
-    THEN
-        return state2;
-    END IF;
-    
-    IF state2 is NULL
-    THEN
-        return state1;
-    END IF;
-
-    -- If the array sizes differ, reset to an empty state.
-    IF CARDINALITY(state1.avg_r) != CARDINALITY(state2.avg_r) OR CARDINALITY(state1.avg_w) != CARDINALITY(state2.avg_w) THEN
-        SELECT 0, 0,
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ushort[], ARRAY[]::ushort[], ARRAY[]::decimal[],
-        ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ushort[], ARRAY[]::ushort[], ARRAY[]::decimal[]
-        INTO result;
-    ELSE
-	
-    count := state1.count + state2.count;
-    count_errors := state1.count_errors + state2.count_errors;
-    
-    WITH arrays AS(
-        SELECT 
-            UNNEST(state1.count_r) AS count_r1, UNNEST(state1.avg_r) AS avg_r1,
-            UNNEST(state1.min_r) AS min_r1, UNNEST(state1.max_r) AS max_r1, UNNEST(state1.stddev_r) AS stddev_r1,
-            UNNEST(state1.count_w) AS count_w1, UNNEST(state1.avg_w) AS avg_w1,
-            UNNEST(state1.min_w) AS min_w1, UNNEST(state1.max_w) AS max_w1, UNNEST(state1.stddev_w) AS stddev_w1,
-            UNNEST(state2.count_r) AS count_r2, UNNEST(state2.avg_r) AS avg_r2,
-            UNNEST(state2.min_r) AS min_r2, UNNEST(state2.max_r) AS max_r2, UNNEST(state2.stddev_r) AS stddev_r2,
-            UNNEST(state2.count_w) AS count_w2, UNNEST(state2.avg_w) AS avg_w2,
-            UNNEST(state2.min_w) AS min_w2, UNNEST(state2.max_w) AS max_w2, UNNEST(state2.stddev_w) AS stddev_w2
-        )
-        SELECT count, count_errors,
-            array_agg(count_r1+count_r2),
-            array_agg(avg_r1 + (count_r2::decimal/NULLIF(count_r1+count_r2, 0))*(avg_r2-avg_r1)), array_agg(LEAST(min_r1, min_r2)), array_agg(GREATEST(max_r1, max_r2)),
-            array_agg(stddev_r1 + stddev_r2 + (count_r1::decimal*count_r2/NULLIF(count_r1+count_r2, 0))*power(avg_r2 - avg_r1, 2)),
-            array_agg(count_w1+count_w2),
-            array_agg(avg_w1 + (count_w2::decimal/NULLIF(count_w1+count_w2, 0))*(avg_w2-avg_w1)), array_agg(LEAST(min_w1, min_w2)), array_agg(GREATEST(max_w1, max_w2)),
-            array_agg(stddev_w1 + stddev_w2 + (count_w1::decimal*count_w2/NULLIF(count_w1+count_w2, 0))*power(avg_w2 - avg_w1, 2))
-        INTO result FROM arrays;
-    END IF;
-    
-    return result;
-END;
-$$
-LANGUAGE 'plpgsql';
-
--- Function to compute the next aggregate state from the previous state and the current row
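--- Each transition applies Welford's incremental update per array element:
---   count' = count + 1
---   avg'   = avg + (x - avg) / count'
---   M2'    = M2 + (count / count') * (x - avg)^2
--- For the floating-point types, NaN and +/-Infinity samples are tallied in
--- count_nan_* and leave the running statistics untouched.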
-CREATE OR REPLACE FUNCTION fn_double_array_agg(double_array_agg_state,new_row att_array_devdouble)
-    RETURNS double_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result double_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read='NaN'::float8 THEN 0 
-                    WHEN read='Infinity'::float8 THEN 0 
-                    WHEN read='-Infinity'::float8 THEN 0 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-                ), array_agg(
-                CASE 
-                    WHEN read='NaN'::float8 THEN 1 
-                    WHEN read='Infinity'::float8 THEN 1 
-                    WHEN read='-Infinity'::float8 THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(
-                CASE
-                    WHEN read='NaN'::float8 THEN NULL
-                    WHEN read='Infinity'::float8 THEN NULL
-                    WHEN read='-Infinity'::float8 THEN NULL
-                    ELSE read::decimal
-                END
-            ), array_agg(CASE WHEN read='NaN'::float8 OR read='Infinity'::float8 OR read='-Infinity'::float8 THEN NULL ELSE read END
-            ), array_agg(CASE WHEN read='NaN'::float8 OR read='Infinity'::float8 OR read='-Infinity'::float8 THEN NULL ELSE read END
-            ), array_agg(
-                CASE 
-                    WHEN read='NaN'::float8 THEN NULL
-                    WHEN read='Infinity'::float8 THEN NULL
-                    WHEN read='-Infinity'::float8 THEN NULL
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write='NaN'::float8 THEN 0 
-                    WHEN write='Infinity'::float8 THEN 0 
-                    WHEN write='-Infinity'::float8 THEN 0 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-                ), array_agg(
-                CASE 
-                    WHEN write='NaN'::float8 THEN 1 
-                    WHEN write='Infinity'::float8 THEN 1 
-                    WHEN write='-Infinity'::float8 THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(
-                CASE
-                    WHEN write='NaN'::float8 THEN NULL
-                    WHEN write='Infinity'::float8 THEN NULL
-                    WHEN write='-Infinity'::float8 THEN NULL
-                    ELSE write::decimal
-                END
-            ), array_agg(CASE WHEN write='NaN'::float8 OR write='Infinity'::float8 OR write='-Infinity'::float8 THEN NULL ELSE write END
-            ), array_agg(CASE WHEN write='NaN'::float8 OR write='Infinity'::float8 OR write='-Infinity'::float8 THEN NULL ELSE write END
-            ), array_agg(
-                CASE 
-                    WHEN write='NaN'::float8 THEN NULL
-                    WHEN write='Infinity'::float8 THEN NULL
-                    WHEN write='-Infinity'::float8 THEN NULL
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::float8[], ARRAY[]::float8[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::float8[], ARRAY[]::float8[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.count_nan_r) AS nan_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.count_nan_w) AS nan_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read='NaN'::float8 THEN count_r
-                        WHEN read='Infinity'::float8 THEN count_r 
-                        WHEN read='-Infinity'::float8 THEN count_r 
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN nan_r + 1 
-                        WHEN read='Infinity'::float8 THEN nan_r + 1 
-                        WHEN read='-Infinity'::float8 THEN nan_r + 1 
-                        ELSE nan_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN avg_r 
-                        WHEN read='Infinity'::float8 THEN avg_r 
-                        WHEN read='-Infinity'::float8 THEN avg_r
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read::decimal
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN min_r 
-                        WHEN read='Infinity'::float8 THEN min_r 
-                        WHEN read='-Infinity'::float8 THEN min_r
-                        ELSE LEAST(read, min_r)
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN max_r 
-                        WHEN read='Infinity'::float8 THEN max_r 
-                        WHEN read='-Infinity'::float8 THEN max_r 
-                        ELSE GREATEST(read, max_r)
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN stddev_r 
-                        WHEN read='Infinity'::float8 THEN stddev_r 
-                        WHEN read='-Infinity'::float8 THEN stddev_r
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write='NaN'::float8 THEN count_w
-                        WHEN write='Infinity'::float8 THEN count_w 
-                        WHEN write='-Infinity'::float8 THEN count_w 
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN nan_w + 1 
-                        WHEN write='Infinity'::float8 THEN nan_w + 1 
-                        WHEN write='-Infinity'::float8 THEN nan_w + 1 
-                        ELSE nan_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN avg_w 
-                        WHEN write='Infinity'::float8 THEN avg_w 
-                        WHEN write='-Infinity'::float8 THEN avg_w
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN min_w 
-                        WHEN write='Infinity'::float8 THEN min_w 
-                        WHEN write='-Infinity'::float8 THEN min_w
-                        ELSE LEAST(write, min_w)
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN max_w 
-                        WHEN write='Infinity'::float8 THEN max_w 
-                        WHEN write='-Infinity'::float8 THEN max_w 
-                        ELSE GREATEST(write, max_w)
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN stddev_w 
-                        WHEN write='Infinity'::float8 THEN stddev_w 
-                        WHEN write='-Infinity'::float8 THEN stddev_w
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-/*
-* Different method using compute_element_agg
-
-                SELECT n_count_r, n_count_nan_r, n_avg_r, n_min_r, n_max_r, n_stddev_r
-                    , n_count_w, n_count_nan_w, n_avg_w, n_min_w, n_max_w, n_stddev_w
-                    FROM compute_element_agg(
-                         ( SELECT array_agg(ROW(read, write
-                    , count_r, nan_r, avg_r, min_r, max_r, stddev_r
-                    , count_w, nan_w, avg_w, min_w, max_w, stddev_w)::double_agg_input) from arrays )
-                    ) as (n_count_r integer, n_count_nan_r integer, n_avg_r decimal, n_min_r double precision, n_max_r double precision, n_stddev_r decimal
-                        , n_count_w integer, n_count_nan_w integer, n_avg_w decimal, n_min_w double precision, n_max_w double precision, n_stddev_w decimal)
-                )
-                SELECT count, state.count_errors+count_err
-                , array_agg(aggregates.n_count_r), array_agg(aggregates.n_count_nan_r), array_agg(aggregates.n_avg_r), array_agg(aggregates.n_min_r), array_agg(aggregates.n_max_r), array_agg(aggregates.n_stddev_r)
-                , array_agg(aggregates.n_count_w), array_agg(aggregates.n_count_nan_w), array_agg(aggregates.n_avg_w), array_agg(aggregates.n_min_w), array_agg(aggregates.n_max_w), array_agg(aggregates.n_stddev_w)
-                into result from aggregates;
-*/
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
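--- The remaining transition functions repeat this pattern for each input
--- type; only the floating-point variants (double, float) carry the
--- NaN/Infinity screening and the count_nan_* slots, while the integer
--- variants feed min/max straight through LEAST/GREATEST.
-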
-CREATE OR REPLACE FUNCTION fn_float_array_agg(float_array_agg_state,new_row att_array_devfloat)
-    RETURNS float_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result float_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read='NaN'::float8 THEN 0 
-                    WHEN read='Infinity'::float8 THEN 0 
-                    WHEN read='-Infinity'::float8 THEN 0 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-                ), array_agg(
-                CASE 
-                    WHEN read='NaN'::float8 THEN 1 
-                    WHEN read='Infinity'::float8 THEN 1 
-                    WHEN read='-Infinity'::float8 THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(
-                CASE
-                    WHEN read='NaN'::float8 THEN NULL
-                    WHEN read='Infinity'::float8 THEN NULL
-                    WHEN read='-Infinity'::float8 THEN NULL
-                    ELSE read::decimal
-                END
-            ), array_agg(CASE WHEN read='NaN'::float8 OR read='Infinity'::float8 OR read='-Infinity'::float8 THEN NULL ELSE read END
-            ), array_agg(CASE WHEN read='NaN'::float8 OR read='Infinity'::float8 OR read='-Infinity'::float8 THEN NULL ELSE read END
-            ), array_agg(
-                CASE 
-                    WHEN read='NaN'::float8 THEN NULL
-                    WHEN read='Infinity'::float8 THEN NULL
-                    WHEN read='-Infinity'::float8 THEN NULL
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write='NaN'::float8 THEN 0 
-                    WHEN write='Infinity'::float8 THEN 0 
-                    WHEN write='-Infinity'::float8 THEN 0 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-                ), array_agg(
-                CASE 
-                    WHEN write='NaN'::float8 THEN 1 
-                    WHEN write='Infinity'::float8 THEN 1 
-                    WHEN write='-Infinity'::float8 THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(
-                CASE
-                    WHEN write='NaN'::float8 THEN NULL
-                    WHEN write='Infinity'::float8 THEN NULL
-                    WHEN write='-Infinity'::float8 THEN NULL
-                    ELSE write::decimal
-                END
-            ), array_agg(CASE WHEN write='NaN'::float8 OR write='Infinity'::float8 OR write='-Infinity'::float8 THEN NULL ELSE write END
-            ), array_agg(CASE WHEN write='NaN'::float8 OR write='Infinity'::float8 OR write='-Infinity'::float8 THEN NULL ELSE write END
-            ), array_agg(
-                CASE 
-                    WHEN write='NaN'::float8 THEN NULL
-                    WHEN write='Infinity'::float8 THEN NULL
-                    WHEN write='-Infinity'::float8 THEN NULL
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::real[], ARRAY[]::real[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::real[], ARRAY[]::real[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.count_nan_r) AS nan_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.count_nan_w) AS nan_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read='NaN'::float8 THEN count_r
-                        WHEN read='Infinity'::float8 THEN count_r 
-                        WHEN read='-Infinity'::float8 THEN count_r 
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN nan_r + 1 
-                        WHEN read='Infinity'::float8 THEN nan_r + 1 
-                        WHEN read='-Infinity'::float8 THEN nan_r + 1 
-                        ELSE nan_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN avg_r 
-                        WHEN read='Infinity'::float8 THEN avg_r 
-                        WHEN read='-Infinity'::float8 THEN avg_r
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN min_r 
-                        WHEN read='Infinity'::float8 THEN min_r 
-                        WHEN read='-Infinity'::float8 THEN min_r
-                        ELSE LEAST(read, min_r)
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN max_r 
-                        WHEN read='Infinity'::float8 THEN max_r 
-                        WHEN read='-Infinity'::float8 THEN max_r 
-                        ELSE GREATEST(read, max_r)
-                    END
-                    ), array_agg(CASE 
-                        WHEN read='NaN'::float8 THEN stddev_r 
-                        WHEN read='Infinity'::float8 THEN stddev_r 
-                        WHEN read='-Infinity'::float8 THEN stddev_r
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write='NaN'::float8 THEN count_w
-                        WHEN write='Infinity'::float8 THEN count_w 
-                        WHEN write='-Infinity'::float8 THEN count_w 
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN nan_w + 1 
-                        WHEN write='Infinity'::float8 THEN nan_w + 1 
-                        WHEN write='-Infinity'::float8 THEN nan_w + 1 
-                        ELSE nan_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN avg_w 
-                        WHEN write='Infinity'::float8 THEN avg_w 
-                        WHEN write='-Infinity'::float8 THEN avg_w
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN min_w 
-                        WHEN write='Infinity'::float8 THEN min_w 
-                        WHEN write='-Infinity'::float8 THEN min_w
-                        ELSE LEAST(write, min_w)
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN max_w 
-                        WHEN write='Infinity'::float8 THEN max_w 
-                        WHEN write='-Infinity'::float8 THEN max_w 
-                        ELSE GREATEST(write, max_w)
-                    END
-                    ), array_agg(CASE 
-                        WHEN write='NaN'::float8 THEN stddev_w 
-                        WHEN write='Infinity'::float8 THEN stddev_w 
-                        WHEN write='-Infinity'::float8 THEN stddev_w
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_long_array_agg(long_array_agg_state,new_row att_array_devlong)
-    RETURNS long_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result long_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-            ), array_agg(read::decimal), array_agg(read), array_agg(read), array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(write::decimal), array_agg(write), array_agg(write), array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::integer[], ARRAY[]::integer[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(LEAST(read, min_r)), array_agg(GREATEST(read, max_r))
-                    , array_agg(CASE 
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(LEAST(write, min_w)), array_agg(GREATEST(write, max_w))
-                    , array_agg(CASE 
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_long64_array_agg(long64_array_agg_state,new_row att_array_devlong64)
-    RETURNS long64_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result long64_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-            ), array_agg(read::decimal), array_agg(read), array_agg(read), array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(write::decimal), array_agg(write), array_agg(write), array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::bigint[], ARRAY[]::bigint[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::bigint[], ARRAY[]::bigint[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(LEAST(read, min_r)), array_agg(GREATEST(read, max_r))
-                    , array_agg(CASE 
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(LEAST(write, min_w)), array_agg(GREATEST(write, max_w))
-                    , array_agg(CASE 
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_short_array_agg(short_array_agg_state,new_row att_array_devshort)
-    RETURNS short_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result short_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-            ), array_agg(read::decimal), array_agg(read), array_agg(read), array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(write::decimal), array_agg(write), array_agg(write), array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::smallint[], ARRAY[]::smallint[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::smallint[], ARRAY[]::smallint[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(LEAST(read, min_r)), array_agg(GREATEST(read, max_r))
-                    , array_agg(CASE 
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(LEAST(write, min_w)), array_agg(GREATEST(write, max_w))
-                    , array_agg(CASE 
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ulong_array_agg(ulong_array_agg_state,new_row att_array_devulong)
-    RETURNS ulong_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result ulong_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-            ), array_agg(read::decimal), array_agg(read), array_agg(read), array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(write::decimal), array_agg(write), array_agg(write), array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong[], ARRAY[]::ulong[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong[], ARRAY[]::ulong[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(LEAST(read, min_r)), array_agg(GREATEST(read, max_r))
-                    , array_agg(CASE 
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(LEAST(write, min_w)), array_agg(GREATEST(write, max_w))
-                    , array_agg(CASE 
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ulong64_array_agg(ulong64_array_agg_state,new_row att_array_devulong64)
-    RETURNS ulong64_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result ulong64_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-            ), array_agg(read::decimal), array_agg(read), array_agg(read), array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(write::decimal), array_agg(write), array_agg(write), array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong64[], ARRAY[]::ulong64[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ulong64[], ARRAY[]::ulong64[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(LEAST(read, min_r)), array_agg(GREATEST(read, max_r))
-                    , array_agg(CASE 
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(LEAST(write, min_w)), array_agg(GREATEST(write, max_w))
-                    , array_agg(CASE 
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ushort_array_agg(ushort_array_agg_state,new_row att_array_devushort)
-    RETURNS ushort_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    count integer;
-    count_err integer;
-    result ushort_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    -- Increment error count if needed
-    IF new_row.att_error_desc_id > 0 THEN
-        count_err = 1;
-    ELSE
-        count_err = 0;
-    END IF;
-
-    IF state is NULL
-    THEN
-        WITH arrays AS(
-            SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write)
-            SELECT 1, count_err,
-            array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 1  
-                    ELSE 0 
-                END
-            ), array_agg(read::decimal), array_agg(read), array_agg(read), array_agg(
-                CASE 
-                    WHEN read IS NOT NULL THEN 0 
-                    ELSE read
-                END
-            ),
-            array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 1 
-                    ELSE 0 
-                END
-            ), array_agg(write::decimal), array_agg(write), array_agg(write), array_agg(
-                CASE 
-                    WHEN write IS NOT NULL THEN 0 
-                    ELSE write
-                END
-            )
-            INTO result FROM arrays;
-    ELSE
-
-        IF CARDINALITY(state.avg_r) != CARDINALITY(new_row.value_r) or CARDINALITY(state.avg_w) != CARDINALITY(new_row.value_w)
-        THEN
-            SELECT 0, 0,
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ushort[], ARRAY[]::ushort[], ARRAY[]::decimal[],
-            ARRAY[]::integer[], ARRAY[]::decimal[], ARRAY[]::ushort[], ARRAY[]::ushort[], ARRAY[]::decimal[]
-            INTO result;
-        ELSE
-
-            count := state.count + 1;
-            WITH arrays AS(
-                SELECT UNNEST(new_row.value_r) AS read, UNNEST(new_row.value_w) AS write,
-                    UNNEST(state.count_r) AS count_r, UNNEST(state.avg_r) AS avg_r,
-                    UNNEST(state.min_r) AS min_r, UNNEST(state.max_r) AS max_r, UNNEST(state.stddev_r) AS stddev_r,
-                    UNNEST(state.count_w) AS count_w, UNNEST(state.avg_w) AS avg_w,
-                    UNNEST(state.min_w) AS min_w, UNNEST(state.max_w) AS max_w, UNNEST(state.stddev_w) AS stddev_w
-                )
-                SELECT count, state.count_errors+count_err
-                 , array_agg(CASE
-                        WHEN read IS NOT NULL THEN count_r+1 
-                        ELSE count_r 
-                    END
-                    ), array_agg(CASE 
-                        WHEN read IS NULL THEN avg_r
-                        WHEN avg_r IS NULL THEN read
-                        ELSE avg_r + (read::decimal-avg_r)/(count_r+1.)::decimal
-                    END
-                    ), array_agg(LEAST(read, min_r)), array_agg(GREATEST(read, max_r))
-                    , array_agg(CASE 
-                        WHEN read IS NULL THEN stddev_r
-                        WHEN stddev_r IS NULL THEN 0
-                        ELSE stddev_r + ((count_r+0.)/(count_r+1.))::decimal*power(read::decimal - avg_r, 2)
-                    END
-                    ), array_agg(CASE
-                        WHEN write IS NOT NULL THEN count_w+1 
-                        ELSE count_w 
-                    END
-                    ), array_agg(CASE 
-                        WHEN write IS NULL THEN avg_w
-                        WHEN avg_w IS NULL THEN write
-                        ELSE avg_w + (write::decimal-avg_w)/(count_w+1.)::decimal
-                    END
-                    ), array_agg(LEAST(write, min_w)), array_agg(GREATEST(write, max_w))
-                    , array_agg(CASE 
-                        WHEN write IS NULL THEN stddev_w
-                        WHEN stddev_w IS NULL THEN 0
-                        ELSE stddev_w + ((count_w+0.)/(count_w+1.))::decimal*power(write::decimal - avg_w, 2)
-                    END
-                    )
-                INTO result FROM arrays;
-        END IF;
-    END IF;
-
-    return result;
-
-END;
-$$
-LANGUAGE 'plpgsql';
-
--- Final functions: compute the actual aggregate results from the internal
--- state; only the stddev still has to be derived from the running M2.
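--- Given the accumulated M2 and per-element counts, the population standard
--- deviation is sqrt(M2 / count); the CASE yields NULL when no values were
--- seen and 0 for a single value.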
-CREATE OR REPLACE FUNCTION fn_double_array_final(double_array_agg_state)
-    RETURNS double_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result double_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.count_nan_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.count_nan_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END
-                )
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_float_array_final(float_array_agg_state)
-    RETURNS float_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result float_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.count_nan_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.count_nan_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_long_array_final(long_array_agg_state)
-    RETURNS long_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result long_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_long64_array_final(long64_array_agg_state)
-    RETURNS long64_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result long64_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_short_array_final(short_array_agg_state)
-    RETURNS short_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result short_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ulong_array_final(ulong_array_agg_state)
-    RETURNS ulong_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result ulong_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ulong64_array_final(ulong64_array_agg_state)
-    RETURNS ulong64_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result ulong64_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
-CREATE OR REPLACE FUNCTION fn_ushort_array_final(ushort_array_agg_state)
-    RETURNS ushort_array_agg_state AS $$
-
-DECLARE
-    state ALIAS FOR $1;
-    result ushort_array_agg_state%ROWTYPE;
-
-BEGIN
-
-    IF state IS NULL
-    THEN
-        return NULL;
-    END IF;
-
-    IF state.count = 0 THEN
-        return NULL;
-
-    ELSE
-        WITH arrays AS(
-            SELECT UNNEST(state.count_r) AS count_r, UNNEST(state.stddev_r) AS stddev_r,
-                UNNEST(state.count_w) AS count_w, UNNEST(state.stddev_w) AS stddev_w
-            )
-            SELECT state.count, state.count_errors,
-            state.count_r, state.avg_r, 
-            state.min_r, state.max_r, array_agg(CASE
-                WHEN count_r=0 THEN NULL
-                WHEN count_r=1 THEN 0
-                ELSE sqrt(stddev_r/(count_r))
-                END
-                ),
-            state.count_w, state.avg_w, 
-            state.min_w, state.max_w, array_agg(CASE
-                WHEN count_w=0 THEN NULL
-                WHEN count_w=1 THEN 0
-                ELSE sqrt(stddev_w/(count_w))
-                END)
-            INTO result FROM arrays;
-
-        return result;
-
-    END IF;
-END;
-$$
-LANGUAGE 'plpgsql';
-
--- Aggregate function declaration
-CREATE AGGREGATE double_array_aggregate(att_array_devdouble)
-(
-    sfunc = fn_double_array_agg,
-    stype = double_array_agg_state,
-    combinefunc = fn_double_combine,
-    finalfunc = fn_double_array_final
-);
-
-CREATE AGGREGATE float_array_aggregate(att_array_devfloat)
-(
-    sfunc = fn_float_array_agg,
-    stype = float_array_agg_state,
-    combinefunc = fn_float_combine,
-    finalfunc = fn_float_array_final
-);
-
-CREATE AGGREGATE long_array_aggregate(att_array_devlong)
-(
-    sfunc = fn_long_array_agg,
-    stype = long_array_agg_state,
-    combinefunc = fn_long_combine,
-    finalfunc = fn_long_array_final
-);
-
-CREATE AGGREGATE long64_array_aggregate(att_array_devlong64)
-(
-    sfunc = fn_long64_array_agg,
-    stype = long64_array_agg_state,
-    combinefunc = fn_long64_combine,
-    finalfunc = fn_long64_array_final
-);
-
-CREATE AGGREGATE short_array_aggregate(att_array_devshort)
-(
-    sfunc = fn_short_array_agg,
-    stype = short_array_agg_state,
-    combinefunc = fn_short_combine,
-    finalfunc = fn_short_array_final
-);
-
-CREATE AGGREGATE ulong_array_aggregate(att_array_devulong)
-(
-    sfunc = fn_ulong_array_agg,
-    stype = ulong_array_agg_state,
-    combinefunc = fn_ulong_combine,
-    finalfunc = fn_ulong_array_final
-);
-
-CREATE AGGREGATE ulong64_array_aggregate(att_array_devulong64)
-(
-    sfunc = fn_ulong64_array_agg,
-    stype = ulong64_array_agg_state,
-    combinefunc = fn_ulong64_combine,
-    finalfunc = fn_ulong64_array_final
-);
-
-CREATE AGGREGATE ushort_array_aggregate(att_array_devushort)
-(
-    sfunc = fn_ushort_array_agg,
-    stype = ushort_array_agg_state,
-    combinefunc = fn_ushort_combine,
-    finalfunc = fn_ushort_array_final
-);
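For context, each aggregate above returns its composite state type, so callers
expand the fields they need; a hedged usage sketch (the continuous-aggregate
views in 09_hdb_ext_arrays_aggregates.sql, deleted below, wrap exactly this
pattern per time_bucket()):

    SELECT (double_array_aggregate(t)).count,
           (double_array_aggregate(t)).avg_r::float8[] AS mean_r
    FROM att_array_devdouble AS t;
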
diff --git a/docker-compose/timescaledb/resources/09_hdb_ext_arrays_aggregates.sql b/docker-compose/timescaledb/resources/09_hdb_ext_arrays_aggregates.sql
deleted file mode 100644
index 028712989defdbd6f30bcdde61e98790a9940192..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/09_hdb_ext_arrays_aggregates.sql
+++ /dev/null
@@ -1,328 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
--- Continuous aggregate views for the array attributes.
-\c hdb
--- Double attributes
-CREATE MATERIALIZED VIEW cagg_array_devdouble_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-                , count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (double_array_aggregate(t)).count,  (double_array_aggregate(t)).count_errors
-        , (double_array_aggregate(t)).count_r,  (double_array_aggregate(t)).count_nan_r, (double_array_aggregate(t)).avg_r::float8[],  (double_array_aggregate(t)).min_r,  (double_array_aggregate(t)).max_r,  (double_array_aggregate(t)).stddev_r::float8[]  
-        , (double_array_aggregate(t)).count_w,  (double_array_aggregate(t)).count_nan_w,  (double_array_aggregate(t)).avg_w::float8[], (double_array_aggregate(t)).min_w,  (double_array_aggregate(t)).max_w,  (double_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devdouble as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devdouble_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-                , count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (double_array_aggregate(t)).count,  (double_array_aggregate(t)).count_errors
-        , (double_array_aggregate(t)).count_r,  (double_array_aggregate(t)).count_nan_r, (double_array_aggregate(t)).avg_r::float8[],  (double_array_aggregate(t)).min_r,  (double_array_aggregate(t)).max_r,  (double_array_aggregate(t)).stddev_r::float8[]  
-        , (double_array_aggregate(t)).count_w,  (double_array_aggregate(t)).count_nan_w,  (double_array_aggregate(t)).avg_w::float8[], (double_array_aggregate(t)).min_w,  (double_array_aggregate(t)).max_w,  (double_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devdouble as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devdouble_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-                , count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (double_array_aggregate(t)).count,  (double_array_aggregate(t)).count_errors
-        , (double_array_aggregate(t)).count_r,  (double_array_aggregate(t)).count_nan_r, (double_array_aggregate(t)).avg_r::float8[],  (double_array_aggregate(t)).min_r,  (double_array_aggregate(t)).max_r,  (double_array_aggregate(t)).stddev_r::float8[]  
-        , (double_array_aggregate(t)).count_w,  (double_array_aggregate(t)).count_nan_w,  (double_array_aggregate(t)).avg_w::float8[], (double_array_aggregate(t)).min_w,  (double_array_aggregate(t)).max_w,  (double_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devdouble as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Float attributes
-CREATE MATERIALIZED VIEW cagg_array_devfloat_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-                , count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (float_array_aggregate(t)).count,  (float_array_aggregate(t)).count_errors
-        , (float_array_aggregate(t)).count_r,  (float_array_aggregate(t)).count_nan_r,  (float_array_aggregate(t)).avg_r::float8[], (float_array_aggregate(t)).min_r,  (float_array_aggregate(t)).max_r,  (float_array_aggregate(t)).stddev_r::float8[]  
-        , (float_array_aggregate(t)).count_w,  (float_array_aggregate(t)).count_nan_w,  (float_array_aggregate(t)).avg_w::float8[], (float_array_aggregate(t)).min_w,  (float_array_aggregate(t)).max_w,  (float_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devfloat as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devfloat_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-                , count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (float_array_aggregate(t)).count,  (float_array_aggregate(t)).count_errors
-        , (float_array_aggregate(t)).count_r,  (float_array_aggregate(t)).count_nan_r,  (float_array_aggregate(t)).avg_r::float8[], (float_array_aggregate(t)).min_r,  (float_array_aggregate(t)).max_r,  (float_array_aggregate(t)).stddev_r::float8[]  
-        , (float_array_aggregate(t)).count_w,  (float_array_aggregate(t)).count_nan_w,  (float_array_aggregate(t)).avg_w::float8[], (float_array_aggregate(t)).min_w,  (float_array_aggregate(t)).max_w,  (float_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devfloat as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devfloat_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, count_nan_r, mean_r, min_r, max_r, stddev_r
-                , count_w, count_nan_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (float_array_aggregate(t)).count,  (float_array_aggregate(t)).count_errors
-        , (float_array_aggregate(t)).count_r,  (float_array_aggregate(t)).count_nan_r,  (float_array_aggregate(t)).avg_r::float8[], (float_array_aggregate(t)).min_r,  (float_array_aggregate(t)).max_r,  (float_array_aggregate(t)).stddev_r::float8[]  
-        , (float_array_aggregate(t)).count_w,  (float_array_aggregate(t)).count_nan_w,  (float_array_aggregate(t)).avg_w::float8[], (float_array_aggregate(t)).min_w,  (float_array_aggregate(t)).max_w,  (float_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devfloat as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Long attributes
-CREATE MATERIALIZED VIEW cagg_array_devlong_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (long_array_aggregate(t)).count,  (long_array_aggregate(t)).count_errors
-        , (long_array_aggregate(t)).count_r,  (long_array_aggregate(t)).avg_r::float8[], (long_array_aggregate(t)).min_r,  (long_array_aggregate(t)).max_r,  (long_array_aggregate(t)).stddev_r::float8[]  
-        , (long_array_aggregate(t)).count_w,  (long_array_aggregate(t)).avg_w::float8[], (long_array_aggregate(t)).min_w,  (long_array_aggregate(t)).max_w,  (long_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devlong as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devlong_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (long_array_aggregate(t)).count,  (long_array_aggregate(t)).count_errors
-        , (long_array_aggregate(t)).count_r,  (long_array_aggregate(t)).avg_r::float8[], (long_array_aggregate(t)).min_r,  (long_array_aggregate(t)).max_r,  (long_array_aggregate(t)).stddev_r::float8[]  
-        , (long_array_aggregate(t)).count_w,  (long_array_aggregate(t)).avg_w::float8[], (long_array_aggregate(t)).min_w,  (long_array_aggregate(t)).max_w,  (long_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devlong as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devlong_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (long_array_aggregate(t)).count,  (long_array_aggregate(t)).count_errors
-        , (long_array_aggregate(t)).count_r, (long_array_aggregate(t)).avg_r::float8[], (long_array_aggregate(t)).min_r,  (long_array_aggregate(t)).max_r,  (long_array_aggregate(t)).stddev_r::float8[]  
-        , (long_array_aggregate(t)).count_w, (long_array_aggregate(t)).avg_w::float8[], (long_array_aggregate(t)).min_w,  (long_array_aggregate(t)).max_w,  (long_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devlong as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Long 64 attributes
-CREATE MATERIALIZED VIEW cagg_array_devlong64_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (long64_array_aggregate(t)).count,  (long64_array_aggregate(t)).count_errors
-        , (long64_array_aggregate(t)).count_r,  (long64_array_aggregate(t)).avg_r::float8[], (long64_array_aggregate(t)).min_r,  (long64_array_aggregate(t)).max_r,  (long64_array_aggregate(t)).stddev_r::float8[]  
-        , (long64_array_aggregate(t)).count_w,  (long64_array_aggregate(t)).avg_w::float8[], (long64_array_aggregate(t)).min_w,  (long64_array_aggregate(t)).max_w,  (long64_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devlong64 as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devlong64_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (long64_array_aggregate(t)).count,  (long64_array_aggregate(t)).count_errors
-        , (long64_array_aggregate(t)).count_r,  (long64_array_aggregate(t)).avg_r::float8[], (long64_array_aggregate(t)).min_r,  (long64_array_aggregate(t)).max_r,  (long64_array_aggregate(t)).stddev_r::float8[]  
-        , (long64_array_aggregate(t)).count_w,  (long64_array_aggregate(t)).avg_w::float8[], (long64_array_aggregate(t)).min_w,  (long64_array_aggregate(t)).max_w,  (long64_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devlong64 as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devlong64_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (long64_array_aggregate(t)).count,  (long64_array_aggregate(t)).count_errors
-        , (long64_array_aggregate(t)).count_r, (long64_array_aggregate(t)).avg_r::float8[], (long64_array_aggregate(t)).min_r,  (long64_array_aggregate(t)).max_r,  (long64_array_aggregate(t)).stddev_r::float8[]  
-        , (long64_array_aggregate(t)).count_w, (long64_array_aggregate(t)).avg_w::float8[], (long64_array_aggregate(t)).min_w,  (long64_array_aggregate(t)).max_w,  (long64_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devlong64 as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Short attributes
-CREATE MATERIALIZED VIEW cagg_array_devshort_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (short_array_aggregate(t)).count,  (short_array_aggregate(t)).count_errors
-        , (short_array_aggregate(t)).count_r,  (short_array_aggregate(t)).avg_r::float8[], (short_array_aggregate(t)).min_r,  (short_array_aggregate(t)).max_r,  (short_array_aggregate(t)).stddev_r::float8[]  
-        , (short_array_aggregate(t)).count_w,  (short_array_aggregate(t)).avg_w::float8[], (short_array_aggregate(t)).min_w,  (short_array_aggregate(t)).max_w,  (short_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devshort as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devshort_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (short_array_aggregate(t)).count,  (short_array_aggregate(t)).count_errors
-        , (short_array_aggregate(t)).count_r,  (short_array_aggregate(t)).avg_r::float8[], (short_array_aggregate(t)).min_r,  (short_array_aggregate(t)).max_r,  (short_array_aggregate(t)).stddev_r::float8[]  
-        , (short_array_aggregate(t)).count_w,  (short_array_aggregate(t)).avg_w::float8[], (short_array_aggregate(t)).min_w,  (short_array_aggregate(t)).max_w,  (short_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devshort as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devshort_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (short_array_aggregate(t)).count,  (short_array_aggregate(t)).count_errors
-        , (short_array_aggregate(t)).count_r, (short_array_aggregate(t)).avg_r::float8[], (short_array_aggregate(t)).min_r,  (short_array_aggregate(t)).max_r,  (short_array_aggregate(t)).stddev_r::float8[]  
-        , (short_array_aggregate(t)).count_w, (short_array_aggregate(t)).avg_w::float8[], (short_array_aggregate(t)).min_w,  (short_array_aggregate(t)).max_w,  (short_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devshort as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Unsigned long attributes
-CREATE MATERIALIZED VIEW cagg_array_devulong_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (ulong_array_aggregate(t)).count,  (ulong_array_aggregate(t)).count_errors
-        , (ulong_array_aggregate(t)).count_r,  (ulong_array_aggregate(t)).avg_r::float8[], (ulong_array_aggregate(t)).min_r,  (ulong_array_aggregate(t)).max_r,  (ulong_array_aggregate(t)).stddev_r::float8[]  
-        , (ulong_array_aggregate(t)).count_w,  (ulong_array_aggregate(t)).avg_w::float8[], (ulong_array_aggregate(t)).min_w,  (ulong_array_aggregate(t)).max_w,  (ulong_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devulong as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devulong_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (ulong_array_aggregate(t)).count,  (ulong_array_aggregate(t)).count_errors
-        , (ulong_array_aggregate(t)).count_r,  (ulong_array_aggregate(t)).avg_r::float8[], (ulong_array_aggregate(t)).min_r,  (ulong_array_aggregate(t)).max_r,  (ulong_array_aggregate(t)).stddev_r::float8[]  
-        , (ulong_array_aggregate(t)).count_w,  (ulong_array_aggregate(t)).avg_w::float8[], (ulong_array_aggregate(t)).min_w,  (ulong_array_aggregate(t)).max_w,  (ulong_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devulong as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devulong_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (ulong_array_aggregate(t)).count,  (ulong_array_aggregate(t)).count_errors
-        , (ulong_array_aggregate(t)).count_r, (ulong_array_aggregate(t)).avg_r::float8[], (ulong_array_aggregate(t)).min_r,  (ulong_array_aggregate(t)).max_r,  (ulong_array_aggregate(t)).stddev_r::float8[]  
-        , (ulong_array_aggregate(t)).count_w, (ulong_array_aggregate(t)).avg_w::float8[], (ulong_array_aggregate(t)).min_w,  (ulong_array_aggregate(t)).max_w,  (ulong_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devulong as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Unsigned long 64 attributes
-CREATE MATERIALIZED VIEW cagg_array_devulong64_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (ulong64_array_aggregate(t)).count,  (ulong64_array_aggregate(t)).count_errors
-        , (ulong64_array_aggregate(t)).count_r,  (ulong64_array_aggregate(t)).avg_r::float8[], (ulong64_array_aggregate(t)).min_r,  (ulong64_array_aggregate(t)).max_r,  (ulong64_array_aggregate(t)).stddev_r::float8[]  
-        , (ulong64_array_aggregate(t)).count_w,  (ulong64_array_aggregate(t)).avg_w::float8[], (ulong64_array_aggregate(t)).min_w,  (ulong64_array_aggregate(t)).max_w,  (ulong64_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devulong64 as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devulong64_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (ulong64_array_aggregate(t)).count,  (ulong64_array_aggregate(t)).count_errors
-        , (ulong64_array_aggregate(t)).count_r,  (ulong64_array_aggregate(t)).avg_r::float8[], (ulong64_array_aggregate(t)).min_r,  (ulong64_array_aggregate(t)).max_r,  (ulong64_array_aggregate(t)).stddev_r::float8[]  
-        , (ulong64_array_aggregate(t)).count_w,  (ulong64_array_aggregate(t)).avg_w::float8[], (ulong64_array_aggregate(t)).min_w,  (ulong64_array_aggregate(t)).max_w,  (ulong64_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devulong64 as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devulong64_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (ulong64_array_aggregate(t)).count,  (ulong64_array_aggregate(t)).count_errors
-        , (ulong64_array_aggregate(t)).count_r, (ulong64_array_aggregate(t)).avg_r::float8[], (ulong64_array_aggregate(t)).min_r,  (ulong64_array_aggregate(t)).max_r,  (ulong64_array_aggregate(t)).stddev_r::float8[]  
-        , (ulong64_array_aggregate(t)).count_w, (ulong64_array_aggregate(t)).avg_w::float8[], (ulong64_array_aggregate(t)).min_w,  (ulong64_array_aggregate(t)).max_w,  (ulong64_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devulong64 as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Unsigned short attributes
-CREATE MATERIALIZED VIEW cagg_array_devushort_1hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 hour', data_time), (ushort_array_aggregate(t)).count,  (ushort_array_aggregate(t)).count_errors
-        , (ushort_array_aggregate(t)).count_r,  (ushort_array_aggregate(t)).avg_r::float8[], (ushort_array_aggregate(t)).min_r,  (ushort_array_aggregate(t)).max_r,  (ushort_array_aggregate(t)).stddev_r::float8[]  
-        , (ushort_array_aggregate(t)).count_w,  (ushort_array_aggregate(t)).avg_w::float8[], (ushort_array_aggregate(t)).min_w,  (ushort_array_aggregate(t)).max_w,  (ushort_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devushort as t
-        GROUP BY time_bucket('1 hour', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devushort_8hour(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('8 hours', data_time), (ushort_array_aggregate(t)).count,  (ushort_array_aggregate(t)).count_errors
-        , (ushort_array_aggregate(t)).count_r,  (ushort_array_aggregate(t)).avg_r::float8[], (ushort_array_aggregate(t)).min_r,  (ushort_array_aggregate(t)).max_r,  (ushort_array_aggregate(t)).stddev_r::float8[]  
-        , (ushort_array_aggregate(t)).count_w,  (ushort_array_aggregate(t)).avg_w::float8[], (ushort_array_aggregate(t)).min_w,  (ushort_array_aggregate(t)).max_w,  (ushort_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devushort as t
-        GROUP BY time_bucket('8 hours', data_time), att_conf_id;
-
-CREATE MATERIALIZED VIEW cagg_array_devushort_1day(
-		att_conf_id, data_time, count_rows, count_errors
-                , count_r, mean_r, min_r, max_r, stddev_r
-                , count_w, mean_w, min_w, max_w, stddev_w
-	) WITH (timescaledb.continuous)
-       	AS SELECT att_conf_id, time_bucket('1 day', data_time), (ushort_array_aggregate(t)).count,  (ushort_array_aggregate(t)).count_errors
-        , (ushort_array_aggregate(t)).count_r, (ushort_array_aggregate(t)).avg_r::float8[], (ushort_array_aggregate(t)).min_r,  (ushort_array_aggregate(t)).max_r,  (ushort_array_aggregate(t)).stddev_r::float8[]  
-        , (ushort_array_aggregate(t)).count_w, (ushort_array_aggregate(t)).avg_w::float8[], (ushort_array_aggregate(t)).min_w,  (ushort_array_aggregate(t)).max_w,  (ushort_array_aggregate(t)).stddev_w::float8[]  
-       	FROM att_array_devushort as t
-        GROUP BY time_bucket('1 day', data_time), att_conf_id;
-
--- Drop all the views
--- DROP VIEW cagg_array_devdouble_1hour CASCADE;
--- DROP VIEW cagg_array_devdouble_8hour CASCADE;
--- DROP VIEW cagg_array_devdouble_1day CASCADE;
-
--- DROP VIEW cagg_array_devfloat_1hour CASCADE;
--- DROP VIEW cagg_array_devfloat_8hour CASCADE;
--- DROP VIEW cagg_array_devfloat_1day CASCADE;
-
--- DROP VIEW cagg_array_devlong_1hour CASCADE;
--- DROP VIEW cagg_array_devlong_8hour CASCADE;
--- DROP VIEW cagg_array_devlong_1day CASCADE;
-
--- DROP VIEW cagg_array_devlong64_1hour CASCADE;
--- DROP VIEW cagg_array_devlong64_8hour CASCADE;
--- DROP VIEW cagg_array_devlong64_1day CASCADE;
-
--- DROP VIEW cagg_array_devshort_1hour CASCADE;
--- DROP VIEW cagg_array_devshort_8hour CASCADE;
--- DROP VIEW cagg_array_devshort_1day CASCADE;
-
--- DROP VIEW cagg_array_devulong_1hour CASCADE;
--- DROP VIEW cagg_array_devulong_8hour CASCADE;
--- DROP VIEW cagg_array_devulong_1day CASCADE;
-
--- DROP VIEW cagg_array_devulong64_1hour CASCADE;
--- DROP VIEW cagg_array_devulong64_8hour CASCADE;
--- DROP VIEW cagg_array_devulong64_1day CASCADE;
-
--- DROP VIEW cagg_array_devushort_1hour CASCADE;
--- DROP VIEW cagg_array_devushort_8hour CASCADE;
--- DROP VIEW cagg_array_devushort_1day CASCADE;
-
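Once materialised, the views above read like ordinary relations; a hedged
maintenance sketch, assuming the TimescaleDB 2.x API:

    -- Refresh the whole time range, then sample one hourly bucket.
    CALL refresh_continuous_aggregate('cagg_array_devdouble_1hour', NULL, NULL);
    SELECT data_time, count_rows, mean_r
    FROM cagg_array_devdouble_1hour
    LIMIT 10;
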
diff --git a/docker-compose/timescaledb/resources/10_hdb_ext_compress_policy.sql b/docker-compose/timescaledb/resources/10_hdb_ext_compress_policy.sql
deleted file mode 100644
index a815950e82b9d88d6e992fda23e8e3a5d5b84a26..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/10_hdb_ext_compress_policy.sql
+++ /dev/null
@@ -1,118 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-\c hdb
--- Compress chunk policy
--- Allow compression on the table
-ALTER TABLE att_scalar_devboolean SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devdouble SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devfloat SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devencoded SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devenum SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devstate SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devstring SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devuchar SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devulong SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devulong64 SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devlong64 SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devlong SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devushort SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_scalar_devshort SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-
-ALTER TABLE att_array_devboolean SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devdouble SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devfloat SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devencoded SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devenum SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devstate SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devstring SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devuchar SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devulong SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devulong64 SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devlong64 SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devlong SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devushort SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-ALTER TABLE att_array_devshort SET(timescaledb.compress, timescaledb.compress_segmentby = 'att_conf_id, att_error_desc_id', timescaledb.compress_orderby = 'data_time DESC');
-
-DO $$ BEGIN
-    IF (SELECT extversion >= '2.0.0' FROM pg_extension WHERE extname = 'timescaledb') THEN
-        -- If using timescaledb v2
-        PERFORM add_compression_policy('att_scalar_devboolean', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devdouble', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devfloat', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devencoded', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devenum', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devstate', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devstring', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devuchar', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devulong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devulong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devlong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devlong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devushort', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_scalar_devshort', INTERVAL '200d', if_not_exists => true);
-
-        PERFORM add_compression_policy('att_array_devboolean', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devdouble', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devfloat', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devencoded', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devenum', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devstate', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devstring', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devuchar', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devulong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devulong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devlong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devlong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devushort', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compression_policy('att_array_devshort', INTERVAL '200d', if_not_exists => true);
-    ELSE
-        -- If using timescaledb v1.7
-        PERFORM add_compress_chunks_policy('att_scalar_devboolean', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devdouble', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devfloat', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devencoded', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devenum', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devstate', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devstring', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devuchar', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devulong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devulong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devlong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devlong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devushort', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_scalar_devshort', INTERVAL '200d', if_not_exists => true);
-
-        PERFORM add_compress_chunks_policy('att_array_devboolean', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devdouble', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devfloat', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devencoded', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devenum', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devstate', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devstring', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devuchar', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devulong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devulong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devlong64', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devlong', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devushort', INTERVAL '200d', if_not_exists => true);
-        PERFORM add_compress_chunks_policy('att_array_devshort', INTERVAL '200d', if_not_exists => true);
-    END IF;
-END $$;
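The DO block branches on the extension version because the policy call was
renamed to add_compression_policy() in TimescaleDB 2.0; note that the lexical
comparison on extversion is fragile for multi-digit components ('2.10.0' sorts
before '2.9.0'). The ALTER TABLE statements above only enable compression;
chunks are compressed once the policy job fires. Hedged checks, assuming the
TimescaleDB 2.x catalog views:

    -- Which extension version is installed?
    SELECT extversion FROM pg_extension WHERE extname = 'timescaledb';
    -- Did the per-table segmentby/orderby settings take?
    SELECT * FROM timescaledb_information.compression_settings
    WHERE hypertable_name = 'att_scalar_devdouble';
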
diff --git a/docker-compose/timescaledb/resources/11_hdb_ext_reorder_policy.sql b/docker-compose/timescaledb/resources/11_hdb_ext_reorder_policy.sql
deleted file mode 100644
index e8e0f3911b6945159945d8e9b0caafeaad1741e1..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/11_hdb_ext_reorder_policy.sql
+++ /dev/null
@@ -1,53 +0,0 @@
--- -----------------------------------------------------------------------------
--- This file is part of the hdbpp-timescale-project
---
--- Copyright (C) : 2014-2019
---   European Synchrotron Radiation Facility
---   BP 220, Grenoble 38043, FRANCE
---
--- libhdb++timescale is free software: you can redistribute it and/or modify
--- it under the terms of the Lesser GNU General Public License as published by
--- the Free Software Foundation, either version 3 of the License, or
--- (at your option) any later version.
---
--- libhdb++timescale is distributed in the hope that it will be useful,
--- but WITHOUT ANY WARRANTY; without even the implied warranty of
--- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the Lesser
--- GNU General Public License for more details.
---
--- You should have received a copy of the Lesser GNU General Public License
--- along with libhdb++timescale.  If not, see <http://www.gnu.org/licenses/>.
--- -----------------------------------------------------------------------------
-
-\c hdb
--- Reorder chunk policy
-
-SELECT add_reorder_policy('att_scalar_devboolean', 'att_scalar_devboolean_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devdouble', 'att_scalar_devdouble_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devfloat', 'att_scalar_devfloat_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devencoded', 'att_scalar_devencoded_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devenum', 'att_scalar_devenum_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devstate', 'att_scalar_devstate_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devstring', 'att_scalar_devstring_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devuchar', 'att_scalar_devuchar_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devulong', 'att_scalar_devulong_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devulong64', 'att_scalar_devulong64_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devlong64', 'att_scalar_devlong64_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devlong', 'att_scalar_devlong_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devushort', 'att_scalar_devushort_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_scalar_devshort', 'att_scalar_devshort_att_conf_id_data_time_idx', if_not_exists => true);
-
-SELECT add_reorder_policy('att_array_devboolean', 'att_array_devboolean_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devdouble', 'att_array_devdouble_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devfloat', 'att_array_devfloat_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devencoded', 'att_array_devencoded_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devenum', 'att_array_devenum_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devstate', 'att_array_devstate_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devstring', 'att_array_devstring_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devuchar', 'att_array_devuchar_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devulong', 'att_array_devulong_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devulong64', 'att_array_devulong64_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devlong64', 'att_array_devlong64_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devlong', 'att_array_devlong_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devushort', 'att_array_devushort_att_conf_id_data_time_idx', if_not_exists => true);
-SELECT add_reorder_policy('att_array_devshort', 'att_array_devshort_att_conf_id_data_time_idx', if_not_exists => true);
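Each reorder policy schedules a background job that rewrites older chunks in
the order of the named index. A hedged counterpart for dropping one, assuming
the same TimescaleDB API family:

    SELECT remove_reorder_policy('att_scalar_devdouble', if_exists => true);
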
diff --git a/docker-compose/timescaledb/resources/12_lofar_func.sh b/docker-compose/timescaledb/resources/12_lofar_func.sh
deleted file mode 100644
index 4797c7a6df2acf11e3709221a9bd4fd335128264..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/12_lofar_func.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-psql << EOF
-\c hdb
-CREATE OR REPLACE FUNCTION mask(double precision[], boolean[])
-RETURNS double precision[] LANGUAGE sql
-AS \$function\$ SELECT ARRAY(SELECT
-case when \$2[i] then \$1[i]
-else '0'::double precision end
-FROM generate_subscripts(\$1,1) g(i)) \$function\$;
-EOF
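mask() keeps an element where its boolean flag is true and substitutes 0
otherwise, for example:

    SELECT mask(ARRAY[1.5, 2.5, 3.5]::double precision[],
                ARRAY[true, false, true]);  -- -> {1.5,0,3.5}
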
diff --git a/docker-compose/timescaledb/resources/13_lofar_views.sql b/docker-compose/timescaledb/resources/13_lofar_views.sql
deleted file mode 100644
index 791e191452f769fbae60136a1746e4e2c0a1fc7c..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/13_lofar_views.sql
+++ /dev/null
@@ -1,594 +0,0 @@
-\c hdb
-
--- NOTE: We concatenate domain/family/member here, which means we can't index
---       the resulting column. However, queries also supply the attribute name,
---       which we can index. The scan on the device name is then limited to
---       entries with the same attribute name across devices (see example below).
-
--- DOUBLE --
-
-CREATE OR REPLACE VIEW lofar_image_double AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devdouble att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_double AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devdouble att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-  
-CREATE OR REPLACE VIEW lofar_scalar_double AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devdouble att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-  
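A hedged consumer query against the DOUBLE views above, following the NOTE at
the top of this file (attribute and device names are hypothetical): filter on
the indexable attribute name first, then narrow by the concatenated device
string:

    SELECT data_time, device, x, value
    FROM lofar_array_double
    WHERE name = 'FPGA_temp_R'    -- hypothetical attribute name
      AND device = 'stat/sdp/1'   -- hypothetical device name
    ORDER BY data_time;
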
--- BOOLEAN --
-
-CREATE OR REPLACE VIEW lofar_image_boolean AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    CASE WHEN array_element.val THEN 1 ELSE 0 END AS value
-  FROM att_image_devboolean att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_array_boolean AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    CASE WHEN array_element.val THEN 1 ELSE 0 END AS value
-  FROM att_array_devboolean att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-  
-CREATE OR REPLACE VIEW lofar_scalar_boolean AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    CASE WHEN value_r THEN 1 ELSE 0 END AS value
-  FROM att_scalar_devboolean att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- UCHAR --
-
-CREATE OR REPLACE VIEW lofar_image_uchar AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devuchar att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_uchar AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devuchar att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_uchar AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devuchar att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- SHORT --
-
-CREATE OR REPLACE VIEW lofar_image_short AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devshort att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_short AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devshort att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_short AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devshort att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- USHORT --
-
-CREATE OR REPLACE VIEW lofar_image_ushort AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devushort att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_ushort AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devushort att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_ushort AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devushort att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- LONG -- 
-
-CREATE OR REPLACE VIEW lofar_image_long AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devlong att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_long AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devlong att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_long AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devlong att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- ULONG -- 
-
-CREATE OR REPLACE VIEW lofar_image_ulong AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devulong att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_ulong AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devulong att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_ulong AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devulong att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- LONG64 --
-
-CREATE OR REPLACE VIEW lofar_image_long64 AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devlong64 att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_long64 AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devlong64 att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_long64 AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devlong64 att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- ULONG64 --
-
-CREATE OR REPLACE VIEW lofar_image_ulong64 AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devulong64 att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_ulong64 AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devulong64 att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_ulong64 AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devulong64 att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- FLOAT -- 
-
-CREATE OR REPLACE VIEW lofar_image_float AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devfloat att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_float AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devfloat att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_scalar_float AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devfloat att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- STRING --
-
-CREATE OR REPLACE VIEW lofar_image_string AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devstring att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_string AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devstring att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_string AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devstring att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- STATE --
-
-CREATE OR REPLACE VIEW lofar_image_state AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devstate att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_state AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devstate att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_state AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devstate att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- ENCODED -- 
-
-CREATE OR REPLACE VIEW lofar_image_encoded AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devencoded att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_encoded AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devencoded att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_encoded AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devencoded att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
--- ENUM --
-
-CREATE OR REPLACE VIEW lofar_image_enum AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    (array_element.idx - 1) / ARRAY_LENGTH(att.value_r, 1) AS x,
-    (array_element.idx - 1) % ARRAY_LENGTH(att.value_r, 1) AS y,
-    array_element.val as value
-  FROM att_image_devenum att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
-
-CREATE OR REPLACE VIEW lofar_array_enum AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    array_element.idx - 1 AS x,
-    array_element.val as value
-  FROM att_array_devenum att
-  -- add array values, and their index
-  JOIN LATERAL UNNEST(att.value_r) WITH ORDINALITY AS array_element(val,idx) ON TRUE
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
- 
-CREATE OR REPLACE VIEW lofar_scalar_enum AS
-  SELECT
-    att.data_time AS data_time,
-    CONCAT_WS('/', domain, family, member) AS device,
-    ac.name AS name,
-    value_r as value
-  FROM att_scalar_devenum att
-  -- add the device information
-  JOIN att_conf ac ON att.att_conf_id = ac.att_conf_id
-  WHERE att.value_r IS NOT NULL;
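Together, these views flattened every type-specific HDB++ table into a uniform (data_time, device, name, value[, x, y]) shape, so downstream clients could query archived attributes without knowing the underlying Tango type. A minimal usage sketch against one of the scalar views; the device and attribute names here are hypothetical:

    -- read the last hour of archived values for one attribute
    SELECT data_time, value
    FROM lofar_scalar_float
    WHERE device = 'stat/recv/1'
      AND name = 'rcu_temp_r'
      AND data_time > NOW() - INTERVAL '1 hour'
    ORDER BY data_time;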
diff --git a/docker-compose/timescaledb/resources/14_cleanup.sql b/docker-compose/timescaledb/resources/14_cleanup.sql
deleted file mode 100644
index b18b24a7afa73d22e9e987576fa2495d591e8e24..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/14_cleanup.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER USER hdb_admin NOSUPERUSER;
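This cleanup step revoked superuser rights from hdb_admin once the schema and view scripts had run, so the archiver account no longer operated with elevated privileges. Whether the demotion took effect can be verified against the standard pg_roles catalog:

    -- confirm hdb_admin is no longer a superuser
    SELECT rolname, rolsuper FROM pg_roles WHERE rolname = 'hdb_admin';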
diff --git a/jupyter-notebooks/Archiving_load_test.ipynb b/jupyter-notebooks/Archiving_load_test.ipynb
deleted file mode 100644
index 1a39a3115949398fd2743d4122aa053bd6f880f6..0000000000000000000000000000000000000000
--- a/jupyter-notebooks/Archiving_load_test.ipynb
+++ /dev/null
@@ -1,1135 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "d9f35471",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import sys, time\n",
-    "import numpy as np\n",
-    "sys.path.append('/hosthome/tango/tangostationcontrol/tangostationcontrol')\n",
-    "from toolkit.archiver import *\n",
-    "from matplotlib import pyplot as plt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "5817986f",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/recv/1/version_r not found in archiving list!\n",
-      "Attribute stat/recv/1/opcua_missing_attributes_r not found in archiving list!\n",
-      "Attribute stat/recv/1/ant_status_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_colour_r not found in archiving list!\n",
-      "Attribute stat/recv/1/ant_mask_rw removed!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_led_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_led_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_adc_locked_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_band_select_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_band_select_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_dth_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_mask_rw removed!\n",
-      "Attribute stat/recv/1/rcu_pcb_id_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pcb_number_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pcb_version_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_1v8_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_2v5_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_3v3_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_analog_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_iout_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vin_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vout_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_digital_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_good_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_temp_r not found in archiving list!\n",
-      "Attribute stat/recv/1/recvtr_i2c_error_r not found in archiving list!\n",
-      "Attribute stat/recv/1/recvtr_monitor_rate_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/recvtr_translator_busy_r not found in archiving list!\n",
-      "Attribute stat/recv/1/state removed!\n",
-      "Attribute stat/recv/1/status not found in archiving list!\n",
-      "Attribute stat/sdp/1/version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/opcua_missing_attributes_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_firmware_version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_global_node_index_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_hardware_version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_scrap_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_scrap_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_antenna_band_index_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_block_period_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_f_adc_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_fsub_type_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_temp_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_weights_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_weights_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_fpga_communication_error_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_first_fpga_nr_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_beamsets_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_fpgas_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_software_version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_start_time_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_tod_r removed!\n",
-      "Attribute stat/sdp/1/tr_tod_pps_delta_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_mean_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_rms_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_rbd_count_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_dev_syncn_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err0_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err1_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_bsn_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_packets_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_valid_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_err_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/state not found in archiving list!\n",
-      "Attribute stat/sdp/1/status not found in archiving list!\n",
-      "Device STAT/SST/1 offline\n",
-      "Device STAT/XST/1 offline\n",
-      "Device STAT/UNB2/1 offline\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Apply the chosen JSON configuration file in directory toolkit/archiver_config/\n",
-    "archiver = Archiver(selector_filename='lofar2.json')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "848dc5e7",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "lofar2.json\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "{'global_variables': {'development_polling_time': '10000',\n",
-       "  'development_archive_time': '60000'},\n",
-       " 'devices': {'STAT/RECV/1': {'environment': 'development',\n",
-       "   'include': [],\n",
-       "   'exclude': ['CLK_Enable_PWR_R',\n",
-       "    'CLK_I2C_STATUS_R',\n",
-       "    'CLK_PLL_error_R',\n",
-       "    'CLK_PLL_locked_R',\n",
-       "    'CLK_translator_busy_R']},\n",
-       "  'STAT/SDP/1': {'environment': 'development',\n",
-       "   'include': [],\n",
-       "   'exclude': ['FPGA_scrap_R', 'FPGA_scrap_RW']},\n",
-       "  'STAT/SST/1': {'environment': 'development', 'include': [], 'exclude': []},\n",
-       "  'STAT/XST/1': {'environment': 'development', 'include': [], 'exclude': []},\n",
-       "  'STAT/UNB2/1': {'environment': 'development', 'include': [], 'exclude': []}}}"
-      ]
-     },
-     "execution_count": 3,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the configuration file (as a dictionary)\n",
-    "selector = archiver.selector\n",
-    "print(selector.filename)\n",
-    "env_dict = selector.get_dict()\n",
-    "env_dict"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "a81e8b3b",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "OFF\n"
-     ]
-    }
-   ],
-   "source": [
-    "device_name = 'STAT/RECV/1'\n",
-    "d=DeviceProxy(device_name) \n",
-    "d.set_timeout_millis(10000)\n",
-    "state = str(d.state())\n",
-    "print(state)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "f5394d09",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/recv/1/version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/opcua_missing_attributes_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/ant_status_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_colour_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_led_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_led_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_adc_locked_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_band_select_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_band_select_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_dth_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pcb_id_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pcb_number_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pcb_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_1v8_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_2v5_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_3v3_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_analog_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_iout_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vin_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vout_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_digital_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_good_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_temp_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/recvtr_i2c_error_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/recvtr_monitor_rate_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/recvtr_translator_busy_r will not be archived because polling is set to FALSE!\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Add RECV attributes to perform a load test\n",
-    "archiver.add_attributes_by_device(device_name,global_archive_period=5000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "ba3a25ac",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'tango://databaseds:10000/stat/recv/1/ant_mask_rw': 'Read value for attribute ANT_mask_RW has not been updated',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_mask_rw': 'Read value for attribute RCU_mask_RW has not been updated',\n",
-       " 'tango://databaseds:10000/stat/recv/1/status': 'Storing Error: mysql_stmt_bind_param() failed, err=Buffer type is not supported'}"
-      ]
-     },
-     "execution_count": 6,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the errors for each attribute\n",
-    "# If the device is in OFF state, all its attributes should be in error (normal behaviour)\n",
-    "err_dict = archiver.get_subscriber_errors()\n",
-    "err_dict"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "b4de92a0",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Device is now in ON state\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Start the device\n",
-    "if state == \"OFF\":\n",
-    "    time.sleep(1)\n",
-    "    d.initialise()\n",
-    "    time.sleep(1)\n",
-    "state = str(d.state())\n",
-    "if state == \"STANDBY\":\n",
-    "    d.set_defaults()\n",
-    "    d.on()\n",
-    "state = str(d.state())\n",
-    "if state == \"ON\":\n",
-    "    print(\"Device is now in ON state\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "5d40b87c",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['tango://databaseds:10000/stat/recv/1/ant_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/state',\n",
-       " 'tango://databaseds:10000/stat/recv/1/status']"
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the attributes currently managed by the event subscriber\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "attrs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "678879e3",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'tango://databaseds:10000/stat/recv/1/ant_mask_rw': 'Read value for attribute ANT_mask_RW has not been updated',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_mask_rw': 'Read value for attribute RCU_mask_RW has not been updated',\n",
-       " 'tango://databaseds:10000/stat/recv/1/status': 'Storing Error: mysql_stmt_bind_param() failed, err=Buffer type is not supported'}"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the errors for each attribute\n",
-    "err_dict = archiver.get_subscriber_errors()\n",
-    "err_dict"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "d3904658",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Attribute                                     Poll Period     Archive Period \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       1000            5000           \n",
-      "STAT/RECV/1/rcu_mask_rw                       1000            5000           \n",
-      "STAT/RECV/1/state                             1000            5000           \n",
-      "STAT/RECV/1/status                            1000            5000           \n"
-     ]
-    }
-   ],
-   "source": [
-    "# Print the attribute periods\n",
-    "def print_periods(attrs):\n",
-    "    print(\"{:<45} {:<15} {:<15}\".format('Attribute','Poll Period','Archive Period'))\n",
-    "    print(\"----------\")\n",
-    "    for a in attrs:\n",
-    "        a = parse_attribute_name(a)\n",
-    "        ap = AttributeProxy(a)\n",
-    "        att_fqname = ap.get_device_proxy().name()+'/'+ap.name()\n",
-    "        print(\"{:<45} {:<15} {:<15}\".format(att_fqname,ap.get_poll_period(),ap.get_property('archive_period')['archive_period'][0]))\n",
-    "\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "print_periods(attrs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "27ef5564",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'0.0 events/period'"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the current event subscriber archive frequency (number of total archive events per minute)\n",
-    "# Be aware that these statistics need some time (even minutes) after device initialisation to become reliable\n",
-    "archiver.get_subscriber_load()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "241b5282",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Attribute                                     Record Freq     Failure Freq   \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       0.0             12.0           \n",
-      "STAT/RECV/1/rcu_mask_rw                       0.0             12.0           \n",
-      "STAT/RECV/1/state                             0.0             12.0           \n",
-      "STAT/RECV/1/status                            0.0             12.0           \n"
-     ]
-    }
-   ],
-   "source": [
-    "# Print the current attribute archive frequency (number of events per minute)\n",
-    "# E.g. if an attribute is supposed to be archived every 10s, its frequency value should be 6\n",
-    "def print_freq(attrs):\n",
-    "    print(\"{:<45} {:<15} {:<15}\".format('Attribute','Record Freq','Failure Freq'))\n",
-    "    print(\"----------\")\n",
-    "    for a in attrs:\n",
-    "        a = parse_attribute_name(a)\n",
-    "        ap = AttributeProxy(a)\n",
-    "        att_fqname = ap.get_device_proxy().name()+'/'+ap.name()\n",
-    "        print(\"{:<45} {:<15} {:<15}\".format(att_fqname,archiver.get_attribute_freq(att_fqname),archiver.get_attribute_failures(att_fqname)))\n",
-    "\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "print_freq(attrs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "25446390",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute STAT/RECV/1/rcu_mask_rw removed!\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Update the archive time of an attribute\n",
-    "archiver.update_archiving_attribute('STAT/RECV/1/rcu_mask_rw',polling_period=1000,event_period=10000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "id": "9cc4f883",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Attribute                                     Poll Period     Archive Period \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       1000            5000           \n",
-      "STAT/RECV/1/state                             1000            5000           \n",
-      "STAT/RECV/1/status                            1000            5000           \n",
-      "STAT/RECV/1/rcu_mask_rw                       1000            10000          \n",
-      "\n",
-      "Attribute                                     Record Freq     Failure Freq   \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       0.0             3.0            \n",
-      "STAT/RECV/1/state                             1.0             2.0            \n",
-      "STAT/RECV/1/status                            -1.0            4.0            \n",
-      "STAT/RECV/1/rcu_mask_rw                       1.0             0.0            \n"
-     ]
-    }
-   ],
-   "source": [
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "print_periods(attrs)\n",
-    "print()\n",
-    "print_freq(attrs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "c3415c09",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/sdp/1/version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/opcua_missing_attributes_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_rw will not be archived because polling is set to FALSE!\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "OFF\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_firmware_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_global_node_index_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_hardware_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_antenna_band_index_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_block_period_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_f_adc_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_fsub_type_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_fpga_communication_error_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_first_fpga_nr_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_beamsets_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_fpgas_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_software_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_start_time_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_tod_pps_delta_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_mean_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_rms_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_rbd_count_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_dev_syncn_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err0_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err1_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_bsn_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_packets_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_valid_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_err_r will not be archived because polling is set to FALSE!\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Add SDP attributes to the load test\n",
-    "sdp_name = 'STAT/SDP/1'\n",
-    "d2=DeviceProxy(sdp_name) \n",
-    "state = str(d2.state())\n",
-    "print(state)\n",
-    "archiver.add_attributes_by_device(sdp_name,global_archive_period=5000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "id": "141c52da",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Device is now in ON state\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Start the SDP device\n",
-    "if state == \"OFF\":\n",
-    "    d2.set_timeout_millis(10000) # Temporary workaround due to the new SDP implementation\n",
-    "    time.sleep(1)\n",
-    "    d2.initialise()\n",
-    "    time.sleep(1)\n",
-    "state = str(d2.state())\n",
-    "if state == \"STANDBY\":\n",
-    "    d2.set_defaults()\n",
-    "    d2.on()\n",
-    "state = str(d2.state())\n",
-    "if state == \"ON\":\n",
-    "    print(\"Device is now in ON state\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "b53e5b8b",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'tango://databaseds:10000/stat/recv/1/status': 'Storing Error: mysql_stmt_bind_param() failed, err=Buffer type is not supported',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_scrap_r': 'Read value for attribute FPGA_scrap_R has not been updated',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_scrap_rw': 'Read value for attribute FPGA_scrap_RW has not been updated',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_temp_r': 'Read value for attribute FPGA_temp_R has not been updated',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_weights_r': 'Read value for attribute FPGA_weights_R has not been updated',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_weights_rw': 'Read value for attribute FPGA_weights_RW has not been updated',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_tod_r': 'Read value for attribute TR_tod_R has not been updated'}"
-      ]
-     },
-     "execution_count": 17,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Check errors\n",
-    "err_dict = archiver.get_subscriber_errors()\n",
-    "err_dict\n",
-    "# Reminder: the MySQL DBMS cannot handle the SDP/FPGA_scrap_R attribute, probably due to its dimension (8192)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "4fed6cf4",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Attribute                                     Poll Period     Archive Period \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       1000            5000           \n",
-      "STAT/RECV/1/state                             1000            5000           \n",
-      "STAT/RECV/1/status                            1000            5000           \n",
-      "STAT/RECV/1/rcu_mask_rw                       1000            10000          \n",
-      "STAT/SDP/1/fpga_scrap_r                       1000            5000           \n",
-      "STAT/SDP/1/fpga_scrap_rw                      1000            5000           \n",
-      "STAT/SDP/1/fpga_temp_r                        1000            5000           \n",
-      "STAT/SDP/1/fpga_weights_r                     1000            5000           \n",
-      "STAT/SDP/1/fpga_weights_rw                    1000            5000           \n",
-      "STAT/SDP/1/tr_tod_r                           1000            5000           \n",
-      "\n",
-      "Attribute                                     Record Freq     Failure Freq   \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       0.0             3.0            \n",
-      "STAT/RECV/1/state                             1.0             2.0            \n",
-      "STAT/RECV/1/status                            -1.0            4.0            \n",
-      "STAT/RECV/1/rcu_mask_rw                       1.0             0.0            \n",
-      "STAT/SDP/1/fpga_scrap_r                       -6.0            18.0           \n",
-      "STAT/SDP/1/fpga_scrap_rw                      -6.0            18.0           \n",
-      "STAT/SDP/1/fpga_temp_r                        0.0             12.0           \n",
-      "STAT/SDP/1/fpga_weights_r                     -6.0            18.0           \n",
-      "STAT/SDP/1/fpga_weights_rw                    -6.0            18.0           \n",
-      "STAT/SDP/1/tr_tod_r                           0.0             12.0           \n"
-     ]
-    }
-   ],
-   "source": [
-    "# Check frequencies\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "print_periods(attrs)\n",
-    "print()\n",
-    "print_freq(attrs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "6e495661",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'stat/recv/1/ant_mask_rw'"
-      ]
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Initialise the retriever object and pick the attribute to read back from the database\n",
-    "retriever = Retriever()\n",
-    "# Attribute chosen to be retrieved\n",
-    "attr_name = 'ant_mask_rw'\n",
-    "attr_fq_name = str(device_name+'/'+attr_name).lower()\n",
-    "attr_fq_name"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "id": "304f50f1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Retrieve records from the last n hours (fractional values are allowed)\n",
-    "\n",
-    "# Use either of the following two methods to retrieve data (last n hours or a time interval)\n",
-    "records= retriever.get_attribute_value_by_hours(attr_fq_name,hours=0.1)\n",
-    "#records = retriever.get_attribute_value_by_interval(attr_fq_name,'2021-09-01 16:00:00', '2021-09-01 16:03:00')\n",
-    "\n",
-    "if not records:\n",
-    "    print('Empty result!')\n",
-    "else:\n",
-    "    # Convert DB Array records into Python lists\n",
-    "    data = build_array_from_record(records,records[0].dim_x_r)\n",
-    "    # Extract only the value from the array \n",
-    "    array_values = get_values_from_record(data)\n",
-    "\n",
-    "#records\n",
-    "#data\n",
-    "#array_values"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "id": "fb2c19f4",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[[<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:29.957895',recv_time='2021-11-09 11:52:30.955440',insert_time='2021-11-09 11:52:30.957113',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:29.957895',recv_time='2021-11-09 11:52:30.955440',insert_time='2021-11-09 11:52:30.957113',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:29.957895',recv_time='2021-11-09 11:52:30.955440',insert_time='2021-11-09 11:52:30.957113',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:40.957954',recv_time='2021-11-09 11:52:40.997550',insert_time='2021-11-09 11:52:40.998981',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:40.957954',recv_time='2021-11-09 11:52:40.997550',insert_time='2021-11-09 11:52:40.998981',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:40.957954',recv_time='2021-11-09 11:52:40.997550',insert_time='2021-11-09 11:52:40.998981',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:50.958367',recv_time='2021-11-09 11:52:51.031650',insert_time='2021-11-09 11:52:51.033094',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:50.958367',recv_time='2021-11-09 11:52:51.031650',insert_time='2021-11-09 11:52:51.033094',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:52:50.958367',recv_time='2021-11-09 11:52:51.031650',insert_time='2021-11-09 11:52:51.033094',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:00.957834',recv_time='2021-11-09 11:53:01.070405',insert_time='2021-11-09 11:53:01.071890',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:00.957834',recv_time='2021-11-09 11:53:01.070405',insert_time='2021-11-09 11:53:01.071890',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:00.957834',recv_time='2021-11-09 11:53:01.070405',insert_time='2021-11-09 11:53:01.071890',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:10.957778',recv_time='2021-11-09 11:53:11.109409',insert_time='2021-11-09 11:53:11.110791',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:10.957778',recv_time='2021-11-09 11:53:11.109409',insert_time='2021-11-09 11:53:11.110791',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:10.957778',recv_time='2021-11-09 11:53:11.109409',insert_time='2021-11-09 11:53:11.110791',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:20.958163',recv_time='2021-11-09 11:53:21.139904',insert_time='2021-11-09 11:53:21.141175',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:20.958163',recv_time='2021-11-09 11:53:21.139904',insert_time='2021-11-09 11:53:21.141175',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:20.958163',recv_time='2021-11-09 11:53:21.139904',insert_time='2021-11-09 11:53:21.141175',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:30.958127',recv_time='2021-11-09 11:53:31.177200',insert_time='2021-11-09 11:53:31.178474',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:30.958127',recv_time='2021-11-09 11:53:31.177200',insert_time='2021-11-09 11:53:31.178474',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:30.958127',recv_time='2021-11-09 11:53:31.177200',insert_time='2021-11-09 11:53:31.178474',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:40.957758',recv_time='2021-11-09 11:53:41.215064',insert_time='2021-11-09 11:53:41.216572',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:40.957758',recv_time='2021-11-09 11:53:41.215064',insert_time='2021-11-09 11:53:41.216572',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:40.957758',recv_time='2021-11-09 11:53:41.215064',insert_time='2021-11-09 11:53:41.216572',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:50.958355',recv_time='2021-11-09 11:53:51.248946',insert_time='2021-11-09 11:53:51.250289',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:50.958355',recv_time='2021-11-09 11:53:51.248946',insert_time='2021-11-09 11:53:51.250289',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:53:50.958355',recv_time='2021-11-09 11:53:51.248946',insert_time='2021-11-09 11:53:51.250289',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:00.958079',recv_time='2021-11-09 11:54:01.282162',insert_time='2021-11-09 11:54:01.283518',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:00.958079',recv_time='2021-11-09 11:54:01.282162',insert_time='2021-11-09 11:54:01.283518',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:00.958079',recv_time='2021-11-09 11:54:01.282162',insert_time='2021-11-09 11:54:01.283518',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:10.958288',recv_time='2021-11-09 11:54:11.313545',insert_time='2021-11-09 11:54:11.314891',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:10.958288',recv_time='2021-11-09 11:54:11.313545',insert_time='2021-11-09 11:54:11.314891',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:10.958288',recv_time='2021-11-09 11:54:11.313545',insert_time='2021-11-09 11:54:11.314891',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:20.958563',recv_time='2021-11-09 11:54:21.344520',insert_time='2021-11-09 11:54:21.345807',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:20.958563',recv_time='2021-11-09 11:54:21.344520',insert_time='2021-11-09 11:54:21.345807',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:20.958563',recv_time='2021-11-09 11:54:21.344520',insert_time='2021-11-09 11:54:21.345807',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:30.958539',recv_time='2021-11-09 11:54:31.379597',insert_time='2021-11-09 11:54:31.380996',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:30.958539',recv_time='2021-11-09 11:54:31.379597',insert_time='2021-11-09 11:54:31.380996',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:30.958539',recv_time='2021-11-09 11:54:31.379597',insert_time='2021-11-09 11:54:31.380996',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:40.958046',recv_time='2021-11-09 11:54:41.414444',insert_time='2021-11-09 11:54:41.415878',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:40.958046',recv_time='2021-11-09 11:54:41.414444',insert_time='2021-11-09 11:54:41.415878',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:40.958046',recv_time='2021-11-09 11:54:41.414444',insert_time='2021-11-09 11:54:41.415878',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:50.958498',recv_time='2021-11-09 11:54:51.446569',insert_time='2021-11-09 11:54:51.447766',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:50.958498',recv_time='2021-11-09 11:54:51.446569',insert_time='2021-11-09 11:54:51.447766',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:54:50.958498',recv_time='2021-11-09 11:54:51.446569',insert_time='2021-11-09 11:54:51.447766',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:00.958172',recv_time='2021-11-09 11:55:01.478610',insert_time='2021-11-09 11:55:01.480002',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:00.958172',recv_time='2021-11-09 11:55:01.478610',insert_time='2021-11-09 11:55:01.480002',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:00.958172',recv_time='2021-11-09 11:55:01.478610',insert_time='2021-11-09 11:55:01.480002',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:10.957568',recv_time='2021-11-09 11:55:11.517281',insert_time='2021-11-09 11:55:11.518621',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:10.957568',recv_time='2021-11-09 11:55:11.517281',insert_time='2021-11-09 11:55:11.518621',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:10.957568',recv_time='2021-11-09 11:55:11.517281',insert_time='2021-11-09 11:55:11.518621',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:20.958282',recv_time='2021-11-09 11:55:21.552854',insert_time='2021-11-09 11:55:21.554212',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:20.958282',recv_time='2021-11-09 11:55:21.552854',insert_time='2021-11-09 11:55:21.554212',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:20.958282',recv_time='2021-11-09 11:55:21.552854',insert_time='2021-11-09 11:55:21.554212',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:30.957464',recv_time='2021-11-09 11:55:31.585827',insert_time='2021-11-09 11:55:31.587218',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:30.957464',recv_time='2021-11-09 11:55:31.585827',insert_time='2021-11-09 11:55:31.587218',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:30.957464',recv_time='2021-11-09 11:55:31.585827',insert_time='2021-11-09 11:55:31.587218',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:40.958453',recv_time='2021-11-09 11:55:41.618850',insert_time='2021-11-09 11:55:41.620081',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:40.958453',recv_time='2021-11-09 11:55:41.618850',insert_time='2021-11-09 11:55:41.620081',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:40.958453',recv_time='2021-11-09 11:55:41.618850',insert_time='2021-11-09 11:55:41.620081',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:50.958024',recv_time='2021-11-09 11:55:51.653490',insert_time='2021-11-09 11:55:51.654830',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:50.958024',recv_time='2021-11-09 11:55:51.653490',insert_time='2021-11-09 11:55:51.654830',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:55:50.958024',recv_time='2021-11-09 11:55:51.653490',insert_time='2021-11-09 11:55:51.654830',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:00.957550',recv_time='2021-11-09 11:56:01.685266',insert_time='2021-11-09 11:56:01.686650',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:00.957550',recv_time='2021-11-09 11:56:01.685266',insert_time='2021-11-09 11:56:01.686650',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:00.957550',recv_time='2021-11-09 11:56:01.685266',insert_time='2021-11-09 11:56:01.686650',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:10.958180',recv_time='2021-11-09 11:56:11.728988',insert_time='2021-11-09 11:56:11.730311',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:10.958180',recv_time='2021-11-09 11:56:11.728988',insert_time='2021-11-09 11:56:11.730311',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:10.958180',recv_time='2021-11-09 11:56:11.728988',insert_time='2021-11-09 11:56:11.730311',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:20.957521',recv_time='2021-11-09 11:56:21.762715',insert_time='2021-11-09 11:56:21.763981',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:20.957521',recv_time='2021-11-09 11:56:21.762715',insert_time='2021-11-09 11:56:21.763981',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:20.957521',recv_time='2021-11-09 11:56:21.762715',insert_time='2021-11-09 11:56:21.763981',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:30.957691',recv_time='2021-11-09 11:56:31.798641',insert_time='2021-11-09 11:56:31.799975',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:30.957691',recv_time='2021-11-09 11:56:31.798641',insert_time='2021-11-09 11:56:31.799975',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:30.957691',recv_time='2021-11-09 11:56:31.798641',insert_time='2021-11-09 11:56:31.799975',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:40.957644',recv_time='2021-11-09 11:56:41.839704',insert_time='2021-11-09 11:56:41.841005',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:40.957644',recv_time='2021-11-09 11:56:41.839704',insert_time='2021-11-09 11:56:41.841005',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:40.957644',recv_time='2021-11-09 11:56:41.839704',insert_time='2021-11-09 11:56:41.841005',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:50.957540',recv_time='2021-11-09 11:56:51.873701',insert_time='2021-11-09 11:56:51.875028',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:50.957540',recv_time='2021-11-09 11:56:51.873701',insert_time='2021-11-09 11:56:51.875028',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:56:50.957540',recv_time='2021-11-09 11:56:51.873701',insert_time='2021-11-09 11:56:51.875028',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:00.957772',recv_time='2021-11-09 11:57:01.913800',insert_time='2021-11-09 11:57:01.915239',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:00.957772',recv_time='2021-11-09 11:57:01.913800',insert_time='2021-11-09 11:57:01.915239',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:00.957772',recv_time='2021-11-09 11:57:01.913800',insert_time='2021-11-09 11:57:01.915239',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:10.957623',recv_time='2021-11-09 11:57:11.952909',insert_time='2021-11-09 11:57:11.954569',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:10.957623',recv_time='2021-11-09 11:57:11.952909',insert_time='2021-11-09 11:57:11.954569',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:10.957623',recv_time='2021-11-09 11:57:11.952909',insert_time='2021-11-09 11:57:11.954569',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:51.958517',recv_time='2021-11-09 11:57:52.032720',insert_time='2021-11-09 11:57:52.034096',idx='0',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:51.958517',recv_time='2021-11-09 11:57:52.032720',insert_time='2021-11-09 11:57:52.034096',idx='1',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:57:51.958517',recv_time='2021-11-09 11:57:52.032720',insert_time='2021-11-09 11:57:52.034096',idx='2',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:01.957612',recv_time='2021-11-09 11:58:02.070025',insert_time='2021-11-09 11:58:02.071232',idx='0',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:01.957612',recv_time='2021-11-09 11:58:02.070025',insert_time='2021-11-09 11:58:02.071232',idx='1',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:01.957612',recv_time='2021-11-09 11:58:02.070025',insert_time='2021-11-09 11:58:02.071232',idx='2',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:11.957976',recv_time='2021-11-09 11:58:12.098246',insert_time='2021-11-09 11:58:12.099522',idx='0',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:11.957976',recv_time='2021-11-09 11:58:12.098246',insert_time='2021-11-09 11:58:12.099522',idx='1',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:11.957976',recv_time='2021-11-09 11:58:12.098246',insert_time='2021-11-09 11:58:12.099522',idx='2',dim_x_r='3',dim_y_r='32',value_r='0',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>],\n",
-       " [<Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:21.957645',recv_time='2021-11-09 11:58:22.132944',insert_time='2021-11-09 11:58:22.134263',idx='0',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:21.957645',recv_time='2021-11-09 11:58:22.132944',insert_time='2021-11-09 11:58:22.134263',idx='1',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>,\n",
-       "  <Array_Boolean_RW(att_conf_id='1',data_time='2021-11-09 11:58:21.957645',recv_time='2021-11-09 11:58:22.132944',insert_time='2021-11-09 11:58:22.134263',idx='2',dim_x_r='3',dim_y_r='32',value_r='1',dim_x_w='3',dim_y_w='32',value_w='1',quality='0',att_error_desc_id='None')>]]"
-      ]
-     },
-     "execution_count": 21,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "id": "eb97ee97",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/recv/1/status removed!\n",
-      "Attribute stat/sdp/1/fpga_scrap_r removed!\n",
-      "Attribute stat/sdp/1/fpga_scrap_rw removed!\n",
-      "Attribute stat/sdp/1/fpga_weights_r removed!\n",
-      "Attribute stat/sdp/1/fpga_weights_rw removed!\n"
-     ]
-    }
-   ],
-   "source": [
-    "#archiver.remove_attribute_from_archiver('STAT/recv/1/rcu_temperature_r')\n",
-    "archiver.remove_attributes_in_error()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "id": "6ffaeab3",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "['tango://databaseds:10000/archiving/hdbpp/eventsubscriber01']\n",
-      "tango://databaseds:10000/archiving/hdbpp/eventsubscriber01\n",
-      "tango://databaseds:10000/archiving/hdbpp/eventsubscriber01\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Add a new subscriber if not already present\n",
-    "print(archiver.es_list)\n",
-    "print(archiver.get_next_subscriber())\n",
-    "if len(archiver.es_list)==1:\n",
-    "    archiver.add_event_subscriber()\n",
-    "print(archiver.get_next_subscriber()) # choose the best one with minimum load\n",
-    "new_subscriber = archiver.get_next_subscriber()"
-   ]
-  },
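The cell above adds a second EventSubscriber only if one exists, then asks get_next_subscriber() for the least-loaded one. As a hedged sketch of what a "minimum load" selection could look like if done by hand — assuming each hdbpp-es device exposes an AttributeNumber attribute counting its assigned attributes (an assumption, not confirmed by this notebook):

    from tango import DeviceProxy

    def least_loaded_subscriber(es_names):
        # Sketch: pick the EventSubscriber archiving the fewest attributes.
        # 'AttributeNumber' is an assumed hdbpp-es attribute name.
        def load(name):
            return DeviceProxy(name).read_attribute("AttributeNumber").value
        return min(es_names, key=load)

    # e.g.: least_loaded_subscriber(archiver.es_list)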
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "id": "39f49c8e",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/sdp/1/version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/opcua_missing_attributes_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_firmware_version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_global_node_index_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_hardware_version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_scrap_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_scrap_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_antenna_band_index_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_block_period_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_f_adc_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_fsub_type_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_temp_r removed!\n",
-      "Attribute stat/sdp/1/fpga_weights_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_weights_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_rw not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_fpga_communication_error_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_first_fpga_nr_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_beamsets_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_fpgas_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_software_version_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_start_time_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/tr_tod_r removed!\n",
-      "Attribute stat/sdp/1/tr_tod_pps_delta_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_mean_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_rms_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_rbd_count_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_dev_syncn_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err0_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err1_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_bsn_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_packets_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_valid_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_err_r not found in archiving list!\n",
-      "Attribute stat/sdp/1/state not found in archiving list!\n",
-      "Attribute stat/sdp/1/status not found in archiving list!\n",
-      "Attribute stat/sdp/1/version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/opcua_missing_attributes_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_amplitude_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_enable_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_beamlet_output_scale_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_firmware_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_global_node_index_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_hardware_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_processing_enable_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_antenna_band_index_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_block_period_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_f_adc_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_fsub_type_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_observation_id_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_sdp_info_station_id_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_subband_weights_rw will not be archived because polling is set to FALSE!\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/sdp/1/fpga_wg_amplitude_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_enable_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_frequency_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_wg_phase_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_fpga_mask_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_fpga_communication_error_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_first_fpga_nr_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_beamsets_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_sdp_config_nof_fpgas_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_software_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_start_time_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/tr_tod_pps_delta_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_mean_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_signal_input_rms_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_rbd_count_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_csr_dev_syncn_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err0_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_jesd204b_rx_err1_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_bsn_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_packets_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_valid_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/sdp/1/fpga_bsn_monitor_input_nof_err_r will not be archived because polling is set to FALSE!\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Move SDP attributes from one EventSubscriber to another one\n",
-    "archiver.remove_attributes_by_device(sdp_name)\n",
-    "archiver.add_attributes_by_device(sdp_name,global_archive_period=5000,es_name=new_subscriber)"
-   ]
-  },
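Most of the re-adds above are skipped with "polling is set to FALSE". A minimal sketch of enabling polling on an attribute before handing it back to the archiver, using the standard PyTango polling calls (the 5000 ms period mirrors global_archive_period above; the attribute names are illustrative):

    from tango import DeviceProxy

    sdp = DeviceProxy("STAT/SDP/1")  # i.e. sdp_name in the cell above
    for attr in ("FPGA_processing_enable_R", "TR_fpga_mask_R"):  # illustrative subset
        if not sdp.is_attribute_polled(attr):
            sdp.poll_attribute(attr, 5000)  # polling period in milliseconds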
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "id": "a554cff4",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "d.off()\n",
-    "d2.off()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "100664ab",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "StationControl",
-   "language": "python",
-   "name": "stationcontrol"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/jupyter-notebooks/HdbppReader_demonstrator.ipynb b/jupyter-notebooks/HdbppReader_demonstrator.ipynb
deleted file mode 100644
index ea1786979fa6083cc7660176f1ece3da2bba21ad..0000000000000000000000000000000000000000
--- a/jupyter-notebooks/HdbppReader_demonstrator.ipynb
+++ /dev/null
@@ -1,1172 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "d0328351",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import sys, time\n",
-    "sys.path.append('/hosthome/tango/tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python')\n",
-    "from hdbpp_reader.timescaledb import TimescaleDbReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "c2295969",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Initialise a Reader object\n",
-    "reader = TimescaleDbReader({'database':'hdb', 'user':'postgres', 'password':'password', 'port':5432,'host':'archiver-timescale'})"
-   ]
-  },
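TimescaleDbReader wraps a plain psycopg2 connection (its dsn is shown by get_connection() a few cells below). For ad-hoc checks the same database can also be reached directly; a sketch, assuming psycopg2 is importable in this kernel:

    import psycopg2

    # Same settings as the TimescaleDbReader above.
    conn = psycopg2.connect(host="archiver-timescale", port=5432,
                            user="postgres", password="password", dbname="hdb")
    with conn.cursor() as cur:
        # att_conf holds one row per archived attribute.
        cur.execute("SELECT COUNT(*) FROM att_conf;")
        print(cur.fetchone()[0])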
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "24cd0dbc",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'user': 'postgres',\n",
-       " 'password': 'password',\n",
-       " 'host': 'archiver-timescale',\n",
-       " 'database': 'hdb',\n",
-       " 'port': 5432}"
-      ]
-     },
-     "execution_count": 5,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "reader.parse_config('postgres:password@archiver-timescale:5432/hdb')"
-   ]
-  },
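parse_config() turns a 'user:password@host:port/database' string into the dict above. Roughly equivalent parsing, as a reference sketch (not the library's actual implementation):

    def parse_config(config: str) -> dict:
        # 'user:password@host:port/database' -> connection settings
        credentials, _, location = config.partition("@")
        user, _, password = credentials.partition(":")
        hostport, _, database = location.partition("/")
        host, _, port = hostport.partition(":")
        return {"user": user, "password": password, "host": host,
                "database": database, "port": int(port)}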
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "2e01bf2a",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "<connection object at 0x7fcf8f18ee08; dsn: 'user=postgres password=xxx dbname=hdb host=archiver-timescale port=5432', closed: 0>"
-      ]
-     },
-     "execution_count": 6,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Check connection\n",
-    "reader.get_connection()"
-   ]
-  },
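The repr above shows `closed: 0`, i.e. a live psycopg2 connection. A quick health check built on that, as a sketch:

    conn = reader.get_connection()
    assert conn.closed == 0, "connection to archiver-timescale is closed"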
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "582e2c9e",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['tango://databaseds:10000/stat/sdp/1/fpga_temp_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/version_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/opcua_missing_attributes_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_green_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_green_on_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_red_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_red_on_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pcb_id_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pcb_number_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pcb_version_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_1v8_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_2v5_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_3v3_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_analog_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_digital_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_good_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_temp_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recvtr_i2c_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recvtr_monitor_rate_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recvtr_translator_busy_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_colour_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recv_iout_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recv_temp_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recv_vout_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/state',\n",
-       " 'tango://databaseds:10000/stat/recv/1/status',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/version_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/opcua_missing_attributes_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_enable_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_enable_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_hdr_eth_destination_mac_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_hdr_ip_destination_address_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_hdr_udp_destination_port_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_scale_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_beamlet_output_scale_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_boot_image_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_boot_image_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_global_node_index_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_pps_present_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_pps_capture_cnt_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_pps_expected_cnt_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_pps_expected_cnt_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_processing_enable_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_processing_enable_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_scrap_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_scrap_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_antenna_band_index_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_block_period_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_f_adc_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_fsub_type_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_nyquist_sampling_zone_index_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_observation_id_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_observation_id_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_station_id_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_sdp_info_station_id_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_time_since_last_pps_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_fpga_mask_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_sdp_config_first_fpga_nr_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_sdp_config_nof_beamsets_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_sdp_config_nof_fpgas_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_start_time_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_tod_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_tod_pps_delta_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_bsn_monitor_input_bsn_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_bsn_monitor_input_nof_packets_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_bsn_monitor_input_nof_valid_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_bsn_monitor_input_nof_err_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_error_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_processing_error_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_input_error_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/state',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/status',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_firmware_version_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/fpga_hardware_version_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_fpga_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_fpga_communication_error_r',\n",
-       " 'tango://databaseds:10000/stat/sdp/1/tr_software_version_r']"
-      ]
-     },
-     "execution_count": 7,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Get the list of all the archived attributes\n",
-    "reader.get_attributes()"
-   ]
-  },
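With lists as long as the one above, a per-device summary is often more readable; a small sketch that groups the returned URIs by their device part:

    from collections import Counter

    attrs = reader.get_attributes()
    # 'tango://databaseds:10000/stat/recv/1/rcu_temp_r' -> 'stat/recv/1'
    per_device = Counter("/".join(a.split("/")[3:6]) for a in attrs)
    print(per_device)  # device -> number of archived attributes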
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "9f985ceb",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['tango://databaseds:10000/stat/recv/1/version_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/opcua_missing_attributes_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_green_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_green_on_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_red_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_red_on_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pcb_id_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pcb_number_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pcb_version_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_1v8_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_2v5_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_3v3_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_analog_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_digital_on_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_pwr_good_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_temp_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recvtr_i2c_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recvtr_monitor_rate_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recvtr_translator_busy_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_led_colour_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recv_iout_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recv_temp_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/recv_vout_error_r',\n",
-       " 'tango://databaseds:10000/stat/recv/1/state',\n",
-       " 'tango://databaseds:10000/stat/recv/1/status']"
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Given a name pattern, return a list of archived attributes\n",
-    "device_name = 'STAT/RECV/1'\n",
-    "reader.get_attributes(pattern=f'*{device_name}*'.lower())"
-   ]
-  },
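Attribute names are stored fully qualified and lower-case (hence the .lower() above); the same filter can be expressed directly as a LIKE query on att_conf, as in the raw-cursor cells further down. A sketch:

    cur = reader.get_connection().cursor()
    cur.execute("SELECT att_name FROM att_conf WHERE att_name LIKE %s;",
                (f"%{device_name.lower()}%",))
    print([row[0] for row in cur.fetchall()])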
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "5517c508",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "1"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Check if an attribute is archived\n",
-    "attr_name = 'rcu_mask_rw'\n",
-    "reader.is_attribute_archived(f'{device_name}/{attr_name}'.lower())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "740064f3",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{}"
-      ]
-     },
-     "execution_count": 10,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Return the list of the last n value for a given attribute(s)\n",
-    "# ! Cannot figure out why it does not work ... on the other hand, the relative code \n",
-    "# in the next cell works!\n",
-    "a = 'tango://databaseds:10000/stat/sdp/1/fpga_temp_r'\n",
-    "columns = [\"value_r\", \"value_w\", \"quality\", \"att_error_desc_id\"]\n",
-    "reader.get_last_attributes_values(attributes=a,columns=columns,n=3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "d48fe536",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[(datetime.datetime(2022, 3, 9, 12, 0, 27, 686930, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  1),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 34, 497576, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 23, 838368, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2)]"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "reader._cursor.execute(\"SELECT att_conf_id, table_name, att_name FROM att_conf WHERE att_name LIKE %s;\", ('%'+a+'%',))\n",
-    "att_id = reader._cursor.fetchall()\n",
-    "\n",
-    "reader._cursor.execute(f\"SELECT data_time, value_r, value_w, quality, att_error_desc_id FROM {att_id[0][1]} WHERE att_conf_id={att_id[0][0]} ORDER BY data_time DESC LIMIT 3\")\n",
-    "reader._cursor.fetchall()"
-   ]
-  },
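Until get_last_attributes_values() behaves, the two raw queries above are easy to fold into a reusable helper — a workaround sketch only:

    def last_values(reader, attr_pattern, n=3):
        # Workaround sketch for get_last_attributes_values():
        # look up the attribute's id and per-attribute table in att_conf,
        # then read the n most recent rows from that table.
        cur = reader.get_connection().cursor()
        cur.execute("SELECT att_conf_id, table_name FROM att_conf "
                    "WHERE att_name LIKE %s;", (f"%{attr_pattern}%",))
        att_id, table = cur.fetchone()
        cur.execute(f"SELECT data_time, value_r, value_w, quality, att_error_desc_id "
                    f"FROM {table} WHERE att_conf_id = {att_id} "
                    f"ORDER BY data_time DESC LIMIT {int(n)}")
        return cur.fetchall()

    # e.g.: last_values(reader, 'stat/sdp/1/fpga_temp_r')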
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "fcab2a21",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[(datetime.datetime(2022, 3, 9, 12, 0, 27, 686930, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  1),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 34, 497576, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 23, 838368, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 23, 401983, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 12, 908590, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 12, 398194, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 1, 974671, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 38, 1, 400801, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 51, 57525, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 50, 398277, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 40, 124000, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 39, 402424, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 29, 194141, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 28, 403995, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 18, 325812, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 17, 414731, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 7, 423699, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 37, 6, 406963, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 56, 399116, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 55, 406792, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 45, 491806, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 44, 401894, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 34, 594970, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 33, 385793, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 23, 718610, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 23, 402214, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 12, 785296, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 12, 392858, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 1, 840443, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 36, 1, 404531, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 50, 913943, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 50, 401988, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 39, 984898, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 39, 393043, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 29, 69789, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 28, 409644, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 18, 114191, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 17, 388089, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 7, 192311, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 35, 6, 400084, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 56, 295447, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 55, 390150, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 45, 458677, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 44, 386107, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 34, 598981, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 33, 384623, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 23, 717252, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 22, 394157, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 12, 660277, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 11, 390295, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 1, 641750, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 34, 0, 385438, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 33, 50, 633679, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 33, 49, 396047, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 33, 39, 494091, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 33, 38, 392001, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 33, 27, 532083, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 33, 7, 927971, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  1),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 31, 15, 82880, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 31, 4, 961433, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 31, 4, 405487, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 30, 54, 841743, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 30, 54, 402356, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 30, 44, 727440, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  2),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 30, 44, 400452, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 30, 33, 414200, tzinfo=datetime.timezone.utc),\n",
-       "  [0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0,\n",
-       "   0.0],\n",
-       "  None,\n",
-       "  0,\n",
-       "  None),\n",
-       " (datetime.datetime(2022, 3, 8, 15, 30, 15, 378183, tzinfo=datetime.timezone.utc),\n",
-       "  None,\n",
-       "  None,\n",
-       "  1,\n",
-       "  1)]"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Get the last attribute values given a start date and optionally a stop date\n",
-    "reader.get_attribute_values(a, start_date='2022-03-08 00:00:00')"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "StationControl",
-   "language": "python",
-   "name": "stationcontrol"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/jupyter-notebooks/RECV_archive_all_attributes.ipynb b/jupyter-notebooks/RECV_archive_all_attributes.ipynb
deleted file mode 100644
index 9e3e2192d4931ad5894707d6286240626e56f09a..0000000000000000000000000000000000000000
--- a/jupyter-notebooks/RECV_archive_all_attributes.ipynb
+++ /dev/null
@@ -1,502 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "b14a15ae",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import sys, time\n",
-    "import numpy as np\n",
-    "sys.path.append('/hosthome/tango/tangostationcontrol/tangostationcontrol')\n",
-    "from toolkit.archiver import *\n",
-    "from matplotlib import pyplot as plt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "1514b0cd",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/recv/1/version_r not found in archiving list!\n",
-      "Attribute stat/recv/1/opcua_missing_attributes_r not found in archiving list!\n",
-      "Attribute stat/recv/1/ant_status_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_colour_r not found in archiving list!\n",
-      "Attribute stat/recv/1/ant_mask_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_led_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_led_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_adc_locked_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_band_select_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_band_select_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_dth_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_mask_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pcb_id_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pcb_number_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pcb_version_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_1v8_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_2v5_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_3v3_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_analog_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_iout_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vin_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vout_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_digital_on_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_pwr_good_r not found in archiving list!\n",
-      "Attribute stat/recv/1/rcu_temp_r not found in archiving list!\n",
-      "Attribute stat/recv/1/recvtr_i2c_error_r not found in archiving list!\n",
-      "Attribute stat/recv/1/recvtr_monitor_rate_rw not found in archiving list!\n",
-      "Attribute stat/recv/1/recvtr_translator_busy_r not found in archiving list!\n",
-      "Attribute stat/recv/1/state not found in archiving list!\n",
-      "Attribute stat/recv/1/status not found in archiving list!\n",
-      "Device STAT/SDP/1 offline\n",
-      "Device STAT/SST/1 offline\n",
-      "Device STAT/XST/1 offline\n",
-      "Device STAT/UNB2/1 offline\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Apply the chosen JSON configuration file in directory toolkit/archiver_config/\n",
-    "# RECV is set on PRODUCTION mode to test the archiving of all its attributes\n",
-    "archiver = Archiver(selector_filename='lofar2.json')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "03dafaed",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "lofar2.json\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "{'global_variables': {'development_polling_time': '10000',\n",
-       "  'development_archive_time': '60000'},\n",
-       " 'devices': {'STAT/RECV/1': {'environment': 'development',\n",
-       "   'include': [],\n",
-       "   'exclude': ['CLK_Enable_PWR_R',\n",
-       "    'CLK_I2C_STATUS_R',\n",
-       "    'CLK_PLL_error_R',\n",
-       "    'CLK_PLL_locked_R',\n",
-       "    'CLK_translator_busy_R']},\n",
-       "  'STAT/SDP/1': {'environment': 'development',\n",
-       "   'include': [],\n",
-       "   'exclude': ['FPGA_scrap_R', 'FPGA_scrap_RW']},\n",
-       "  'STAT/SST/1': {'environment': 'development', 'include': [], 'exclude': []},\n",
-       "  'STAT/XST/1': {'environment': 'development', 'include': [], 'exclude': []},\n",
-       "  'STAT/UNB2/1': {'environment': 'development', 'include': [], 'exclude': []}}}"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the configuration file (as a dictionary)\n",
-    "selector = archiver.selector\n",
-    "print(selector.filename)\n",
-    "env_dict = selector.get_dict()\n",
-    "env_dict"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "8720f9e7",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "OFF\n"
-     ]
-    }
-   ],
-   "source": [
-    "device_name = 'STAT/RECV/1'\n",
-    "d=DeviceProxy(device_name) \n",
-    "state = str(d.state())\n",
-    "print(state)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "6a9c4f4c",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Device is now in ON state\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Start the device\n",
-    "if state == \"OFF\":\n",
-    "    time.sleep(1)\n",
-    "    d.initialise()\n",
-    "    time.sleep(1)\n",
-    "state = str(d.state())\n",
-    "if state == \"STANDBY\":\n",
-    "    d.set_defaults()\n",
-    "    d.on()\n",
-    "state = str(d.state())\n",
-    "if state == \"ON\":\n",
-    "    print(\"Device is now in ON state\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "f85bd73f",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Attribute stat/recv/1/version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/opcua_missing_attributes_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/ant_status_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_colour_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_bf_delays_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_led_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_led_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_lna_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/hbat_pwr_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_adc_locked_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_attenuator_db_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_band_select_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_band_select_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_dth_freq_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_dth_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_green_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_led_red_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pcb_id_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pcb_number_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pcb_version_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_1v8_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_2v5_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_3v3_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_analog_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_iout_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_on_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vin_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_ant_vout_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_digital_on_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_pwr_good_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/rcu_temp_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/recvtr_i2c_error_r will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/recvtr_monitor_rate_rw will not be archived because polling is set to FALSE!\n",
-      "Attribute stat/recv/1/recvtr_translator_busy_r will not be archived because polling is set to FALSE!\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Add RECV attributes to perform load test\n",
-    "archiver.add_attributes_by_device(device_name,global_archive_period=5000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "id": "1a872f87",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['tango://databaseds:10000/stat/recv/1/ant_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/rcu_mask_rw',\n",
-       " 'tango://databaseds:10000/stat/recv/1/state',\n",
-       " 'tango://databaseds:10000/stat/recv/1/status']"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the attributes currently managed by the event subscriber\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "attrs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "ca5e58fa",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'tango://databaseds:10000/stat/recv/1/status': 'Storing Error: mysql_stmt_bind_param() failed, err=Buffer type is not supported'}"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Print the errors for each attribute\n",
-    "err_dict = archiver.get_subscriber_errors()\n",
-    "err_dict"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "id": "c96f1a8d",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Attribute                                     Poll Period     Archive Period \n",
-      "----------\n",
-      "STAT/RECV/1/ant_mask_rw                       1000            5000           \n",
-      "STAT/RECV/1/rcu_mask_rw                       1000            5000           \n",
-      "STAT/RECV/1/state                             1000            5000           \n",
-      "STAT/RECV/1/status                            1000            5000           \n"
-     ]
-    }
-   ],
-   "source": [
-    "# Print the attribute periods\n",
-    "def print_periods(attrs):\n",
-    "    print(\"{:<45} {:<15} {:<15}\".format('Attribute','Poll Period','Archive Period'))\n",
-    "    print(\"----------\")\n",
-    "    for a in attrs:\n",
-    "        ap = AttributeProxy(a)\n",
-    "        att_fqname = ap.get_device_proxy().name()+'/'+ap.name()\n",
-    "        print(\"{:<45} {:<15} {:<15}\".format(att_fqname,ap.get_poll_period(),ap.get_property('archive_period')['archive_period'][0],sep='\\t'))\n",
-    "\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "print_periods(attrs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "b12e8887",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Turn off the device\n",
-    "d.off()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "id": "a906823c",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[<Attribute(fullname='tango://databaseds:10000/stat/recv/1/ant_mask_rw',data_type ='4',ttl='0',facility ='tango://databaseds:10000',domain ='stat',family ='recv',member ='1',name ='ant_mask_rw')>,\n",
-       " <Attribute(fullname='tango://databaseds:10000/stat/recv/1/rcu_mask_rw',data_type ='4',ttl='0',facility ='tango://databaseds:10000',domain ='stat',family ='recv',member ='1',name ='rcu_mask_rw')>,\n",
-       " <Attribute(fullname='tango://databaseds:10000/stat/recv/1/state',data_type ='45',ttl='0',facility ='tango://databaseds:10000',domain ='stat',family ='recv',member ='1',name ='state')>,\n",
-       " <Attribute(fullname='tango://databaseds:10000/stat/recv/1/status',data_type ='41',ttl='0',facility ='tango://databaseds:10000',domain ='stat',family ='recv',member ='1',name ='status')>]"
-      ]
-     },
-     "execution_count": 20,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Initialise the retriever object and print the archived attributes in the database\n",
-    "retriever = Retriever()\n",
-    "retriever.get_all_archived_attributes()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "770d6dbc",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'stat/recv/1/rcu_mask_rw'"
-      ]
-     },
-     "execution_count": 18,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Attribute chosen to be retrieved\n",
-    "attr_name = 'rcu_mask_rw'\n",
-    "attr_fq_name = str(device_name+'/'+attr_name).lower()\n",
-    "attr_fq_name"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "id": "3734554e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Retrieve records in the last n hours (works even with decimals)\n",
-    "\n",
-    "# Use alternatively one of the following two methods to retrieve data (last n hours or interval)\n",
-    "records= retriever.get_attribute_value_by_hours(attr_fq_name,hours=0.1)\n",
-    "#records = retriever.get_attribute_value_by_interval(attr_fq_name,'2021-09-01 16:00:00', '2021-09-01 16:03:00')\n",
-    "\n",
-    "if not records:\n",
-    "    print('Empty result!')\n",
-    "else:\n",
-    "    # Convert DB Array records into Python lists\n",
-    "    data = build_array_from_record(records,records[0].dim_x_r)\n",
-    "    # Extract only the value from the array \n",
-    "    array_values = get_values_from_record(data)\n",
-    "\n",
-    "#records\n",
-    "#data\n",
-    "#array_values"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "id": "edb9f117",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Extract and process timestamps for plotting purposes\n",
-    "def get_timestamps(data,strformat):\n",
-    "    timestamps = []\n",
-    "    for i in range(len(data)):\n",
-    "        timestamps.append(data[i][0].recv_time.strftime(strformat))\n",
-    "    return timestamps\n",
-    "timestamps = get_timestamps(data,\"%Y-%m-%d %X\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "id": "112962a0",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaoAAACsCAYAAAAqoehfAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAApBUlEQVR4nO2debgdVZW3319GSAhJIGEMEIYEUJB5UpAhyKQCtrRC48cktqioQVsaG9sP2wlbW2g73SKCItAgIA0CMoOAQgMSCCTIlECYDEOQhDkQsvqPvc+9+55bVaeSe+rWucl6n+c8p4bfXmvV3nVq1bDPLpkZjuM4jtOpDKo7AMdxHMcpwhOV4ziO09F4onIcx3E6Gk9UjuM4TkfjicpxHMfpaDxROY7jOB2NJyrHcRyno/FE5QAg6ShJfyxYf42kIyvwe46k75TUTpRkkoa0O44cfx+T9LSk1yRt0x8+nYGJpFMknV93HMsrnqiWAyTdIullScOr8mFm+5vZr6qy324k7SHpmT6a+RFwvJmtYmb3tSOuLFqdJGTocxO2pJ9J+vv2RlgqpkckTZa0p6TfS1ooaW6OdhdJd/RziM4AxhPVAEfSRGA3wIADW2gH90dMyxEbAA8uS8Ea63p/4OrmhVVehUraGBhsZo8CrwO/AL5WUOTD9HOMA4EVffuL8EQ18DkCuBM4B+hxay7eVvuppKslvQ7sKWk9Sf8j6UVJL0ma1lTmR/Hq7AlJ+yfLb5F0rKThkhZI2iJZN17Sm5LWiPMfkTQj6u6Q9L5Eu42keyW9KukiYKW8DZM0OMYzX9LjhANcuv5oSQ9FW49L+mxcPhK4Blgn3rZ7TdI6knaU9L8xrnmSpkkaluF3uKTXgMHA/ZLmxOWbx3pYIOlBSQcmZXrVdYbdo2Kcr8b6PVzS5sAZwC4xzgVR+2FJ90l6Jd5+PCUxdVv8XhDL7BLLvA9YYGbPRF+3SzpN0kvAKXG7fiTpKUnPSzpD0spJfAfFdntF0hxJ+0n6pKR7mrbjBElXJIu6Eo+Z3W1m5wGP5zQrwAENfbwy/IKkx4DH8uKIy+dK2juJo+XtNnVffR4d6/FlScdJ2kHSA7EtpyX6jSXdHH8b8yX9t6Qxyfp/lPRsbMNHJE3J8DlU0oWSLs3av5ri/42k8yW9AhwXf0fj4vqTJS2WtGqc/7ak04u2d7nFzPwzgD/AbODzwHbAO8CaybpzgIXABwgnJSOB+4HT4vRKwK5Re1Qs/xnCAfpzwF8AxfW3AMfG6V8A3038fAG4Nk5vA7wA7BTtHAnMBYYDw4AngROAocAh0ed3crbtOOBhYD1gNeD3hCvHIXH9h4GNAQG7A28A28Z1ewDPNNnbDtgZGAJMBB4CphbUrQGbxOmhsa7/KW7HXsCrwKY5db1Sk62RwCuJfm3gvUnd/7FJvwewZbT1PuB54OC4bmJaD0mZk4DvJzYXA1+M27tybPcrYl2OAq5M9DvG+D8Ufa4LbAaMiNs5KfHzJ+DQZP5aYN+mWPYG5mbU6drAs3TvVwbcEGNaOS+OqJ0L7J3YOgU4v8Xvo1FXZxD2932At4DLgTWi/ReA3aN+k+h7ODCecFJwely3KfA0sE5ie+M0lrgNv4v7w+AWsZ1C2P8Pjtu6cvT38bj+emAOsH+cvw34WN3HnDo+tQfgnz40Huwad/Rxcf5h4IRk/TnAucn8LsCLzQe4uO4oYHYyPyL+wNeK87fQnaj2BuYk2tuBI+L0T4FvN9l+hJBIPkiS/OK6O8hPVDcDxyXz+5BxgE7WXw58OU7vQVOiytBPBS4rWJ8mqt2A54BByfoLgVOy6jrD1khgAfBxYOWMuv9ji1hPB06L0xOz6gH4A7BbYvOpZJ0It+U2btofnojTP2vYz/B9PvDNOD2JkLhGJPvJS8DwpjJ5ierTwNlNdbxXMl8Ux1yWPVGtmyx7CfhkMn8pOScshCRyX5zehJDU9gaGNulOIZwE3Ar8hGQfL4jtFOC2pmXfjuWHxP3ty8CphCT7JrB6K7vL48dv/Q1sjgSuN7P5cf4Cmm7/Ec4AG6wHPGlmi3PsPdeYMLM34uQqGbrfAyMk7aTwjGxr4LK4bgPgq/GWyoJ4K2s9YJ34edbiLzLyZP7msU5T/D20kvaXdKekv0Y/BwDj8owpPOy/StJz8VbL94r0WbGY2ZKmeNZN5p8mBzN7Hfgk4SpxnqTfSdqsINadFDolvChpYSxXtG1jCFdAaSeFNJ7xhKQyPWmXa+NyCG00J8f8BcBhcfrvgMuT/WMKcIeZLcqLrYmu2345cRbF0ReeT6bfzJhfBUDSmpJ+HW/vvUJI0uMAzGw24eTmFOCFqFsnsbMz4er31KZ9vIjmfeZWwknWtsBMwtXm7tH2bDN7qaTd5QpPVAOU+GzhE8Du8cD7HOGW2laStkqk6Q/maWB99fGhrZm9C1xMOHgdBlxlZq8mPr5rZmOSzwgzuxCYB6wrSYm59QtczSMcuHppFXo4XkrombemmY0hHAAbtrMOFD8lXHVOMrNVCbfxlKHL4i/AepLS38z6hNtYDQoPTmZ2nZl9iHD762Hg5wXlLiCcoa9nZqMJt66Ktm1f4ObYNlnxzCcckN+btMtoM2uciDxNuI2axQ3AeElbE9r7gmRdVuLJRNJQwkH3hqZVzftoXhyvE5Jtg7XK+F1Kvhfj2TLuI58i2UfM7AIz25VwQmbAD5Ky1wPfB26StGZJf81teQfhFuPHgFvN7M+E/ewAQhJbIfFENXA5GHgXeA/himZrYHPC7Z8jcsrcTTj4nypppKSVJH1gGf1fQLhCOJyeB66fEx4K76TAyNgxYBTwv4TnJl+KD5z/hvBMIo+Lo3aCpLGEZzANhhGeI7wILFbo+LFPsv55YHVJo5NlowjPiV6LVzOfW4rtvYvwDOzEGPsewEeBX5cpHM/UD1Lo6LEIeA1oXJ09D0xoevA+Cvirmb0laUfClUyDF2PZjZJlBxCejWQSrwR/Dpym7k4v60raN0rOBo6WNEXSoLhus1j2HeAS4IeEZ0lpotk/9RvLrkR4pqe4jzW2a1fgATN7paCqcuMAZgCHxvrfnvCMs92MIrTNQknrkvRelLSppL3iSdJbhMSfXmFjZv9K+D3c1OgUsTTEK9XphOe+jcR0B+GK2hOVM+A4EvilmT1lZs81PsA04PCsq6Z4tv1Rwr32p4BnCMlmqTGzuwhnuOsQetg1lt9D6JAxDXiZ0AHhqLjubeBv4vxfo+//KXDzc+A6QgeQe1NtvIL7EiGZvUw4kF+RrH+Y8Azp8Xirax3gH6Lu1Wj7oqXY3rcJdbc/4erkvwjP5R4uaWIQ8BXCldlfCVcWjUR5M6Eb/HOSGrdxPw/8i6RXgW/G7WzE8gbwXeD2uG27EK6orm0Rwz8S2uPOeFvrRsLZO2Z2N3A0ocPFQsJBcYOk7AWEZzOXNG4dK/T8fM3Mnkp0HyQcwK8mXAm8SbjSgJxu6Skt4vhnwtXWy8C36HmC1C6+RbjttpCQgNP9czjhedF8wm3yNYCvZ2zDtwnPS2+UtN
oyxHArIdHfncyPoru35wpHo+eN4zgDlHjFNc3Miq5Oq/B7IqEjz4kl9X8GDom3sxynNP4HM8dZPvj/NficS+ji3pJ4++9cT1LOsuBXVI7jDGgkHU7o1t7Mk2b23v6OJ0XSNYS/NjTzPTP7Xn/HM1DxROU4juN0NN6ZwnEcx+lo/BlVDqNHj7b11y/6i083b7/9NsOG5Q7pVbm2bv9Vaev2X5W2bv9Vaev2X5W2bv9VaavyP2vWrPlmNr61sjyeqHJYc801+e3ll7UWArNnz2GTTfL+o1i9tm7/VWnr9l+Vtm7/VWnr9l+Vtm7/VWmr8r/xJpOKRptZJvzWn+M4jtPReKJyHMdxOppKEpWkD0j6qqT1Fd79ckiybLSksxLt8MZ8HHbn+GTdJyR9VtJaCu/A+VRcvoHi68vjUCvpMCc7Re02seyGcfkQhfe+DJd0sKSjq9h2x3Ecp71U1j1d0lTCsDd/Ad5vZufGZdOAT5nZOYn2qMZ80/QRhPHhxhGGX3nUzM5NdZLWJwy5cg5haJX9CK8gOEfSpwlDoEwgDPc/wcx+KmknYCszO7Mp5r8H/h5g/Pjx251/3nmltvWtRYtYaXi5t8BXoa3bf1Xauv1Xpa3bf1Xauv1Xpa3bf1Xaqvzvu99+081s+1LiklTSmSImpNcI44odCMxPlg0njPC9DeEdK48m82/E6Q2ANQkDP04hvKjtWcL4V8TBHreKSeo9hHcyAYyJmk0lfRIYS3h5H4Rh/DeJttciYwTqmLjOBJg8ebKt6A9F69bW7b8qbd3+q9LW7b8qbd3+q9JW5b8KKklUZnZ6MntGhuSEgvnG9JN0D8oIYUThhv35ie4peg7G+d8Z/uYT3u1yYWLbcRzHGQB4ZwrHcRyno/FE5TiO43Q0nqgcx3GcjsYTleM4jtPRtExUkjaSdKWk+ZJekPRbSRu1Kuc4juM47aDMFdUFhNdgr0V47fgldPeecxzHcZxKKZOoRpjZeWa2OH7OJ/z/yXEcx3Eqp0yiukbSSZImxqGLTgSulrSapNWyCnToEEpTJe0dt+PE+Gdhx3Ecp8NpOYSSpCcKVpuZZT6v6sAhlCYTEvNtwAHATDO7pylmH0Kpg7R1+69KW7f/qrR1+69KW7f/qrTL1RBKZrbh0hrt0CGUngd2Be4EFgCbAj0SlQ+h1Fnauv1Xpa3bf1Xauv1Xpa3bf1Xa5WoIJUmDCVczE1O9mf04r0yHDqEE8Mf4/XSGxnEcx+lAyoz1dyXhymYmsKTacBzHcRynJ2US1QQze1/lkTiO4zhOBmV7/e1TeSSO4ziOk0GZK6o7gcskDSJ0WhCht9+qlUbmOI7jOJRLVD8GdiF0567mdcCO4ziOk0OZW39PA7M8STmO4zh1UOaK6nHgFknXAIsaC4u6pzuO4zhOuyhzRfUEcBMwDBiVfHLpkCGUJks6RtLWTba29iGUHMdxBg4th1BaZsM1D6EEnAdMBa4iJNltoq11gI3wIZQ6Xlu3/6q0dfuvSlu3/6q0dfuvSrtcDaEkaTxwIvBeklHTzWyvgjJTqXkIJWBzwogUY4HXE1uXAqvhQyh1vLZu/1Vp6/ZflbZu/1Vp6/ZflXa5GkKJMCTRRcBHgOOAI4EXiwp00BBKs5Lpmcn0ZRkxOY7jOB1ImWdUq5vZ2cA7ZnarmR0D5F5NOY7jOE47KXNF1bitNk/ShwnPnDLfQ+U4juM47aZMovqOpNHAV4H/AFYldFJwHMdxnMopk6heNrOFwEJgTwjdzyuNynEcx3EiZZ5R/UfJZY7jOI7TdnKvqCTtArwfGC/pK8mqVel+a67jOI7jVErRFdUwYBVCMktHpHgFOKTIaIeOTLGTpOMlTfSRKRzHcQYOuVdUZnYrcKukc8zsSYD4qo9VzOyVIqNmdrukHQivr7+RMDJFY9nrdL8SHjNbJOmPcfouSZsnplYCbgY2BFYHXoq6JyXNjprHgF3jn4RfBhrfc4CPAm9K2jKx/Sbhf2BrEP6D5TiO43QwLYdQknQB4Y++7xJGiFgV+Hcz+2FBmamEUSiuJo5MAUyIyy4EvgOcS/fIFN+I829EX6cTRqaYSOgK/ydgR+BtMzs7jkxxMnAaYWSKbQl/9DVgN2CLOL8d8AghOb6d2N4WGGFm6Z+DfQilDtPW7b8qbd3+q9LW7b8qbd3+q9IOpCGUyiSqGWa2taTDCQf4k4Dpy/vr6SdPnmzXXnN1Ke3yOhRK3dq6/Velrdt/Vdq6/Velrdt/Vdqq/G+8yaS2J6oyvf6GShoKHAxcYWbvEK5cHMdxHKdyyiSqnwFzgZHAbfFZUOEzKsdxHMdpFy3/8GtmPwF+kix6UtKe1YXkOI7jON20vKKStKaks+MbfpH0HsII6o7jOI5TOWVu/Z0DXEd44SCEXnpTK4rHcRzHcXpQJlGNM7OLgSUAZraY0FXdcRzHcSqnTKJ6XdLqxJ5+knYmDFDrOI7jOJVTJlF9BbgC2FjS7YQ/5n6xqECHDqHU8L+FD6HkOI4zcGj5h18ASUOATQEBj8T/UrUqMxW4l/Cixfeb2blx2TTgU2Z2TqI9qjHfNH0E8L/AOODDwKNmdm6qiwnnaMKztJeB/YARwHmEZ2lXAcPMbGb0fyFwADDTzO5pitlHpuggbd3+q9LW7b8qbd3+q9LW7b8q7UAamaJl93RJgwkH9olRv48kzOzHBWWmEoZLmk0cQilZNhzYStI2dA+h1Jh/I05vQBhC6S1gCmEIpWeBodH+uKhbnzCEUiNxjomaTYHNCUM3jSXcvmz4XwlYEDU9EpWZnQmcCWFkihX9H+Z1a+v2X5W2bv9Vaev2X5W2bv9VaavyXwVlXpx4JSFhzCR2qGiFmZ2ezJ6RITmhYL4x/SRwd7J8emJ/fqJ7Crg20aXj981Kpmcm009mxOQ4juN0IGUS1YTlfVw/x3Ecp3Mp05niGkn7VB6J4ziO42RQ5orqTuCy+C6qdwgdKszMVq00MsdxHMehXKL6MbALoZecj5ruOI7j9Ctlbv09DczyJOU4juPUQZkrqseBW+KgtIsaC4u6pzuO4zhOuyiTqJ6In2Hx4ziO4zj9Rpn3UX1raY1K+gCwM3AJ4c/C84F5cdlZwL+Z2bFROxz4TzM7VtJOwA5mNi2u+wThD7u/BQ4DXjSz8+Mfgj9jZt+QNAXY1sx+GMvsBLwf+B2wK3Cvmc2IQyntCdwB7A782syeWtptcxzHcfqX3CGUJE0zs+MlXUnGq+fN7MBCwx02hBLhqvCrhD8g+xBKA0Bbt/+qtHX7r0pbt/+qtHX7r0q7vAyhdARwPPCjpTXaiUMoxfmX8SGUBoy2bv9Vaev2X5W2bv9Vaev2X5V2eRlCaQ6Amd26tEY7dAglCAmvYdtxHMcZABQlqvGSvpK30nv9OY7jOP1BUaIaDKxCGInCcRzHcWqhKFHNM7N/6bdIHMdxHCeDopEp/ErKcRzHqZ2iRDWl36JwHMdxnBxyE5WZ/bU/A3Ecx3GcLMoMSus4juM4tVFJopL0AUlflbS+pOMkHZIsGy3prEQ7vDEvaSdJxyfrPiHps5LWknSCp
E/F5RtI+k6cniLpa0mZnaJ2sqRj4tBJSBoi6XxJEyWdGP8s7DiO43Q4uUMo9dlw5w2hNBGYAPwPPoTSgNDW7b8qbd3+q9LW7b8qbd3+q9IuL0MoLTMdOoTSKsAmwBJ8CKUBoa3bf1Xauv1Xpa3bf1Xauv1XpV1ehlBaZjp0CKWZwIVx+rKsuB3HcZzOwztTOI7jOB2NJyrHcRyno6msM8VAR9KrwCMl5eMIz8Pq0tbtvypt3f6r0tbtvypt3f6r0tbtvyptVf43NbNRJbXlMDP/ZHyAewaKtm7/vl2+XZ3g37dr4G1X2Y/f+nMcx3E6Gk9UjuM4TkfjiSqfMweQtm7/VWnr9l+Vtm7/VWnr9l+Vtm7/VWnr9l8a70zhOI7jdDR+ReU4juN0NJ6oHMdxnM6m3d0Il4cPYWDbRwhjFZ5UoPsF8AIwq4W99YDfA38GHgS+XKBdiTB01P1R+60S8Q4G7gOuaqGbSxhKagYtupASxk38DfAw8BCwS4Zm02ir8XkFmFpg84S4TbMIw1mtVKD9ctQ92Gwzq96B1YAbgMfi99gC7d9Gu0uA7VvY/WGsgwcIQ2+NydF9O2pmANcD67TaR4CvAgaMK/B/CmGcy0YdH1BkF/hijPdB4F8L7F6U2Jwbv7N0WwN3NvYZYMcCm1sRBpGeCVwJrFq0/2e02RY5ul7tVWAzq73ytL3aLE+b1WYFdpvb7Ig8m83tVWAzq73ytL3arEDbq83IOQYBGwJ3EY6LFwGjcnTHR03Xvt3XT+1JodM+hIP+HGAjwqjr9wPvydF+ENiW1olqbWDbOD2KMBBvnk0Bq8TpoXHH2LmF/a8AF1AuUZXacYBfAcfG6WHAmBL19hywQc76dYEngJXj/MXAUTnaLQhJagRhPMobgU2K6j3+yE+K0ycBPyjQbk5IsrfQM1FlafcBhsTpH8RPlm7VZPpLwBlF+wjhwHEdYUzLcQX+TwH+ocy+B+wZ62p4nF+jzH4K/BvwzRyb1wP7x+kDgFsK/P8J2D1OHwN8u2j/z2izaTm6Xu1VYDOrvfK0vdosT5vVZgV2e7RZga5XexX5z2ivPLu92qxA26vNyDkGEX6zh8blZwCfy9FtQ3hbxVzalKj81l9vdgRmm9njZvY28GvgoCyhmd0GtHwTspnNM7N74/SrhCuUdXO0Zmavxdmh8ZPb40XSBMIrUM7K0ywtkkYTDkRnx5jeNrMFLYpNAeaY2ZMFmiHAypKGEJLQX3J0mwN3mdkbZrYYuBX4m8bKnHo/iJBcid8H52nN7CEz6zXqSI72+hgDhLPUCTm6V5LZkcQ2K9hHTgNOJGnbsvtTgfZzwKlmtihqXmhlV5KATwAX5uiMcJYNMJrYZjnaycBtcfoG4ONRm7f/N7fZh7J0We2VZzOnvfK0vdqsxW+1R5uV/V0X6Hq1VyubTe2Vp+3VZgXaXm1WcAzai3CXpdFeB2fpzOw+M5vbXA99wRNVb9YFnk7mnyEnqSwLkiYSzjjuKtAMljSDcGvlBjPL1QKnE348S0q4N+B6SdPju7fy2BB4EfilpPsknSVpZAvbh9I9On1vx2bPAj8ijHY/D1hoZtfnyGcBu0laXdIIwlnhei38r2lm8+L0c4TXxLSbY4Br8lZK+q6kp4HDCWe8ebqDgGfN7P6Sfo+X9ICkX0gaW6CbTKi3uyTdKmmHErZ3A543s8dy1k8Ffhi360fA1wtsPUj3Sd3fktFmTft/bpuV+Z2U0PZqr2ZtUZul2lZtlhFDZps16QrbK2e7MturSTuVgjZr0ma2WfMxiHCXaUFyEvAMsO5SHquWGU9U/YikVYBLCc9cXsnTmdm7ZrY14UWPO0raIsfeR4AXzGx61voMdjWzbYH9gS9I+mCObgjhts5PzWwbwvu8TsozKmkY4b1jlxRoxhJ+EBsSngWMbLyxuRkze4hwy+Z6witcZgDvFm5Zz/JGwVXosiDpZGAxPV8j0+z3ZDNbL2qOz9LExPtPFCSyJn4KbEx47jCPcNsnjyGE5z47A18DLo5n4EUcRsEJBuGs/4S4XScQr7JzOAb4vKTphNtLb6cri/b/tM3K/k6KtFntlaXNa7NUG+3ktlmG3cw2y9DltldBHfRqrwxtbptlaDPbrPkYBGyWte1lj1V9pui+4Ir4AXYBrkvmvw58vUA/kRbPqKz7Hu51wFeWMp5vkvGMIq77PuHMZi7hjPQN4PySdk8psLsWMDeZ3w34XYGtg4DrW/j7W+DsZP4I4L9Kxvo94PNF9U7o/LJ2nF4beKRVG9H0jCpPCxxFeOA8oky7A+s3xdalBbYknH3OjZ/FhKvMtUrYbd7m5vlrgT2T+TnA+ILtGgI8T7g9lmdzId3/txTwSsk6mAzcXbT/Z7VZli6vvfK0Oe1V+PtL26xZW9RmJexOJNwhyNr+zPYq2K6s9sqym9lmJWLt0WbJ8m8SEul8up//9ThOJrr02dxc/BlVZfwJmCRpw3ilcChwRV8MxrOks4GHzOzHLbTjJY2J0ysDHyL0CuqFmX3dzCaY2cQY581mlnmVImmkpFGNacJD51lZWjN7Dnha0qZx0RRCb6E8Wp2VQ/hh7yxpRKyPKYT75JlIWiN+r094PnVBC/tXAEfG6SOB37bQl0LSfoRbqwea2RsFuknJ7EHkt9lMM1vDzCbGdnuG8JD7uRy7ayezHyOnzSKXEx7QI2kyoRNM0YjXewMPm9kzBZq/ALvH6b0IPfQySdpsEPAN4ktTC/b/rDYr+zvJtJnVXgXaXm2Wpc1rM0LSyLKb1WZZ23U52e2VVwc92qugXnu1WUEd9GqznGPQQ4Reg4fEokcCN5U9VvWZdmS75e1DeCbyKOEM5+QC3YWES/t3CDvvp3N0uxJuazS6ws4gdjPO0L6P0NX8AcIO/s2SMe9BQa8/Qi/G++nuSpq7XVG/NaFr6wOEH9TYHN1I4CVgdIkYv0XYkWcB5xF7O+Vo/0BIjvcDU1rVO7A6cBPhQHojsFqB9mNxehHhYHNdgXY24Zllo93OyNFdGrfrAUI333XL7CMkZ505ds8jdB1+gHBgX7tAOww4P8ZxL7BXUQzAOcBxLep1V8Lbte8nPNPYrkD7ZcLv5lHgVLrP6jP3/4w22z9H16u9CmxmtVeetleb5Wmz2qzAbnObHZSj69VeRf4z2ivPf682K9D2ajNyjkGEY8jdsY4viXazdF+K7bWYkDTP6usx2YdQchzHcToav/XnOI7jdDSeqBzHcZyOxhOV4ziO09F4onIcx3E6Gk9UjuM4Tkfjicpx2oSkgyWZpMx/8Vfk8+rGf1lK6idKKvovluN0HJ6oHKd9HAb8MX73QmEw3tz5ZcHMDrDWAwY7zoDGE5XjtIE4htquhD++Hpos30PSHyRdAfy5eT5qLlcYKPhBxcGCJR0j6fTEzmcknZbhd66kcfFK6SFJP492ro+jBSBpO0n3S7of+EJSdrCkH0r6UxxA9bNx+QmSfhGnt5Q0K45R6Di14InKcdrDQcC1ZvYo8JKk7ZJ12xJeVDc5Z/4YM9sO2B74kqTVCe/++aikoVFzNOFlhUVMAv7T
zN4LLCC+ZgP4JfBFM9uqSf9pwij2OwA7AJ+RtCHw78Amkj4Wy37WCoaPcpyq8UTlOO3hMMK7y4jf6e2/u83siYL5L8WrnTsJr1mYZOE9PzcDH4nPvIaa2cwWMTxhZjPi9HRgYnx+NcbC+6MgDO/TYB/gCIXXNNxFGNJokpktIQzseh5wq5nd3sKv41RKn++RO86KjqTVCOO0bSnJCG87Nklfi5LXm4q8npTdgzDY6C5m9oakWwivAofwMsx/IoyP+MsSoSxKpt8FVm4VOuFK67qMdZOA1wivZHGcWvErKsfpO4cA55nZBhZG2F4PeILwepRWjAZejklqM8K7iQCw8BK69YC/o/Xo9JnEjhYLJO0aFx2erL4O+Fzj9qKkyXGU/dHATwhveV5d0iE4To14onKcvnMYcFnTskvJ6f3XxLXAEEkPEUavvrNp/cXA7Wb2ch/iOxr4z3iLL32R4lmEDh33xi7rPyPcZTmN8KzrUcJzrFMbr4NwnDrw0dMdp4ORdBVwmpndVHcsjlMXfkXlOB2IpDGSHgXe9CTlrOj4FZXjOI7T0XivvxzGrrGjvfP2QoSSu/rq+lLTPIAydOGr53zXsh7zPQwk9nqKmhS9lyVlmosHm1k+ijVZPlrGkWM732cvl71jL4yhwE6vcr1Pzpqbrues9V7WS1dgs7Guub2z7DaXKdDn+SWefPbynxVXj1WWaKxpUY6NXie6abmmdTlaldVnnVQX+LeeE1BqvrfNXifzvebBUiPptiT11ztUy5pMYspY37JMum4plmW6tZ5yi4ssWZgui/OzWXSdme1HG/FElcM7by9k693ORIMGoUFCCt8AGjSIQXFZmFeXDmBQ1Hbp4/ygQYO6kkBj/aAeNtS1flDDZqIfFNd3+1GTn25NVxmRxK3uMvFI09AP6irTbSfY7LbT7YMYB91xiCabdJdpnhfRRndSHCSabNCrTO956yrXZVfWNN990O2aVzKPddnqioPUhnVpGvPCuuPAupZ12Uh0aZkuvzRsds93fTKWdc8v6V5vcb0t6V5vSzKWZWi6EtkSyFjfY5kZ2JIeGhIbNOm7plPNkqChyWaP+YYmsRmWddvtpVnSbbPZB0sMa9LYksRutGlLogYy9EFrSRmL661RJpa35jLJ+kb57mXBbw/NEuvatsa8JT6byyx5t9mG9SoTND1t9lr2bljWo8y7TTbi+iWLDXvHur67ly1hyeJo4x3jI4sfHUeb8WdUjuM4TkfjicpxHMfpaDxROY7jOB2NJyrHcRyno/FE5TiO43Q0nqgcx3GcjsYTleM4jtPReKJyHMdxOhpPVI7jOE5H44nKcRzH6Wg8UTmO4zgdjScqx3Ecp6PxROU4juN0NJ6oHMdxnI7GE5XjOI7T0fgbfnOQNAt4q+44SjAOmF93ECXwONvLQIhzIMQIHme7WcnMtminQX9xYj5vmdn2dQfRCkn3eJztw+NsHwMhRvA4242ke9pt02/9OY7jOB2NJyrHcRyno/FElc+ZdQdQEo+zvXic7WMgxAgeZ7tpe5zemcJxHMfpaPyKynEcx+loVrhEJWk1STdIeix+j83RXStpgaSrmpZvKOkuSbMlXSRpWFw+PM7Pjusn9lOcR0bNY5KOjMtGSZqRfOZLOj2uO0rSi8m6Y+uKMy6/RdIjSTxrxOVtq88+1uUISb+T9LCkByWdmujbUpeS9ot1MFvSSRnrc+tC0tfj8kck7VvWZn/GKelDkqZLmhm/90rKZLZ/TXFOlPRmEssZSZntYvyzJf1EkmqM8/Cm3/cSSVvHdW2tzxIxflDSvZIWSzqkaV3eb37p69LMVqgP8K/ASXH6JOAHObopwEeBq5qWXwwcGqfPAD4Xpz8PnBGnDwUuqjpOYDXg8fg9Nk6PzdBNBz4Yp48CpvVnfRbFCdwCbJ9Rpm312ZcYgRHAnlEzDPgDsH+76hIYDMwBNor27wfeU6YugPdE/XBgw2hncBmb/RznNsA6cXoL4NmkTGb71xTnRGBWjt27gZ0BAdc09oE64mzSbAnMqaI+S8Y4EXgfcC5wSKvf07LW5Qp3RQUcBPwqTv8KODhLZGY3Aa+my2Lm3wv4TUb51O5vgCl9POsqE+e+wA1m9lczexm4AdivKebJwBqEA2wVtCXOFnb7Wp/LHKOZvWFmvwcws7eBe4EJyxhHFjsCs83s8Wj/1zHevPjTujgI+LWZLTKzJ4DZ0V4Zm/0Wp5ndZ2Z/icsfBFaWNLyP8bQ9zjyDktYGVjWzOy0cac8l57hRQ5yHxbJV0DJGM5trZg8AS5rKZv6elrUuV8REtaaZzYvTzwFrLkXZ1YEFZrY4zj8DrBun1wWeBojrF0Z9lXF2+cyIp0HjTCztNfNxSQ9I+o2k9foQY7vi/GW8TfHPyQ+xnfXZlrqUNIZwlX1TsrivdVmmDfPqIq9sGZv9GWfKx4F7zWxRsiyr/euKc0NJ90m6VdJuif6ZFjb7O84GnwQubFrWrvrsy35UtG8udV0ulyNTSLoRWCtj1cnpjJmZpNq6PfZTnIcC/y+ZvxK40MwWSfos4Yxtr8yS/RPn4Wb2rKRRwKUx1nOX0kbldSlpCOGA8BMzezwuXuq6XJGR9F7gB8A+yeK2tH+bmAesb2YvSdoOuDzG3JFI2gl4w8xmJYs7qT7bxnKZqMxs77x1kp6XtLaZzYuXoS8shemXgDGShsQznAnAs3Hds8B6wDPxoDY66quM81lgj2R+AuEedcPGVsAQM5ue+ExjOovw/KaQKuM0s2fj96uSLiDcbjiXpazPquuS8N+Qx8zs9MTnUtdljt/0Sizdp5o1zXVRVLaVzf6ME0kTgMuAI8xsTqNAQfv3e5zxrsOiGM90SXOAyVGf3u6tvT4jh9J0NdXm+iwTY1HZPZrK3sIy1uWKeOvvCqDRA+VI4LdlC8Yd+fdAo3dLWj61ewhwc9PttirivA7YR9JYhZ5s+8RlDQ6jaUeOB+oGBwIP9SHGPsUpaYikcTGuocBHgMbZYTvrs091Kek7hIPE1LRAm+ryT8Akhd6kwwgHnysK4k/r4grgUIXeYRsCkwgPqsvY7Lc44y3T3xE6tNzeELdo/zriHC9pcIxnI0J9Ph5vG78iaed4K+0IluK40e44Y3yDgE+QPJ+qoD77sh9l/p6WuS5b9bZY3j6Ee7w3AY8BNwKrxeXbA2cluj8ALwJvEu6j7huXb0Q4GMwGLgGGx+UrxfnZcf1G/RTnMdHnbODoJhuPA5s1Lfs+4YH2/YSku1ldcQIjCT0SH4gx/TswuN312ccYJwBGSEIz4ufYdtYlcADwKKGH1clx2b8AB7aqC8KtzTnAIyS9p7JstuG3s0xxAt8AXk/qbwahg09u+9cU58djHDMInWY+mtjcnnDQnwNMIw6WUEeccd0ewJ1N9tpenyVi3IFwfHydcLX3YNHvaVnr0kemcBzHcTqaFfHWn+M4jjOA8ETlOI7jdDSeqBzHcZyOxhOV4ziO09F4onIcx3E6Gk9UjuM4TkfjicpxHMfpaDxROY7jOB3N/wGf1vCbptllLwAAAABJRU5ErkJggg=
=\n",
-      "text/plain": [
-       "<Figure size 432x288 with 2 Axes>"
-      ]
-     },
-     "metadata": {
-      "needs_background": "light"
-     },
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "# Plot of array values\n",
-    "\n",
-    "heatmap = np.array(array_values,dtype=np.float64)\n",
-    "fig = plt.figure()\n",
-    "plt.rcParams['figure.figsize'] = [128, 64]\n",
-    "plt.rcParams['figure.dpi'] = 128\n",
-    "ax = fig.add_subplot(111)\n",
-    "im = ax.imshow(heatmap, interpolation='nearest',cmap='coolwarm')\n",
-    "ax.set_xlabel('Array index')\n",
-    "ax.set_ylabel('Timestamp')\n",
-    "ax.set_xlim([0,(records[0].dim_x_r)-1])\n",
-    "ax.set_xticks(np.arange(0,records[0].dim_x_r))\n",
-    "\n",
-    "ax.set_yticks(range(0,len(timestamps)))\n",
-    "ax.set_yticklabels(timestamps,fontsize=4)\n",
-    "\n",
-    "# Comment the previous two lines and uncomment the following line if there are too many timestamp labels\n",
-    "#ax.set_yticks(range(0,len(timestamps),10))\n",
-    "\n",
-    "ax.set_title('Archived data for '+ attr_fq_name)\n",
-    "ax.grid()\n",
-    "cbar = fig.colorbar(ax=ax, mappable=im, orientation='horizontal')\n",
-    "plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a4a7caae",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "StationControl",
-   "language": "python",
-   "name": "stationcontrol"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/jupyter-notebooks/archiving_demo.ipynb b/jupyter-notebooks/archiving_demo.ipynb
deleted file mode 100644
index d478c20c71ce35a7c4d017a180bdd404d0b06c2d..0000000000000000000000000000000000000000
--- a/jupyter-notebooks/archiving_demo.ipynb
+++ /dev/null
@@ -1,351 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d56e59b7",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import sys, time\n",
-    "import numpy as np\n",
-    "sys.path.append('/hosthome/tango/tangostationcontrol/tangostationcontrol/')\n",
-    "from toolkit.archiver_util import *\n",
-    "from toolkit.archiver_configurator import *\n",
-    "from toolkit.archiver import *\n",
-    "from toolkit.retriever import *\n",
-    "from matplotlib import pyplot as plt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fd619562",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Create an archiver object\n",
-    "archiver = Archiver()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b3748c34",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Safety cleanup\n",
-    "DeviceProxy(archiver.get_subscribers()[0]).delete_property('AttributeList')\n",
-    "DeviceProxy(archiver.get_subscribers()[0]).delete_property('__SubDevices')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c1eedafb",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "# Apply the chosen JSON configuration file in directory toolkit/archiver_config/\n",
-    "config_dict = archiver.get_configuration()\n",
-    "#print(config_dict)\n",
-    "archiver.apply_configuration(config_dict)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "76e9656f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Print max loads\n",
-    "# print(DATATYPES_SIZE_DICT)\n",
-    "print(archiver.get_maximum_device_load('STAT/SDP/1'))\n",
-    "print(archiver.get_maximum_device_load('STAT/RECV/1'))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "948e95f0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# RECV device\n",
-    "device_name = 'STAT/RECV/1'\n",
-    "#device_name = 'STAT/UNB2/1'\n",
-    "d=DeviceProxy(device_name) \n",
-    "d.set_timeout_millis(30000)\n",
-    "state = str(d.state())\n",
-    "print(device_name + ' : ' + state)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "225a5e06",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Start RECV device\n",
-    "if state == 'FAULT':\n",
-    "    d.off()\n",
-    "    time.sleep(3)\n",
-    "if state == \"OFF\":\n",
-    "    time.sleep(1)\n",
-    "    d.initialise()\n",
-    "    time.sleep(1)\n",
-    "state = str(d.state())\n",
-    "if state == \"STANDBY\":\n",
-    "    d.set_defaults()\n",
-    "    d.on()\n",
-    "state = str(d.state())\n",
-    "if state == \"ON\":\n",
-    "    print(f\"Device {device_name} is now in ON state\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "0e27ac40",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# SDP device\n",
-    "device_name = 'STAT/SDP/1'\n",
-    "d2=DeviceProxy(device_name) \n",
-    "d2.set_timeout_millis(10000)\n",
-    "state = str(d2.state())\n",
-    "print(device_name + ' : ' + state)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "348a9d44",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Start SDP device\n",
-    "if state == 'FAULT':\n",
-    "    d2.off()\n",
-    "    time.sleep(3)\n",
-    "if state == \"OFF\":\n",
-    "    time.sleep(1)\n",
-    "    d2.initialise()\n",
-    "    time.sleep(1)\n",
-    "state = str(d2.state())\n",
-    "if state == \"STANDBY\":\n",
-    "    d2.set_defaults()\n",
-    "    d2.on()\n",
-    "state = str(d2.state())\n",
-    "if state == \"ON\":\n",
-    "    print(f\"Device {device_name} is now in ON state\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c33fa7ee",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Add boolean scalar attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/RECVTR_translator_busy_R', polling_period=1000, archive_event_period=5000)\n",
-    "# Add boolean array attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/rcu_mask_rw', polling_period=1000, archive_event_period=5000)\n",
-    "# Add boolean image attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/ANT_mask_RW', polling_period=1000, archive_event_period=5000)\n",
-    "\n",
-    "# Add double scalar attribute\n",
-    "archiver.add_attribute_to_archiver('stat/sdp/1/TR_tod_pps_delta_R', polling_period=1000, archive_event_period=5000)\n",
-    "# Add double array attribute\n",
-    "archiver.add_attribute_to_archiver('stat/sdp/1/fpga_temp_r', polling_period=1000, archive_event_period=5000)\n",
-    "# Add double image attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/HBAT_antenna_ITRF_offsets_R', polling_period=1000, archive_event_period=5000)\n",
-    "\n",
-    "# Add long array attribute\n",
-    "#archiver.add_attribute_to_archiver('stat/sdp/1/FPGA_bsn_monitor_input_nof_err_R', polling_period=1000, archive_event_period=5000)\n",
-    "\n",
-    "# Add ulong scalar attribute\n",
-    "#archiver.add_attribute_to_archiver('stat/sdp/1/TR_sdp_config_first_fpga_nr_R', polling_period=1000, archive_event_period=5000)\n",
-    "# Add ulong array attribute\n",
-    "#archiver.add_attribute_to_archiver('stat/recv/1/RCU_LED_colour_R', polling_period=1000, archive_event_period=5000)\n",
-    "\n",
-    "# Add long64 scalar attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/RECVTR_monitor_rate_RW', polling_period=1000, archive_event_period=5000)\n",
-    "# Add long64 array attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/RCU_PCB_ID_R', polling_period=1000, archive_event_period=5000)\n",
-    "# Add long64 image attribute\n",
-    "archiver.add_attribute_to_archiver('stat/recv/1/HBAT_BF_delay_steps_R', polling_period=1000, archive_event_period=5000)\n",
-    "\n",
-    "# Add string scalar attribute\n",
-    "#archiver.add_attribute_to_archiver('stat/recv/1/status', polling_period=1000, archive_event_period=5000)\n",
-    "# Add string array attribute\n",
-    "#archiver.add_attribute_to_archiver('stat/recv/1/opcua_missing_attributes_R', polling_period=1000, archive_event_period=5000)\n",
-    "\n",
-    "# Add state scalar attribute\n",
-    "#archiver.add_attribute_to_archiver('stat/recv/1/state', polling_period=1000, archive_event_period=5000)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ec7878b2",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Print the errors for each attribute\n",
-    "# If the device is in OFF state, all its attributes should be in error (normal behaviour)\n",
-    "err_dict = archiver.get_subscriber_errors()\n",
-    "err_dict"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3d2ce2da",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Print the attributes currently managed by the event subscriber\n",
-    "attrs = archiver.get_subscriber_attributes()\n",
-    "attrs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "0ec2abd3",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "retriever = RetrieverTimescale()\n",
-    "attr_name = 'stat/sdp/1/tr_tod_pps_delta_r'\n",
-    "records = retriever.get_attribute_value_by_hours(attr_name,1.5)\n",
-    "records"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "81b60192",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "timestamps = [item.data_time.strftime(\"%Y-%m-%d %X:%f\") for item in records]\n",
-    "values = [float(item.value_r) for item in records]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "26b4aab8",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "plt.plot(timestamps,values)\n",
-    "plt.title('Archived data for '+ attr_name)\n",
-    "plt.xticks(rotation=90)\n",
-    "plt.grid()\n",
-    "#plt.rcParams['figure.figsize'] = [12, 8]\n",
-    "plt.rcParams['figure.dpi'] = 60\n",
-    "plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "18aeb91f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "views_records = retriever.get_lofar_attribute('stat/sdp/1/tr_tod_pps_delta_r')\n",
-    "views_records"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "eb24ed8f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "views_records = retriever.get_lofar_attribute('stat/sdp/1/fpga_temp_r')\n",
-    "views_records"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d7a624b0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "d.off()\n",
-    "d2.off()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3e08fc4a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Retrieve boolean image attributes\n",
-    "attr_name = 'stat/recv/1/ant_mask_rw'\n",
-    "records = retriever.get_attribute_value_by_hours(attr_name,1.5)\n",
-    "print(np.array(records[0].value_r).shape)\n",
-    "#records[0].value_r"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "cb2f36f6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Retrieve numeric image attributes\n",
-    "attr_name = 'stat/recv/1/HBAT_antenna_ITRF_offsets_R'.lower()\n",
-    "records = retriever.get_attribute_value_by_hours(attr_name,0.5)\n",
-    "print(np.array(records[0].value_r).shape)\n",
-    "#records[0].value_r"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a53be0f9",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "StationControl",
-   "language": "python",
-   "name": "stationcontrol"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index cecea594ae98e7c8842c6871d70ad48c12b9ef2d..46f6aa77dc41ebb9198f1705564dfb9cf627b661 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -86,11 +86,10 @@ SIMULATORS=(sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim ccd-sim)
 
 make build "${DEVICES[@]}" "${SIMULATORS[@]}"
 make build logstash integration-test http-json-schemas
-make build archiver-timescale hdbppts-cm hdbppts-es
 
 # Start and stop sequence
 make stop http-json-schemas
-make stop "${DEVICES[@]}" "${SIMULATORS[@]}" hdbppts-es hdbppts-cm archiver-timescale
+make stop "${DEVICES[@]}" "${SIMULATORS[@]}"
 make stop device-docker # this one does not test well in docker-in-docker
 make stop logstash
 
@@ -108,7 +107,6 @@ make start logstash http-json-schemas
 # Do not remove `bash`, otherwise statement ignored by gitlab ci shell!
 bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/LOFAR_ConfigDb.json
 bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/test_environment_ConfigDb.json
-bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/tango-archiver-data/archiver-devices.json
 bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/stations/simulators_ConfigDb.json
 bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/stations/dummy_positions_ConfigDb.json
 
@@ -120,18 +118,11 @@ sleep 5
 
 # shellcheck disable=SC2086
 make start "${DEVICES[@]}"
-# Archive devices: archive-timescale first
-make start archiver-timescale
 
-# Wait for archiver and devices to restart
-make await archiver-timescale "${DEVICES[@]}"
+# Wait for devices to restart
+make await "${DEVICES[@]}"
 
-# Give archiver-timescale time to start
-# shellcheck disable=SC2016
-echo '/usr/local/bin/wait-for-it.sh archiver-timescale:5432 --strict --timeout=300 -- true' | make run dsconfig bash -
-
-# Archive devices: Now that archiver-timescale is up we can start the CM and ES
-make start hdbppts-cm hdbppts-es
+make run dsconfig bash -
 
 # Start the integration test
 cd "$LOFAR20_DIR/docker-compose" || exit 1
@@ -143,8 +134,6 @@ integration_test tilebeam_performance "device-recv device-tilebeam device-antenn
 
 integration_test digitalbeam_performance "device-sdp device-recv device-digitalbeam device-beamlet device-antennafield" "${LOFAR20_DIR}/CDB/integrations/digitalbeam_cluster_ConfigDb.json"
 
-integration_test observations "archiver-timescale hdbppts-cm hdbppts-es" "${LOFAR20_DIR}/CDB/integrations/multiobs_ConfigDb.json"
-
 integration_test configuration "device-configuration"
 
 make restart "${DEVICES[@]}"
diff --git a/sbin/tag_and_push_docker_image.sh b/sbin/tag_and_push_docker_image.sh
index cfc2299d8bfc0a66d2cabc385e0e2ceb2429b0d1..1e5cbe7bb80c825fee7a11a28164c2f1910dc56a 100755
--- a/sbin/tag_and_push_docker_image.sh
+++ b/sbin/tag_and_push_docker_image.sh
@@ -86,9 +86,6 @@ LOCAL_IMAGES=(
 
   "itango docker-compose_itango y"
 
-  "archiver-timescale timescaledb y" "hdbpp hdbpp y" "hdbppts-cm hdbppts-cm y"
-  "hdbppts-es hdbppts-es y"
-
   "grafana grafana n" "prometheus prometheus n"
   "jupyter-lab docker-compose_jupyter-lab n"
   "integration-test docker-compose_integration-test n"
diff --git a/tangostationcontrol/VERSION b/tangostationcontrol/VERSION
index 34a83616bb5aa9a70c5713bc45cd45498a50ba24..54d1a4f2a4a7f6afc19897c88a7b73c17ccc54fb 100644
--- a/tangostationcontrol/VERSION
+++ b/tangostationcontrol/VERSION
@@ -1 +1 @@
-0.12.1
+0.13.0
diff --git a/tangostationcontrol/docs/source/interfaces/monitoring.rst b/tangostationcontrol/docs/source/interfaces/monitoring.rst
index e7029a6414eacb03d40328e204628a5cb1f69601..b847dfa1296f5c776ea08851dea7cee10f08d473 100644
--- a/tangostationcontrol/docs/source/interfaces/monitoring.rst
+++ b/tangostationcontrol/docs/source/interfaces/monitoring.rst
@@ -18,7 +18,6 @@ You are encouraged to inspect each panel (graph) to see the underlying database
 The Grafana dashboards are configured with the following data sources:
 
 - :ref:`prometheus-section`, the time-series database that caches the latest values of all monitoring points (see next section),
-- *Archiver DB*, the database that provides a long-term cache of attributes,
 - :ref:`tangodb`, providing access to device properties (fixed settings),
 - :ref:`loki`, the log output of the devices.
 
diff --git a/tangostationcontrol/requirements.txt b/tangostationcontrol/requirements.txt
index 9b0aae99639cd91190d184066a90ccbf00872363..4e42c9a51eb0f2f9bce911054992ff4186542427 100644
--- a/tangostationcontrol/requirements.txt
+++ b/tangostationcontrol/requirements.txt
@@ -9,7 +9,6 @@ mock
 asyncua >= 0.9.90 # LGPLv3
 PyMySQL[rsa] >= 1.0.2 # MIT
 psycopg2-binary >= 2.9.2 # LGPL
-sqlalchemy >= 1.4.26 # MIT
 pysnmp >= 0.1.7 # BSD
 h5py >= 3.1.0 # BSD
 jsonschema >= 4.0.0 # MIT
diff --git a/tangostationcontrol/tangostationcontrol/devices/docker.py b/tangostationcontrol/tangostationcontrol/devices/docker.py
index 3431d31f403aa86152ee3b213e5dba4d1e16a17d..37d1c82212cf1c5289c4ef56e5ab694abab0e86c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/docker.py
+++ b/tangostationcontrol/tangostationcontrol/devices/docker.py
@@ -223,14 +223,6 @@ class Docker(LOFARDevice):
     )
 
     # Other containers
-    archiver_timescale_R = AttributeWrapper(
-        comms_annotation={"container": "archiver-timescale"}, datatype=bool
-    )
-    archiver_timescale_RW = AttributeWrapper(
-        comms_annotation={"container": "archiver-timescale"},
-        datatype=bool,
-        access=AttrWriteType.READ_WRITE,
-    )
     databaseds_R = AttributeWrapper(
         comms_annotation={"container": "databaseds"}, datatype=bool
     )
@@ -269,22 +261,6 @@ class Docker(LOFARDevice):
         datatype=bool,
         access=AttrWriteType.READ_WRITE,
     )
-    hdbppts_cm_R = AttributeWrapper(
-        comms_annotation={"container": "hdbppts-cm"}, datatype=bool
-    )
-    hdbppts_cm_RW = AttributeWrapper(
-        comms_annotation={"container": "hdbppts-cm"},
-        datatype=bool,
-        access=AttrWriteType.READ_WRITE,
-    )
-    hdbppts_es_R = AttributeWrapper(
-        comms_annotation={"container": "hdbppts-es"}, datatype=bool
-    )
-    hdbppts_es_RW = AttributeWrapper(
-        comms_annotation={"container": "hdbppts-es"},
-        datatype=bool,
-        access=AttrWriteType.READ_WRITE,
-    )
     itango_R = AttributeWrapper(comms_annotation={"container": "itango"}, datatype=bool)
     itango_RW = AttributeWrapper(
         comms_annotation={"container": "itango"},
diff --git a/tangostationcontrol/tangostationcontrol/devices/interfaces/lofar_device.py b/tangostationcontrol/tangostationcontrol/devices/interfaces/lofar_device.py
index 1dd6143e69769faaff1a393ea292df429e2db69e..5c8083e87dd99cdf7c13124ec92f6e47eca8646c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/interfaces/lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/interfaces/lofar_device.py
@@ -18,7 +18,6 @@ from tango import (
     DeviceProxy,
     AttrDataFormat,
     DevSource,
-    DevDouble,
 )
 
 # PyTango imports
@@ -28,7 +27,6 @@ from tangostationcontrol.common.lofar_logging import log_exceptions
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES, INITIALISED_STATES
 from tangostationcontrol.common.type_checking import sequence_not_str
 from tangostationcontrol.devices.device_decorators import only_in_states, fault_on_error
-from tangostationcontrol.toolkit.archiver import Archiver
 
 # Additional import
 from tangostationcontrol import __version__ as version
@@ -496,13 +494,6 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         self.set_state(DevState.DISABLE)
         self.set_status("Device is in the DISABLE state.")
 
-    @only_in_states(DEFAULT_COMMAND_STATES)
-    @command(dtype_out=DevDouble)
-    def max_archiving_load(self):
-        """Return the maximum archiving load for the device attributes"""
-        archiver = Archiver()
-        return archiver.get_maximum_device_load(self.get_name())
-
     def _boot(self, initialise_hardware=True):
         # setup connections
         self.Initialise()
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/archiver-devices.json b/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/archiver-devices.json
deleted file mode 100644
index f726543c6916327352719fa50732988533b6d3fd..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/archiver-devices.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
-    "servers": {
-        "hdbppes-srv": {
-            "01": {
-                "HdbEventSubscriber": {
-                    "archiving/hdbppts/eventsubscriber01": {
-                        "attribute_properties": {},
-                        "properties": {
-                            "CheckPeriodicTimeoutDelay": ["5"],
-                            "PollingThreadPeriod": ["3"],
-                            "LibConfiguration": ["connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb","host=archiver-timescale","libname=libhdb++timescale.so","dbname=hdb","port=5432", "user=postgres", "password=password"],
-                            "polled_attr": []
-                        }
-                    }
-                }
-            }
-        },
-        "hdbppcm-srv": {
-            "01": {
-                "HdbConfigurationManager": {
-                    "archiving/hdbppts/confmanager01": {
-                        "attribute_properties": {},
-                        "properties": {
-                            "ArchiverList": ["archiving/hdbppts/eventsubscriber01"],
-                            "MaxSearchSize": ["1000"],
-                            "LibConfiguration": ["connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb","host=archiver-timescale","libname=libhdb++timescale.so","dbname=hdb","port=5432", "user=postgres", "password=password"],
-                            "polled_attr": []
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py b/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py
index 00bb74860f075133b8a121fd575a3ba7093ac5fc..df858150b87b4fc7773878882c63277131a3f4ac 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py
@@ -41,7 +41,6 @@ class TestDeviceConfiguration(AbstractTestBases.TestDeviceBase):
     DB_DEFAULT_CONFIG_FILE = "LOFAR_ConfigDb.json"
     DB_FILE_LIST = [
         "test_environment_ConfigDb.json",
-        "archiver-devices.json",
         "simulators_ConfigDb.json",
         "dummy_positions_ConfigDb.json",
     ]
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
deleted file mode 100644
index 7c8fd99f9ee0dc8c4fedbf0c2a80644185f56cf2..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-import time
-from datetime import datetime
-
-from tango import DevState
-from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tangostationcontrol.toolkit.archiver import Archiver
-from tangostationcontrol.toolkit.archiver_util import attribute_fqdn
-from tangostationcontrol.toolkit.retriever import RetrieverTimescale
-
-
-class TestArchiver(BaseIntegrationTestCase):
-    def setUp(self):
-        super().setUp()
-        self.archiver = Archiver()
-        self.assertIsNotNone(self.archiver)
-
-    def test_archiver_initialisation(self):
-        """Test archiver main attributes"""
-        self.assertEqual(self.archiver.cm_name, "archiving/hdbppts/confmanager01")
-        self.assertTrue(len(self.archiver.es_list))  # subscribers list not empty
-
-    def test_hdbpp_library(self):
-        """Test if the correct hdbpp library is retrieved (TimescaleDB)"""
-        cm_name = self.archiver.cm_name
-        self.assertEqual(
-            "libhdb++timescale.so", self.archiver.get_hdbpp_libname(cm_name)
-        )
-
-    def test_next_subscriber(self):
-        """Test if there is an available subscriber"""
-        self.assertIsNotNone(self.archiver.get_next_subscriber())
-
-    def test_archiver_configuration(self):
-        """Test archiver configuration file"""
-        dev_config_dict = self.archiver.get_configuration("lofar2_dev")
-        prod_config_dict = self.archiver.get_configuration("lofar2_prod")
-        self.assertIsNotNone(dev_config_dict)
-        self.assertIsNotNone(prod_config_dict)
-        self.assertNotEqual(dev_config_dict, prod_config_dict)
-
-    def test_archive_scalar_attribute(self):
-        """Test if a scalar attribute is correctly archived"""
-        # Start RECV Device
-        recv_proxy = TestDeviceProxy("STAT/RECV/1")
-        recv_proxy.off()
-        time.sleep(1)  # To be deleted with L2SS-592
-        recv_proxy.initialise()
-        time.sleep(1)  # To be deleted with L2SS-592
-        self.assertEqual(DevState.STANDBY, recv_proxy.state())
-        recv_proxy.set_defaults()
-        recv_proxy.on()
-        self.assertEqual(DevState.ON, recv_proxy.state())
-
-        polling_period = 1000
-        archive_event_period = 3000
-        attr_fullname = "stat/recv/1/recvtr_translator_busy_r"  # boolean, but lofar view returns int
-        self.archiver.add_attribute_to_archiver(
-            attr_fullname, polling_period, archive_event_period
-        )
-        time.sleep(3)
-        # Test if the attribute has been correctly added to event subscriber
-        self.assertTrue(
-            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
-        )
-
-        # Retrieve data from DB views
-        self.retriever = RetrieverTimescale()
-        self.assertIsNotNone(self.retriever)
-        records = self._wait_for_archiving(
-            attr_fullname, archive_event_period * 2
-        )  # wait for >1 period to avoid race conditions
-        self.assertTrue(len(records) > 0)
-        item = records[-1]  # last table record
-        self.assertEqual("stat/recv/1", item.device)  # column device
-        self.assertEqual("recvtr_translator_busy_r", item.name)  # column attribute
-        self.assertEqual(datetime, type(item.data_time))  # column datetime
-        self.assertEqual(int, type(item.value))  # column value
-
-        # Remove attribute at the end of the test
-        self.archiver.remove_attribute_from_archiver(attr_fullname)
-        time.sleep(3)
-        # Test if the attribute has been correctly removed
-        self.assertFalse(
-            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
-        )
-
-        recv_proxy.off()
-
-    def test_archive_array_attribute(self):
-        """Test if an array attribute is correctly archived"""
-        # Start SDP Device
-        sdp_proxy = TestDeviceProxy("STAT/SDP/1")
-        sdp_proxy.off()
-        time.sleep(1)  # To be deleted with L2SS-592
-        sdp_proxy.initialise()
-        time.sleep(1)  # To be deleted with L2SS-592
-        self.assertEqual(DevState.STANDBY, sdp_proxy.state())
-        sdp_proxy.set_defaults()
-        sdp_proxy.on()
-        self.assertEqual(DevState.ON, sdp_proxy.state())
-
-        polling_period = 1000
-        archive_event_period = 3000
-        attr_fullname = "stat/sdp/1/fpga_temp_r"  # double
-        self.archiver.add_attribute_to_archiver(
-            attr_fullname, polling_period, archive_event_period
-        )
-        time.sleep(3)
-        # Test if the attribute has been correctly added to event subscriber
-        self.assertTrue(
-            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
-        )
-
-        # Retrieve data from DB views
-        self.retriever = RetrieverTimescale()
-        self.assertIsNotNone(self.retriever)
-        records = self._wait_for_archiving(attr_fullname, archive_event_period)
-        self.assertTrue(len(records) > 0)
-        item = records[-1]  # last table record
-        self.assertEqual("stat/sdp/1", item.device)  # column device
-        self.assertEqual("fpga_temp_r", item.name)  # column attribute
-        self.assertEqual(datetime, type(item.data_time))  # column datetime
-        self.assertEqual(int, type(item.x))  # column index
-        self.assertEqual(float, type(item.value))  # column value
-
-        # Remove attribute at the end of the test
-        self.archiver.remove_attribute_from_archiver(attr_fullname)
-        time.sleep(3)
-        # Test if the attribute has been correctly removed
-        self.assertFalse(
-            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
-        )
-
-        sdp_proxy.off()
-
-    def test_archive_image_boolean_attribute(self):
-        """Test if a boolean image attribute is correctly archived"""
-        # Start RECV Device
-        recv_proxy = TestDeviceProxy("STAT/RECV/1")
-        recv_proxy.off()
-        time.sleep(1)  # To be deleted with L2SS-592
-        recv_proxy.initialise()
-        time.sleep(1)  # To be deleted with L2SS-592
-        self.assertEqual(DevState.STANDBY, recv_proxy.state())
-        recv_proxy.set_defaults()
-        recv_proxy.on()
-        self.assertEqual(DevState.ON, recv_proxy.state())
-
-        polling_period = 1000
-        archive_event_period = 5000
-        attr_fullname = "stat/recv/1/hbat_pwr_on_rw"  # boolean 96x32
-        self.archiver.add_attribute_to_archiver(
-            attr_fullname, polling_period, archive_event_period
-        )
-        time.sleep(3)
-        # Test if the attribute has been correctly added to event subscriber
-        self.assertTrue(
-            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
-        )
-
-        # Retrieve data from DB views
-        self.retriever = RetrieverTimescale()
-        self.assertIsNotNone(self.retriever)
-        records = self._wait_for_archiving(attr_fullname, archive_event_period)
-        self.assertTrue(len(records) > 0)
-        item = records[-1]  # last table record
-        self.assertEqual("stat/recv/1", item.device)  # column device
-        self.assertEqual("hbat_pwr_on_rw", item.name)  # column attribute
-        self.assertEqual(datetime, type(item.data_time))  # column datetime
-        self.assertEqual(int, type(item.x))  # column index x
-        self.assertEqual(int, type(item.y))  # column index y
-        self.assertEqual(int, type(item.value))  # column value (bool stored as int)
-        self.assertLessEqual(item.value, 1)  # column value (must be 0 or 1)
-
-        # Remove attribute at the end of the test
-        self.archiver.remove_attribute_from_archiver(attr_fullname)
-        time.sleep(3)
-        # Test if the attribute has been correctly removed
-        self.assertFalse(
-            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
-        )
-
-        recv_proxy.off()
-
-    def test_get_maximum_device_load(self):
-        """Test if the maximum device load is correctly computed"""
-        # Start RECV Device
-        device_name = "STAT/RECV/1"
-        recv_proxy = TestDeviceProxy(device_name)
-        recv_proxy.off()
-        time.sleep(1)  # To be deleted with L2SS-592
-        recv_proxy.initialise()
-        time.sleep(1)  # To be deleted with L2SS-592
-        self.assertEqual(DevState.STANDBY, recv_proxy.state())
-        recv_proxy.set_defaults()
-        recv_proxy.on()
-        self.assertEqual(DevState.ON, recv_proxy.state())
-
-        config_dict = self.archiver.get_configuration()
-        self.archiver.apply_configuration(config_dict)
-        time.sleep(3)
-        max_load = self.archiver.get_maximum_device_load(device_name)
-        self.assertGreater(max_load, 0)
-
-    def test_archive_right_number_of_attributes(self):
-        """Test if the right number of attributes are archived, following the JSON configuration file"""
-        # Start SDP Device
-        device_name = "STAT/SDP/1"
-        sdp_proxy = TestDeviceProxy("STAT/SDP/1")
-        sdp_proxy.off()
-        time.sleep(1)  # To be deleted with L2SS-592
-        sdp_proxy.initialise()
-        time.sleep(1)  # To be deleted with L2SS-592
-        self.assertEqual(DevState.STANDBY, sdp_proxy.state())
-        sdp_proxy.set_defaults()
-        sdp_proxy.on()
-        self.assertEqual(DevState.ON, sdp_proxy.state())
-
-        config_dict = self.archiver.get_configuration("lofar2_dev")
-        self.archiver.apply_configuration(config_dict)
-        # 4 SDP_attributes contain the suffix '_error_R'
-        # 1 SDP_attribute contains the suffix '_mask_RW'
-        # 3 SDP_attributes contain the suffix '_version_R'
-        # FPGA_temp_R is included by default
-        time.sleep(10)  # loading time
-        archived_attrs = self.archiver.get_subscriber_attributes()
-        sdp_archived_attrs = [a for a in archived_attrs if device_name.lower() in a]
-        self.assertEqual(9, len(sdp_archived_attrs))
-
-    def _wait_for_archiving(
-        self, attr_fullname: str, archive_event_period_ms: int, max_wait: int = 10
-    ):
-        wait = 0
-        records = self.retriever.get_lofar_attribute(attr_fullname)
-        while not records and wait < max_wait:
-            time.sleep(archive_event_period_ms / 1000.0)
-            records = self.retriever.get_lofar_attribute(attr_fullname)
-            wait += 1
-        return records
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py
deleted file mode 100644
index 37e429dd332f3b661885a108f122d56fdb625fd3..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-import json
-
-import pkg_resources
-from tango import DevState
-from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tangostationcontrol.toolkit.archiver_util import (
-    get_attributes_from_suffix,
-    retrieve_attributes_from_wildcards,
-)
-
-
-class TestArchiverUtil(BaseIntegrationTestCase):
-    def setUp(self):
-        super().setUp()
-        self.config_dict = json.load(
-            pkg_resources.resource_stream(
-                "tangostationcontrol.toolkit", f"archiver_config/lofar2_dev.json"
-            )
-        )
-
-    def test_get_attributes_from_suffix(self):
-        """Test if attributes are correctly matched with the defined global suffixes"""
-        device_name = "STAT/RECV/1"
-        attribute_name = "ANT_mask_RW"
-        dev_suffixes = self.config_dict["global"]["suffixes"]
-        # Start RECV Device
-        recv_proxy = TestDeviceProxy(device_name)
-        recv_proxy.off()
-        self.assertEqual(DevState.OFF, recv_proxy.state())
-        self.assertIn(
-            attribute_name, get_attributes_from_suffix(device_name, dev_suffixes)
-        )
-
-    def test_retrieve_attributes_from_wildcards(self):
-        """Test if attributes are correctly retrieved with wildcards matching"""
-        device_name = "STAT/SDP/1"
-        attribute_names = ["FPGA_scrap_R", "FPGA_scrap_RW"]
-        exclude_list = self.config_dict["devices"][device_name]["exclude"]
-        # Start SDP Device
-        sdp_proxy = TestDeviceProxy(device_name)
-        sdp_proxy.off()
-        self.assertEqual(DevState.OFF, sdp_proxy.state())
-        for a in attribute_names:
-            self.assertIn(
-                f"{device_name}/{a}".lower(),
-                retrieve_attributes_from_wildcards(device_name, exclude_list),
-            )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py
deleted file mode 100644
index 68ddd5cdc3efaa38e853aef337c08beb99c50c4c..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py
deleted file mode 100644
index 69a686b142a986b26dc66d06b5ca213ad07a70d2..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.toolkit.archiver import Archiver
-from tangostationcontrol.toolkit.archiver_configurator import get_multimember_devices
-
-
-class TestArchiver(BaseIntegrationTestCase):
-    def setUp(self):
-        super().setUp()
-        self.archiver = Archiver()
-        self.assertIsNotNone(self.archiver)
-
-    def test_archiver_initialisation(self):
-        """Test archiver main attributes"""
-        self.assertEqual(self.archiver.cm_name, "archiving/hdbppts/confmanager01")
-        self.assertTrue(len(self.archiver.es_list))  # subscribers list not empty
-        """Test if there is an available subscriber"""
-        self.assertIsNotNone(self.archiver.get_next_subscriber())
-
-    def test_get_multimember_devices(self):
-        """Test if multimember devices are correctly identified"""
-        config_dict = self.archiver.get_configuration()
-        self.assertIsNotNone(config_dict)
-        env_dict = config_dict["devices"]
-        matched_devices_dict = get_multimember_devices(env_dict)
-        obs_devices_list = sorted(list(matched_devices_dict.keys()))
-        self.assertListEqual(
-            obs_devices_list,
-            ["STAT/Observation/1", "STAT/Observation/2", "STAT/Observation/3"],
-        )
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/README.md b/tangostationcontrol/tangostationcontrol/toolkit/README.md
deleted file mode 100644
index adcd1a321759d5e36d525f3f2911f2779254ec16..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/README.md
+++ /dev/null
@@ -1,54 +0,0 @@
-# Tango Archiving Framework
-
-The Archiver class in archiver.py defines the methods to manage the attribute archiving offered by Tango.
-
-The main components (and the relative Docker containers) are:
-
-- Configuration Manager (container: hdbppts-cm): a device server that assists in adding, modifying, moving and deleting an attribute to/from the archiving system
-- Event Subscriber (container: hdbppts-es): the EventSubscriber Tango device server is the engine of the archiving system. In typical usage, it subscribes to archive events on request of the ConfigurationManager device. The EventSubscriber is designed to start archiving all already-configured attributes, even if the ConfigurationManager is not running.
-- Archiving DBMS (container: archiver-timescale): a dedicated database devoted to storing attribute values.
-- (Optional) HDB++ Viewer (container: hdbpp-viewer): a standalone Java application designed to monitor signals coming from the database
-
-## Archiver creation
-When an Archiver object is created, we can define four of its properties:
-- the Selector configuration file name, a JSON file in which environment properties are defined
-- the ConfigurationManager name (Tango namespace)
-- at least one EventSubscriber name (Tango namespace)
-- the default archiving context for the subscribers, i.e. a default archiving strategy applied to
-all attributes; this strategy can of course be tuned individually for each attribute if needed
-
-The archiving strategies are ['ALWAYS', 'RUN', 'SHUTDOWN', 'SERVICE']:
-- ALWAYS: always stored
-- RUN: stored while the device is running
-- SHUTDOWN: stored during shutdown
-- SERVICE: stored during maintenance activities
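-
-A minimal creation sketch, based on the Archiver constructor defined in archiver.py (the ConfigurationManager device is assumed to be up and running):
-
-```python
-from tangostationcontrol.toolkit.archiver import Archiver
-
-# Connect to the ConfigurationManager and set the default archiving
-# context ("RUN") for all of its EventSubscribers.
-archiver = Archiver(cm_name="archiving/hdbppts/confmanager01", context="RUN")
-
-# List the EventSubscribers managed by this ConfigurationManager.
-print(archiver.get_subscribers())
-```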
-
-## Select environment configuration
-The Selector object creates a dictionary from a JSON configuration file (if not defined by the user, a default lofar2.json is retrieved)
-in order to allow a custom starting configuration of the archiving procedure.
-In the JSON file, three variables are defined for each Tango device:
-- Environment, which defines the general behaviour of the archiving framework, in particular:
-    - "Development" -> none of the attributes are archived by default
-    - "Production" -> all the attributes are archived by default
-- Include, a list of the attributes that must be added to the archiving (used in "Development" mode)
-- Exclude, a list of the attributes that must be removed from the archiving (used in "Production" mode)
-
-The advantage of such a configuration selection is that every user can load a custom configuration suited to their needs.
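-
-A hypothetical sketch of such a configuration file (key names modelled on the files shipped in toolkit/archiver_config/, e.g. lofar2_dev.json; the attribute names and the "environment"/"include" entries here are illustrative only):
-
-```json
-{
-    "global": {
-        "suffixes": ["*_error_R", "*_version_R"]
-    },
-    "devices": {
-        "STAT/RECV/1": {
-            "environment": "development",
-            "include": ["ANT_mask_RW"],
-            "exclude": ["RCU_LED_colour_R"]
-        }
-    }
-}
-```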
-
-## Add an attribute
-When adding an attribute to the archiving framework, we must define the following properties:
-- the EventSubscriber name that will take charge of the attribute
-- the archiving strategy (the four options defined above)
-- the attribute polling period (it should already be defined in the TangoDB)
-- the archive event period (MOST IMPORTANT: it defines the rate at which an attribute is archived in the DBMS)
-
-It is important to understand that, once an attribute is successfully added to the EventSubscriber list, archiving begins without an explicit 'Start' command; it simply follows the archiving strategy already defined.
-
-The 'Start' command is used instead during a session when an attribute has been paused/stopped for any reason, or has raised some kind of issue.
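-
-A minimal sketch, given the `archiver` object created above and using add_attribute_to_archiver() as defined in archiver.py:
-
-```python
-# Polling and archive event periods are in milliseconds; when es_name is
-# not given, the least loaded EventSubscriber is chosen automatically.
-archiver.add_attribute_to_archiver(
-    "stat/recv/1/ant_mask_rw",
-    polling_period=1000,
-    archive_event_period=5000,
-    strategy="RUN",
-)
-```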
-
-## Difference between Stop and Remove an attribute
-When stopping the archiving of an attribute, the framework does not remove it from the list.
-This means that archiving is stopped for the current session, but if the device is restarted, the attribute archiving will be restarted as well.
-To stop the archiving permanently, the attribute must be removed from the attribute list.
-
-## Update an attribute
-If we want to update the archiving properties of an attribute (e.g. the archive event period), a dedicated method is provided.
-Note that the update is not instantaneous because, following the framework architecture, the attribute must first be removed from the EventSubscriber list and then re-added with the new properties.
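-
-A sketch of the remove/re-add cycle such an update performs under the hood, using the methods defined in archiver.py:
-
-```python
-attr = "stat/recv/1/ant_mask_rw"
-
-# Stop archiving and drop the attribute from the subscriber's list...
-archiver.remove_attribute_from_archiver(attr)
-
-# ...then re-add it with the new archive event period.
-archiver.add_attribute_to_archiver(attr, polling_period=1000, archive_event_period=10000)
-```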
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/TODO_HdbppPython.md b/tangostationcontrol/tangostationcontrol/toolkit/TODO_HdbppPython.md
deleted file mode 100644
index a72618b3fffac8b4c94f6b46d4bb1353db637c67..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/TODO_HdbppPython.md
+++ /dev/null
@@ -1,47 +0,0 @@
-# To Do List 
-
-## Updates to incorporate Hdbpp-Python as our Retriever
-
-The library in [libhdbpp-python](https://gitlab.com/tango-controls/hdbpp/libhdbpp-python) implements an AbstractReader and multiple Readers (i.e. what is called a Retriever in our repo), one per hdb++ engine. Currently (March 2022), only the MariaDB implementation is on the master branch, while the TimescaleDB implementation is under development in [libhdbpp-python-timescaledb-branch](https://gitlab.com/tango-controls/hdbpp/libhdbpp-python/-/tree/package_and_timescaledb_support).
-
-### Approach 
-The Reader relies upon hard-coded SQL scripts inside the Python methods, managed as strings.
-Our first version of the Retriever used this approach as well, but we then replaced the hard-coded SQL scripts with the SQLAlchemy Python library, which has led to more stable, reliable and customizable code.
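-
-A hypothetical sketch of the difference (table and column names are illustrative, loosely modelled on the hdb++ schema):
-
-```python
-import sqlalchemy as sa
-
-# libhdbpp-python style: the query lives in a string.
-raw_query = "SELECT data_time, value_r FROM att_scalar_devdouble WHERE att_conf_id = 42"
-
-# SQLAlchemy style: the table is described once, and queries are composed
-# from Python objects, so they can be inspected, extended and reused.
-metadata = sa.MetaData()
-att_scalar = sa.Table(
-    "att_scalar_devdouble", metadata,
-    sa.Column("att_conf_id", sa.Integer),
-    sa.Column("data_time", sa.DateTime),
-    sa.Column("value_r", sa.Float),
-)
-stmt = sa.select(att_scalar.c.data_time, att_scalar.c.value_r).where(
-    att_scalar.c.att_conf_id == 42
-)
-```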
-
-### Compatibility
-The libhdbpp-reader is compatible with our code and our archiver setup, as demonstrated in [demonstrator](../../../jupyter-notebooks/HdbppReader_demonstrator.ipynb).
-
-### Functionalities in libhdbpp-python-reader
-These are the functionalities implemented in the libhdbpp-reader (a usage sketch follows this list):
-- get_connection() : Return the connection object to avoid a client to open one for custom queries.
-- get_attributes(active=False, pattern='') : Queries the database for the current list of archived attributes
-- is_attribute_archived(attribute, active=False): Returns if an attribute has values in DB
-- get_last_attribute_value(attribute) : Returns last value inserted in DB for an attribute
-- get_last_attributes_values(attributes, columns = 'time, r_value'): Returns last values inserted in DB for a list of attributes
-- get_attribute_values(attribute,start_date, stop_date=None,decimate=None,**params): Returns attribute values between start and stop dates
-- get_attributes_values(attributes,start_date, stop_date=None,decimate=None,correlate = False,columns = 'time, r_value',**params): Returns attributes values between start and stop dates, using decimation or not, correlating the values or not.
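-
-A minimal usage sketch, assuming `reader` is any object implementing the AbstractReader methods listed above (the concrete Reader class depends on the chosen backend):
-
-```python
-from datetime import datetime, timedelta
-
-def last_hour_values(reader, pattern="stat/recv/1/*"):
-    """Fetch the last hour of data for every archived attribute matching pattern."""
-    result = {}
-    for attr in reader.get_attributes(active=True, pattern=pattern):
-        result[attr] = reader.get_attribute_values(
-            attr, start_date=datetime.now() - timedelta(hours=1)
-        )
-    return result
-```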
-
-### TODO List for our repository
-The Reader in libhdbpp-python has roughly the same functionalities as ours, but to align our methods with the AbstractReader, we must:
-- replace the methods/parameter names
-- introduce the concept of active Attribute (boolean flag that indicates if the attribute is being currently archived)
-- add the decimate parameter (avg, count, etc..)
-- add the correlation parameter ('if True, data is generated so that there is available data for each timestamp of each attribute')
-- add a more general pattern parameter to retrieve methods
-
-### TODO List for libhdbpp-python in case we contribute
-Since our experience shows that the SQLAlchemy Python library adds many benefits compared to bare SQL strings, a contribution to libhdbpp-python should add the following to the upstream repository:
-- install and import the SQLAlchemy library
-- develop an ArchiverBase class that maps the DBMS schema (we already have both the TimescaleDB and MariaDB versions)
-- replace the SQL strings in the Reader methods with classes/methods representing the relative DB tables/scripts
-- add some small extra functionalities to match our developed methods (get_attribute_format(), get_attribute_tablename(), etc.)
-
-### Linting and other general issues to be fixed in libhdbpp-python code
-To keep the CI pipeline from raising errors, we need to fix the following issues:
-- imported but unused packages (F401)
-- use of bare exceptions (B001)
-- use of mutable data structures for argument defaults (B006)
-- Xenon complexity of some methods (acceptable for now)
-
-
-
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
deleted file mode 100644
index 7b66d194c37785a274bb0546e3f908d245440a8c..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
+++ /dev/null
@@ -1,630 +0,0 @@
-#! /usr/bin/env python3
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-import json
-import logging
-import re
-import time
-from functools import wraps
-
-import pkg_resources
-from tango import DeviceProxy, AttributeProxy, DevState, DevFailed
-from tangostationcontrol.toolkit.archiver_configurator import (
-    get_parameters_from_attribute,
-    get_include_attribute_list,
-    get_exclude_attribute_list,
-    get_global_env_parameters,
-    get_multimember_devices,
-)
-from tangostationcontrol.toolkit.archiver_util import (
-    get_db_config,
-    device_fqdn,
-    attribute_fqdn,
-    get_size_from_datatype,
-    filter_attribute_list,
-)
-
-logger = logging.getLogger()
-
-
-def warn_if_attribute_not_found():
-    """
-    Log a warning if an exception is thrown indicating access to a non-existing attribute
-    was requested, and swallow the exception.
-    """
-
-    def inner(func):
-        @wraps(func)
-        def warn_wrapper(self, attribute_name, *args, **kwargs):
-            try:
-                return func(self, attribute_name, *args, **kwargs)
-            except DevFailed as e:
-                if e.args[0].reason in [
-                    "Attribute not found",
-                    "BadSignalName",
-                    "API_AttrNotFound",
-                ]:
-                    logger.warning(
-                        f"Attribute {attribute_name} not found: {e.args[0].desc}"
-                    )
-                else:
-                    raise
-
-        return warn_wrapper
-
-    return inner
-
-
-def warn_if_device_not_connected():
-    """
-    Log a warning if an exception is thrown indicating access to a non-connected device, and swallow the exception.
-    """
-
-    def inner(func):
-        @wraps(func)
-        def warn_wrapper(self, attribute_name, *args, **kwargs):
-            try:
-                return func(self, attribute_name, *args, **kwargs)
-            except DevFailed as e:
-                if "API_CantConnectToDevice" in str(e):
-                    logger.warning(
-                        f"Attribute {attribute_name} not reachable: {e.args[0].desc}"
-                    )
-                else:
-                    raise
-
-        return warn_wrapper
-
-    return inner
-
-
-class Archiver:
-    """
-    The Archiver class implements the basic operations to perform attributes archiving
-    """
-
-    # Global environment variables set by configuration file
-    GLOBAL_POLLING_TIME = 1000
-    GLOBAL_ARCHIVE_ABS_CHANGE = 1
-    GLOBAL_ARCHIVE_REL_CHANGE = None
-    GLOBAL_ARCHIVE_PERIOD = 10000  # 3600000 (prod)
-    GLOBAL_EVENT_PERIOD = 1000  # 60000 (prod)
-    GLOBAL_STRATEGY = "RUN"
-
-    def __init__(
-        self, cm_name: str = "archiving/hdbppts/confmanager01", context: str = "RUN"
-    ):
-        self.cm_name = cm_name
-        self.cm = DeviceProxy(cm_name)
-        try:
-            if self.cm.state() == DevState.FAULT:
-                raise Exception(
-                    f"Configuration Manager {cm_name} is in FAULT state: {self.cm.status()}"
-                )
-        except Exception as e:
-            raise Exception(
-                f"Connection failed with Configuration Manager {cm_name}"
-            ) from e
-        self.es_list = list(self.get_subscribers(from_db=False))
-        self.cm.write_attribute(
-            "Context", context
-        )  # Set default Context Archiving for all the subscribers
-
-    def get_hdbpp_libname(self, device_name: str):
-        """
-        Get the hdbpp library name used by the Configuration Manager or by the EventSubscribers
-        Useful in the case of different DBMS architectures (e.g. MySQL, TimescaleDB)
-        """
-        config = get_db_config(device_name)
-        return config["libname"]
-
-    def get_subscribers(self, from_db: bool = False):
-        """
-        Get the list of Event Subscribers managed by the Configuration Manager.
-        It can be retrieved as a device property (stored in TangoDB) or as a device attribute.
-        Choose from_db=True only if new subscribers are not added dynamically while ConfManager is running.
-        """
-        if from_db:
-            es_list = self.cm.get_property("ArchiverList")["ArchiverList"] or []
-        else:
-            es_list = self.cm.ArchiverList or []
-        return es_list
-
-    def get_next_subscriber(self):
-        """
-        Return the first available Event Subscriber for archiving operations
-        TODO: the choice among subscribers should be done analysing their load
-        """
-        es_list = self.get_subscribers()
-        # Only one subscriber in ConfManager list
-        if len(es_list) == 1:
-            return es_list[0]
-        else:
-            # Choose the best subscriber analysing their load
-            load_dict = {}
-            for es_name in es_list:
-                es = DeviceProxy(es_name)
-                load_dict[es_name] = float(es.AttributeRecordFreq or 0)
-            # Return the subscriber's name with min load
-            min_es = min(load_dict, key=load_dict.get)
-            return min_es
-
-    def get_configuration(self, resource: str = "lofar2_dev") -> dict:
-        """Read an archiver configuration from one of the preinstalled resources in archiver_config."""
-        resource = pkg_resources.resource_stream(
-            __name__, f"archiver_config/{resource}.json"
-        )
-        return json.load(resource)
-
-    def apply_configuration(self, config_dict: dict):
-        """
-        Apply the customized strategy defined by the given archiver configuration.
-        """
-        # Set global development env variables
-        (
-            self.GLOBAL_POLLING_TIME,
-            self.GLOBAL_ARCHIVE_ABS_CHANGE,
-            self.GLOBAL_ARCHIVE_REL_CHANGE,
-            self.GLOBAL_ARCHIVE_PERIOD,
-            self.GLOBAL_EVENT_PERIOD,
-            self.GLOBAL_STRATEGY,
-        ) = get_global_env_parameters(config_dict)
-        # Set devices archiving
-        env_dict = config_dict["devices"]
-        # Check if device has more than one member (domain/family/*)
-        multimember_devices_dict = get_multimember_devices(env_dict)
-        # Merge the two configuration dictionaries
-        extended_env_dict = {**env_dict, **multimember_devices_dict}
-        extended_config_dict = config_dict.copy()  # Copy to preserve original dict
-        extended_config_dict["devices"] = extended_env_dict
-        for device in extended_env_dict:
-            try:
-                # DEV environment -> all attributes are excluded by default
-                # PROD environment -> all attributes are included by default
-                if not device.endswith("*"):
-                    self.configure_device(extended_config_dict, device)
-            except Exception as e:
-                if "API_DeviceNotExported" in str(e):  # ignore if device is offline
-                    logger.warning(f"Device {device} offline")
-                elif "API_CantConnectToDevice" in str(e):
-                    logger.warning(f"Device {device} not found")
-                elif "DB_DeviceNotDefined" in str(e):
-                    logger.warning(f"Device {device} not defined in TangoDB")
-                else:
-                    raise Exception from e
-
-    def configure_device(self, config_dict: dict, device: str):
-        """
-        Procedure that enables the Archiving configuration for a certain device as defined in the config file
-        """
-        # Cleanup the subscriber
-        # self.remove_attributes_by_device(device)
-        # Attributes to be included in the archiving strategy with specific parameters
-        include_att_list = get_include_attribute_list(device, config_dict)
-        # Attributes to be excluded from archiving
-        exclude_att_list = get_exclude_attribute_list(device, config_dict)
-        exclude_att_list = [a for a in exclude_att_list if a not in include_att_list]
-        try:
-            for att in include_att_list:
-                # Retrieve specific attribute parameters from config file
-                (
-                    archive_period,
-                    event_period,
-                    abs_change,
-                    rel_change,
-                ) = get_parameters_from_attribute(device, att, config_dict)
-                att_fqname = attribute_fqdn(att)
-                # Add the attribute to the archiver setting either specific or global parameters
-                self.add_attribute_to_archiver(
-                    att_fqname,
-                    self.GLOBAL_POLLING_TIME,
-                    archive_period or self.GLOBAL_ARCHIVE_PERIOD,
-                    self.GLOBAL_STRATEGY,
-                    abs_change or self.GLOBAL_ARCHIVE_ABS_CHANGE,
-                    rel_change or self.GLOBAL_ARCHIVE_REL_CHANGE,
-                )
-            self.add_attributes_by_device(
-                device,
-                self.GLOBAL_ARCHIVE_PERIOD,
-                self.GLOBAL_ARCHIVE_ABS_CHANGE,
-                self.GLOBAL_ARCHIVE_REL_CHANGE,
-                exclude=exclude_att_list,
-            )
-            # Remove attributes by custom configuration if already present
-            # The following cycle is a security check in the special case that an attribute is in the
-            # included list in DEV mode, and in the excluded list in PROD mode
-            for att in exclude_att_list:
-                att_fqname = attribute_fqdn(att)
-                self.remove_attribute_from_archiver(att_fqname)
-        except DevFailed as e:
-            if "already subscribed" in str(e):
-                logger.warning(f"Multiple entries of Attribute {att} in config file")
-            else:
-                raise
-
-    def add_event_subscriber(self, es_name: str = None):
-        """
-        Add an additional Event Subscriber to the Configuration Manager
-        """
-        # If the subscriber name is not defined, generate the name by incrementing the existing one
-        if es_name is None:
-            last_es_name = self.get_subscribers()[-1]
-            last_es_idx = int(last_es_name[-2:])
-            es_name = last_es_name[:-2] + str(last_es_idx + 1).zfill(2)
-        try:
-            es = DeviceProxy(es_name)
-            if es.state() == DevState.FAULT:
-                raise Exception(f"Event Subscriber {es_name} is in FAULT state")
-            self.cm.ArchiverAdd(device_fqdn(es_name))
-        except DevFailed as e:
-            if e.args[0].reason == "Archiver already present":
-                logger.warning(
-                    f"Event Subscriber {es_name} already present in Configuration Manager"
-                )
-            else:
-                raise
-
-    @warn_if_attribute_not_found()
-    @warn_if_device_not_connected()
-    def add_attribute_to_archiver(
-        self,
-        attribute_name: str,
-        polling_period: int,
-        archive_event_period: int,
-        strategy: str = "RUN",
-        abs_change: int = 1,
-        rel_change: int = None,
-        es_name: str = None,
-    ):
-        """
-        Takes as input the attribute name, polling period (ms), event period (ms) and archiving strategy,
-        and adds the selected attribute to the subscriber's list of archiving attributes.
-        The ConfigurationManager and EventSubscriber devices must be already up and running.
-        The archiving-DBMS must be already properly configured.
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        try:
-            self.cm.write_attribute("SetAttributeName", attribute_name)
-            self.cm.write_attribute(
-                "SetArchiver", es_name or self.get_next_subscriber()
-            )
-            self.cm.write_attribute("SetStrategy", strategy)
-            self.cm.write_attribute("SetPollingPeriod", polling_period)
-            self.cm.write_attribute("SetPeriodEvent", archive_event_period)
-            if abs_change is not None:
-                self.cm.write_attribute("SetAbsoluteEvent", abs_change)
-            if rel_change is not None:
-                self.cm.write_attribute("SetRelativeEvent", rel_change)
-            self.cm.AttributeAdd()
-            logger.info(f"Attribute {attribute_name} added to archiving list!")
-        except DevFailed as e:
-            if e.args[0].reason == "Already archived" or "already subscribed" in str(e):
-                logger.warning(f"Attribute {attribute_name} already in archiving list!")
-            else:
-                raise
-
-    def add_attributes_by_device(
-        self,
-        device_name,
-        global_archive_period: int = None,
-        global_abs_change: int = 1,
-        global_rel_change: int = None,
-        es_name: str = None,
-        exclude: list = None,
-    ):
-        """
-        Add sequentially all the attributes of the selected device in the event subscriber list, if not already present
-        """
-        if not exclude:
-            """B006 Do not use mutable data structures for argument defaults.
-            They are created during function definition time. All calls to the
-            function reuse this one instance of that data structure,
-            persisting changes between them"""
-            exclude = []
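-            # e.g. with "def f(x=[]): x.append(1); return x", every call
-            # appends to (and returns) the same list object.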
-
-        attrs_list = filter_attribute_list(device_name, exclude)
-        for a in attrs_list:
-            attr_fullname = attribute_fqdn(f"{device_name}/{a}")
-            attr_proxy = AttributeProxy(attr_fullname)
-            if attr_proxy.is_polled() and not self.is_attribute_archived(
-                attr_fullname
-            ):  # if not polled, attribute is also not archived
-                try:
-                    es = DeviceProxy(
-                        es_name or self.get_next_subscriber()
-                    )  # choose an e.s. or get the first one available
-                    polling_period = (
-                        attr_proxy.get_poll_period() or self.prod_polling_time
-                    )
-                    archive_period = global_archive_period or int(
-                        attr_proxy.get_property("archive_period")["archive_period"][0]
-                    )
-                    abs_change = global_abs_change
-                    rel_change = global_rel_change
-                    self.add_attribute_to_archiver(
-                        attr_fullname,
-                        polling_period=polling_period,
-                        archive_event_period=archive_period,
-                        abs_change=abs_change,
-                        rel_change=rel_change,
-                        es_name=es.name(),
-                    )
-                except IndexError:
-                    logger.warning(
-                        f"Attribute {attr_fullname} will not be archived because its archive event period is not defined!"
-                    )
-                except Exception as e:
-                    raise Exception(
-                        f"Failed to add attribute {attr_fullname} to the archiver"
-                    ) from e
-            else:
-                logger.warning(
-                    f"Attribute {attr_fullname} will not be archived because it is not polled or is already in the archiving list!"
-                )
-
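-    # Minimal usage sketch (hypothetical device name):
-    #
-    #   archiver.add_attributes_by_device("STAT/RECV/1", global_archive_period=10000)
-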
-    @warn_if_attribute_not_found()
-    def remove_attribute_from_archiver(self, attribute_name: str):
-        """
-        Stops the data archiving of the attribute passed as input, and removes it from the subscriber's list.
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        self.cm.AttributeStop(attribute_name)
-        self.cm.AttributeRemove(attribute_name)
-        logger.warning(f"Attribute {attribute_name} removed!")
-
-    @warn_if_attribute_not_found()
-    def remove_attributes_by_device(self, device_name: str, exclude: list = None):
-        """
-        Stops the data archiving of all the attributes of the selected device, and removes them from the
-        subscriber's list
-        """
-        if not exclude:
-            """B006 Do not use mutable data structures for argument defaults.
-            They are created during function definition time. All calls to the
-            function reuse this one instance of that data structure,
-            persisting changes between them"""
-            exclude = []
-        es_list = self.get_subscribers()
-        for es_name in es_list:
-            es = DeviceProxy(es_name)
-            archived_attrs = es.AttributeList or []
-            exclude_list = [attribute_fqdn(a.lower()) for a in exclude]
-            # Search the attributes in the EventSubscriber list from their device name
-            match = re.compile(f".*{device_name.lower()}.*").match
-            attrs_list = [
-                a.lower()
-                for a in list(filter(match, archived_attrs))
-                if a.lower() not in exclude_list
-            ]
-            for a in attrs_list:
-                self.remove_attribute_from_archiver(a)
-
-    def remove_attributes_in_error(self, exclude: list = None, es_name: str = None):
-        """
-        Remove from the subscribers' lists all the attributes currently in error (i.e. not being archived)
-        """
-
-        if not exclude:
-            """B006 Do not use mutable data structures for argument defaults.
-            They are created during function definition time. All calls to the
-            function reuse this one instance of that data structure,
-            persisting changes between them"""
-            exclude = []
-
-        if es_name is not None:
-            es_list = [es_name]
-        else:
-            es_list = self.get_subscribers()
-        for es_name in es_list:
-            es = DeviceProxy(es_name)
-            attributes_nok = es.AttributeNokList or []
-            exclude_list = [attribute_fqdn(a.lower()) for a in exclude]
-            attrs_list = [
-                a.lower() for a in list(attributes_nok) if a.lower() not in exclude_list
-            ]
-            for a in attrs_list:
-                self.remove_attribute_from_archiver(a)
-
-    @warn_if_attribute_not_found()
-    def start_archiving_attribute(self, attribute_name: str):
-        """
-        Starts the archiving of the attribute passed as input.
-        The attribute must already be present in the subscriber's list
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        self.cm.AttributeStart(attribute_name)
-
-    @warn_if_attribute_not_found()
-    def stop_archiving_attribute(self, attribute_name: str):
-        """
-        Stops the archiving of the attribute passed as input.
-        The attribute must already be present in the subscriber's list
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        self.cm.AttributeStop(attribute_name)
-
-    def is_attribute_archived(self, attribute_name: str):
-        """
-        Check if an attribute is in the archiving list
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        attributes = self.cm.AttributeSearch(attribute_name)
-
-        # search returns all matches in which attribute_name is part of the name,
-        # so check whether an exact match is included.
-        return any(attribute_name == a for a in attributes)
-
-    def update_archiving_attribute(
-        self,
-        attribute_name: str,
-        polling_period: int,
-        archive_period: int,
-        strategy: str = "RUN",
-    ):
-        """
-        Update the archiving properties of an attribute already in a subscriber list
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        self.remove_attribute_from_archiver(attribute_name)
-        time.sleep(3.0)
-        self.add_attribute_to_archiver(
-            attribute_name, polling_period, archive_period, strategy
-        )
-        time.sleep(3.0)
-        self.start_archiving_attribute(attribute_name)
-        logger.info(f"Attribute {attribute_name} successfully updated!")
-
-    def get_subscriber_attributes(self, es_name: str = None):
-        """
-        Return the list of attributes managed by the event subscribers
-        """
-        attrs = []
-        if es_name is not None:
-            es_list = [es_name]
-        else:
-            es_list = self.get_subscribers()
-        for es_name in es_list:
-            es = DeviceProxy(es_name)
-            attrs.extend(list(es.AttributeList or []))
-        return attrs
-
-    def get_subscriber_errors(self, es_name: str = None):
-        """
-        Return a dictionary of the attributes currently in error, defined as AttributeName -> AttributeError
-        """
-        attrs = []
-        errs = []
-        es_list = [es_name] if es_name else self.get_subscribers()
-
-        for es_name in es_list:
-            es = DeviceProxy(es_name)
-            attrs.extend(list(es.AttributeList or []))
-            errs.extend(list(es.AttributeErrorList or []))
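-        # Illustrative result shape: {"<attribute fqdn>": "<error message>", ...}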
-        return {a: e for a, e in zip(attrs, errs) if e}
-
-    def get_attribute_errors(self, attribute_name: str):
-        """
-        Return the error related to the attribute
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        errs_dict = self.get_subscriber_errors()
-        return errs_dict.get(attribute_name)
-
-    def get_subscriber_load(self, use_freq: bool = True, es_name: str = None):
-        """
-        Return the estimated load of an archiver, either as a record frequency
-        or as the number of archived attributes
-        """
-        es = DeviceProxy(es_name or self.get_next_subscriber())
-        if use_freq:
-            return f"{es.AttributeRecordFreq} events/period"
-        else:
-            return len(es.AttributeList or [])
-
-    def get_started_attributes(self, regex: str = ".*", es_name: str = None):
-        """
-        Return a list of the attributes that are currently being archived
-        """
-        es = DeviceProxy(es_name or self.get_next_subscriber())
-        attribute_list = es.AttributeStartedList or []
-        pattern = re.compile(regex)
-        return [a for a in attribute_list if pattern.search(a)]
-
-    def get_attribute_subscriber(self, attribute_name: str):
-        """
-        Given an attribute name, return the event subscriber associated with it
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        # Check if attribute is archived
-        if self.is_attribute_archived(attribute_name):
-            # If the ConfManager manages more than one subscriber
-            if len(self.get_subscribers()) > 1:
-                for es_name in self.get_subscribers():
-                    # Search the attribute in the subscriber list
-                    for a in list(DeviceProxy(es_name).AttributeList or []):
-                        if attribute_name.lower() == a:
-                            return es_name
-            else:
-                return self.get_next_subscriber()
-        else:
-            logger.warning(f"Attribute {attribute_name} not found!")
-
-    def get_attribute_freq(self, attribute_name: str):
-        """
-        Return the attribute archiving frequency in events/minute
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        if self.is_attribute_archived(attribute_name):
-            es = DeviceProxy(self.get_attribute_subscriber(attribute_name))
-            freq_dict = {
-                a: r for a, r in zip(es.AttributeList, es.AttributeRecordFreqList)
-            }
-            for f in freq_dict:
-                if attribute_name.lower() == f:
-                    return freq_dict[f]
-        else:
-            logger.warning(f"Attribute {attribute_name} not found!")
-
-    def get_attribute_failures(self, attribute_name: str):
-        """
-        Return the frequency of the attribute's archiving failures, in events/minute
-        """
-        attribute_name = attribute_fqdn(attribute_name)
-        if self.is_attribute_archived(attribute_name):
-            es = DeviceProxy(self.get_attribute_subscriber(attribute_name))
-            fail_dict = {
-                a: r for a, r in zip(es.AttributeList, es.AttributeFailureFreqList)
-            }
-            for f in fail_dict:
-                if attribute_name.lower() == f:
-                    return fail_dict[f]
-        else:
-            logger.warning(f"Attribute {attribute_name} not found!")
-
-    def get_maximum_device_load(self, device_name: str):
-        """Compute maximum archiving load (bytes/second) based on device configuration"""
-        load_list = []
-        # Get the list of started attributes (truncated in order to match AttributeInfo names)
-        attributes_started = [
-            str(a).split("/")[-1]
-            for a in self.get_started_attributes(regex=device_name.lower())
-        ]
-        # Get the list of attributes info
-        attributes_info = DeviceProxy(device_name).attribute_list_query()
-        # Filter the archived attributes
-        for attribute_info in attributes_info:
-            if attribute_info.name.lower() in attributes_started:
-                attr_dict = {
-                    "attribute": attribute_info.name.lower(),
-                    "polling_period": AttributeProxy(
-                        device_name + "/" + attribute_info.name
-                    ).get_poll_period(),
-                    "data_type": attribute_info.data_type,
-                    "dim_x": attribute_info.max_dim_x,
-                    "dim_y": attribute_info.max_dim_y,
-                }
-                load_list.append(attr_dict)
-        # Compute the total load
-        polling_load = 0
-        for a in load_list:
-            polling_period = a["polling_period"] / 1000  # in seconds
-            n_bytes = get_size_from_datatype(a["data_type"])
-            x = int(a["dim_x"]) or 1
-            y = int(a["dim_y"]) or 1
-            polling_load = polling_load + ((n_bytes * (x * y)) / polling_period)
-        return polling_load
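-
-    # Worked example (illustrative sizes): a double attribute (8 bytes per
-    # element) with dim_x=192, dim_y=0 and a 1000 ms polling period contributes
-    # 8 * (192 * 1) / 1.0 = 1536 bytes/second to the total load.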
-
-
-class AttributeFormatException(Exception):
-    """
-    Exception that handles wrong attribute naming
-    """
-
-    def __init__(
-        self,
-        message="Wrong Tango attribute format! Try: domain/family/member/attribute (e.g. STAT/RECV/1/temperature)",
-    ):
-        self.message = message
-        super().__init__(self.message)
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
deleted file mode 100644
index 3586268aad634375381206a12e3b5b737e12d50c..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
+++ /dev/null
@@ -1,1036 +0,0 @@
-#! /usr/bin/env python3
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-from typing import List
-
-import numpy
-from sqlalchemy import Column, Integer, String
-from sqlalchemy.dialects.postgresql import ARRAY, TIMESTAMP, FLOAT, JSON
-from sqlalchemy.dialects.postgresql.base import BYTEA
-from sqlalchemy.dialects.postgresql.ranges import INT4RANGE, INT8RANGE
-from sqlalchemy.orm import declarative_base
-from sqlalchemy.sql.sqltypes import INTEGER, TEXT, Boolean
-
-# Declarative system used to define classes mapped to relational DB tables
-Base = declarative_base()
-
-
-# ----------------- LOFAR VIEWS ----------------- #
-
-
-class Lofar_Scalar_Attribute(Base):
-    """
-    Abstract class that represents a LOFAR-customized scalar Tango attribute view
-    """
-
-    __abstract__ = True
-    __table_args__ = {"extend_existing": True}
-
-    data_time = Column(TIMESTAMP, primary_key=True)
-    device = Column(String, primary_key=True)
-    name = Column(String, primary_key=True)
-
-    def __repr__(self):
-        return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',value='{self.value}'>"
-
-
-class Lofar_Scalar_Boolean(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_boolean"
-    value = Column(Boolean)
-
-
-class Lofar_Scalar_Double(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_double"
-    value = Column(FLOAT)
-
-
-class Lofar_Scalar_Encoded(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_encoded"
-    value = Column(BYTEA)
-
-
-class Lofar_Scalar_Enum(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_enum"
-    value = Column(INTEGER)
-
-
-class Lofar_Scalar_Float(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_float"
-    value = Column(FLOAT)
-
-
-class Lofar_Scalar_Long(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_long"
-    value = Column(INT4RANGE)
-
-
-class Lofar_Scalar_Long64(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_long64"
-    value = Column(INT8RANGE)
-
-
-class Lofar_Scalar_Short(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_short"
-    value = Column(INTEGER)
-
-
-class Lofar_Scalar_State(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_state"
-    value = Column(INTEGER)
-
-
-class Lofar_Scalar_String(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_string"
-    value = Column(TEXT)
-
-
-class Lofar_Scalar_Uchar(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_uchar"
-    value = Column(INTEGER)
-
-
-class Lofar_Scalar_Ulong(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_ulong"
-    value = Column(INTEGER)
-
-
-class Lofar_Scalar_Ulong64(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_ulong64"
-    value = Column(INTEGER)
-
-
-class Lofar_Scalar_Ushort(Lofar_Scalar_Attribute):
-    __tablename__ = "lofar_scalar_ushort"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_Attribute(Base):
-    """
-    Abstract class that represents a LOFAR-customized array Tango attribute view
-    """
-
-    __abstract__ = True
-    __table_args__ = {"extend_existing": True}
-
-    data_time = Column(TIMESTAMP, primary_key=True)
-    device = Column(String, primary_key=True)
-    name = Column(String, primary_key=True)
-    x = Column(INTEGER, primary_key=True)
-
-    def __repr__(self):
-        return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',index='{self.x}',value='{self.value}'>"
-
-
-class Lofar_Array_Boolean(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_boolean"
-    value = Column(Boolean)
-
-
-class Lofar_Array_Double(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_double"
-    value = Column(FLOAT)
-
-
-class Lofar_Array_Encoded(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_encoded"
-    value = Column(BYTEA)
-
-
-class Lofar_Array_Enum(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_enum"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_Float(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_float"
-    value = Column(FLOAT)
-
-
-class Lofar_Array_Long(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_long"
-    value = Column(INT4RANGE)
-
-
-class Lofar_Array_Long64(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_long64"
-    value = Column(INT8RANGE)
-
-
-class Lofar_Array_Short(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_short"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_State(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_state"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_String(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_string"
-    value = Column(TEXT)
-
-
-class Lofar_Array_Uchar(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_uchar"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_Ulong(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_ulong"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_Ulong64(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_ulong64"
-    value = Column(INTEGER)
-
-
-class Lofar_Array_Ushort(Lofar_Array_Attribute):
-    __tablename__ = "lofar_array_ushort"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_Attribute(Base):
-    """
-    Abstract class that represents a LOFAR-customized image Tango attribute view
-    """
-
-    __abstract__ = True
-    __table_args__ = {"extend_existing": True}
-
-    data_time = Column(TIMESTAMP, primary_key=True)
-    device = Column(String, primary_key=True)
-    name = Column(String, primary_key=True)
-    x = Column(INTEGER, primary_key=True)
-    y = Column(INTEGER, primary_key=True)
-
-    def __repr__(self):
-        return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',index_x='{self.x}',index_y='{self.y}',value='{self.value}'>"
-
-
-class Lofar_Image_Boolean(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_boolean"
-    value = Column(Boolean)
-
-
-class Lofar_Image_Double(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_double"
-    value = Column(FLOAT)
-
-
-class Lofar_Image_Encoded(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_encoded"
-    value = Column(BYTEA)
-
-
-class Lofar_Image_Enum(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_enum"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_Float(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_float"
-    value = Column(FLOAT)
-
-
-class Lofar_Image_Long(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_long"
-    value = Column(INT4RANGE)
-
-
-class Lofar_Image_Long64(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_long64"
-    value = Column(INT8RANGE)
-
-
-class Lofar_Image_Short(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_short"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_State(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_state"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_String(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_string"
-    value = Column(TEXT)
-
-
-class Lofar_Image_Uchar(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_uchar"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_Ulong(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_ulong"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_Ulong64(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_ulong64"
-    value = Column(INTEGER)
-
-
-class Lofar_Image_Ushort(Lofar_Image_Attribute):
-    __tablename__ = "lofar_image_ushort"
-    value = Column(INTEGER)
-
-
-# ----------------- ----------------- ----------------- #
-
-
-class Attribute(Base):
-    """
-    Class that represents a Tango Attribute mapped to table 'att_conf'
-    """
-
-    __tablename__ = "att_conf"
-    __table_args__ = {"extend_existing": True}
-
-    att_conf_id = Column(Integer, primary_key=True)
-    att_name = Column(String)
-    att_conf_type_id = Column(Integer)
-    att_conf_format_id = Column(Integer)
-    table_name = Column(String)
-    cs_name = Column(String)
-    domain = Column(String)
-    family = Column(String)
-    member = Column(String)
-    name = Column(String)
-    ttl = Column(Integer)
-
-    def __repr__(self):
-        return f"<Attribute(fullname='{self.att_name}',data_type ='{self.att_conf_type_id}',format='{self.att_conf_format_id}',table_name='{self.table_name}',cs_name ='{self.cs_name}',domain ='{self.domain}',family ='{self.family}',member ='{self.member}',name ='{self.name}'),ttl='{self.ttl}'>"
-
-
-class DataType(Base):
-    """
-    Class that represents a Tango Data Type mapped to table 'att_conf_type'
-    """
-
-    __tablename__ = "att_conf_type"
-    __table_args__ = {"extend_existing": True}
-
-    att_conf_type_id = Column(Integer, primary_key=True)
-    type = Column(String)
-
-    def __repr__(self):
-        return f"<DataType(type='{self.type}')>"
-
-
-class Format(Base):
-    """
-    Class that represents a Tango Format mapped to table 'att_conf_format'
-    """
-
-    __tablename__ = "att_conf_format"
-    __table_args__ = {"extend_existing": True}
-
-    att_conf_format_id = Column(Integer, primary_key=True)
-    format = Column(String)
-    format_num = Column(Integer)
-
-    def __repr__(self):
-        return f"<Format(format='{self.format}', format_num='{self.format_num}')>"
-
-
-class Scalar(Base):
-    """
-    Abstract super-class of the Scalar mapper classes
-    """
-
-    # In the concrete inheritance use case, it is common that the base class is not represented
-    # within the database, only the subclasses. In other words, the base class is abstract.
-    __abstract__ = True
-
-    # A primary key is not defined for the tables which store values, but SQLAlchemy
-    # requires one. This definition exists only on the Python side and does not
-    # affect the DBMS schema
-    att_conf_id = Column(Integer, primary_key=True)
-    data_time = Column(TIMESTAMP, primary_key=True)
-    quality = Column(Integer)
-    att_error_desc_id = Column(Integer)
-    details = Column(JSON)
-
-
-class Scalar_Boolean(Scalar):
-    """
-    Class that represents a Tango Boolean mapped to table 'att_scalar_devboolean'
-    """
-
-    __tablename__ = "att_scalar_devboolean"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(Boolean)
-    value_w = Column(Boolean)
-
-    def __repr__(self):
-        return f"<Scalar_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Double(Scalar):
-    """
-    Class that represents a Tango Double mapped to table 'att_scalar_devdouble'
-    """
-
-    __tablename__ = "att_scalar_devdouble"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(FLOAT)
-    value_w = Column(FLOAT)
-
-    def __repr__(self):
-        return f"<Scalar_Double(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Encoded(Scalar):
-    """
-    Class that represents a Tango Encoded mapped to table 'att_scalar_devencoded'
-    """
-
-    __tablename__ = "att_scalar_devencoded"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(BYTEA)
-    value_w = Column(BYTEA)
-
-    def __repr__(self):
-        return f"<Scalar_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Enum(Scalar):
-    """
-    Class that represents a Tango Enum mapped to table 'att_scalar_devenum'
-    """
-
-    __tablename__ = "att_scalar_devenum"
-    __table_args__ = {"extend_existing": True}
-    value_r_label = Column(TEXT)
-    value_r = Column(INTEGER)
-    value_w_label = Column(TEXT)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Float(Scalar):
-    """
-    Class that represents a Tango Float mapped to table 'att_scalar_devfloat'
-    """
-
-    __tablename__ = "att_scalar_devfloat"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(FLOAT)
-    value_w = Column(FLOAT)
-
-    def __repr__(self):
-        return f"<Scalar_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Long(Scalar):
-    """
-    Class that represents a Tango Long mapped to table 'att_scalar_devlong'
-    """
-
-    __tablename__ = "att_scalar_devlong"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INT4RANGE)
-    value_w = Column(INT4RANGE)
-
-    def __repr__(self):
-        return f"<Scalar_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Long64(Scalar):
-    """
-    Class that represents a Tango Long64 mapped to table 'att_scalar_devlong64'
-    """
-
-    __tablename__ = "att_scalar_devlong64"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INT8RANGE)
-    value_w = Column(INT8RANGE)
-
-    def __repr__(self):
-        return f"<Scalar_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_Short(Scalar):
-    """
-    Class that represents a Tango Short mapped to table 'att_scalar_devshort'
-    """
-
-    __tablename__ = "att_scalar_devshort"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INTEGER)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_State(Scalar):
-    """
-    Class that represents a Tango State mapped to table 'att_scalar_devstate'
-    """
-
-    __tablename__ = "att_scalar_devstate"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INTEGER)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_String(Scalar):
-    """
-    Class that represents a Tango String mapped to table 'att_scalar_devstring'
-    """
-
-    __tablename__ = "att_scalar_devstring"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(TEXT)
-    value_w = Column(TEXT)
-
-    def __repr__(self):
-        return f"<Scalar_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_UChar(Scalar):
-    """
-    Class that represents a Tango UChar mapped to table 'att_scalar_devuchar'
-    """
-
-    __tablename__ = "att_scalar_devuchar"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INTEGER)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_ULong(Scalar):
-    """
-    Class that represents a Tango ULong mapped to table 'att_scalar_devulong'
-    """
-
-    __tablename__ = "att_scalar_devulong"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INTEGER)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_ULong64(Scalar):
-    """
-    Class that represents a Tango ULong64 mapped to table 'att_scalar_devulong64'
-    """
-
-    __tablename__ = "att_scalar_devulong64"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INTEGER)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Scalar_UShort(Scalar):
-    """
-    Class that represents a Tango UShort mapped to table 'att_scalar_devushort'
-    """
-
-    __tablename__ = "att_scalar_devushort"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(INTEGER)
-    value_w = Column(INTEGER)
-
-    def __repr__(self):
-        return f"<Scalar_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array(Base):
-    """
-    Abstract super-class of the Array and Image mapper classes
-    """
-
-    __abstract__ = True
-    # A primary key is not defined for the tables which store values, but SQLAlchemy
-    # requires one. This definition exists only on the Python side and does not
-    # affect the DBMS schema
-    att_conf_id = Column(Integer, primary_key=True)
-    data_time = Column(TIMESTAMP, primary_key=True)
-    quality = Column(Integer)
-    att_error_desc_id = Column(Integer)
-    details = Column(JSON)
-
-
-class Array_Boolean(Array):
-    """
-    Class that represents a Tango Boolean Array mapped to table 'att_array_devboolean'
-    """
-
-    __tablename__ = "att_array_devboolean"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(Boolean))
-    value_w = Column(ARRAY(Boolean))
-
-    def __repr__(self):
-        return f"<Array_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Boolean(Array):
-    """
-    Class that represents a Tango Boolean Image mapped to table 'att_image_devboolean'
-    """
-
-    __tablename__ = "att_image_devboolean"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(Boolean))
-    value_w = Column(ARRAY(Boolean))
-
-    def __repr__(self):
-        return f"<Image_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Double(Array):
-    """
-    Class that represents a Tango Double Array mapped to table 'att_array_devdouble'
-    """
-
-    __tablename__ = "att_array_devdouble"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(FLOAT))
-    value_w = Column(ARRAY(FLOAT))
-
-    def __repr__(self):
-        return f"<Array_Double(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Double(Array):
-    """
-    Class that represents a Tango Double Image mapped to table 'att_image_devdouble'
-    """
-
-    __tablename__ = "att_image_devdouble"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(FLOAT))
-    value_w = Column(ARRAY(FLOAT))
-
-    def __repr__(self):
-        return f"<Image_Double(att_conf_id='{self.att_conf_id}', data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Encoded(Array):
-    """
-    Class that represents a Tango Encoded Array mapped to table 'att_array_devencoded'
-    """
-
-    __tablename__ = "att_array_devencoded"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(BYTEA))
-    value_w = Column(ARRAY(BYTEA))
-
-    def __repr__(self):
-        return f"<Array_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Encoded(Array):
-    """
-    Class that represents a Tango Encoded Image mapped to table 'att_image_devencoded'
-    """
-
-    __tablename__ = "att_image_devencoded"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(BYTEA))
-    value_w = Column(ARRAY(BYTEA))
-
-    def __repr__(self):
-        return f"<Image_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Enum(Array):
-    """
-    Class that represents a Tango Enum Array mapped to table 'att_array_devenum'
-    """
-
-    __tablename__ = "att_array_devenum"
-    __table_args__ = {"extend_existing": True}
-    value_r_label = Column(ARRAY(TEXT))
-    value_r = Column(ARRAY(INTEGER))
-    value_w_label = Column(ARRAY(TEXT))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Array_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Enum(Array):
-    """
-    Class that represents a Tango Enum Image mapped to table 'att_image_devenum'
-    """
-
-    __tablename__ = "att_image_devenum"
-    __table_args__ = {"extend_existing": True}
-    value_r_label = Column(ARRAY(TEXT))
-    value_r = Column(ARRAY(INTEGER))
-    value_w_label = Column(ARRAY(TEXT))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Image_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Float(Array):
-    """
-    Class that represents a Tango Float Array mapped to table 'att_array_devfloat'
-    """
-
-    __tablename__ = "att_array_devfloat"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(FLOAT))
-    value_w = Column(ARRAY(FLOAT))
-
-    def __repr__(self):
-        return f"<Array_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Float(Array):
-    """
-    Class that represents a Tango Float Image mapped to table 'att_image_devfloat'
-    """
-
-    __tablename__ = "att_image_devfloat"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(FLOAT))
-    value_w = Column(ARRAY(FLOAT))
-
-    def __repr__(self):
-        return f"<Image_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Long(Array):
-    """
-    Class that represents a Tango Long Array mapped to table 'att_array_devlong'
-    """
-
-    __tablename__ = "att_array_devlong"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INT4RANGE))
-    value_w = Column(ARRAY(INT4RANGE))
-
-    def __repr__(self):
-        return f"<Array_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Long(Array):
-    """
-    Class that represents a Tango Long Image mapped to table 'att_image_devlong'
-    """
-
-    __tablename__ = "att_image_devlong"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INT4RANGE))
-    value_w = Column(ARRAY(INT4RANGE))
-
-    def __repr__(self):
-        return f"<Image_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Long64(Array):
-    """
-    Class that represents a Tango Long64 Array mapped to table 'att_array_devlong64'
-    """
-
-    __tablename__ = "att_array_devlong64"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INT8RANGE))
-    value_w = Column(ARRAY(INT8RANGE))
-
-    def __repr__(self):
-        return f"<Array_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Long64(Array):
-    """
-    Class that represents a Tango Long64 Image mapped to table 'att_image_devlong64'
-    """
-
-    __tablename__ = "att_image_devlong64"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INT8RANGE))
-    value_w = Column(ARRAY(INT8RANGE))
-
-    def __repr__(self):
-        return f"<Image_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_Short(Array):
-    """
-    Class that represents a Tango Short Array mapped to table 'att_array_devshort'
-    """
-
-    __tablename__ = "att_array_devshort"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Array_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_Short(Array):
-    """
-    Class that represents a Tango Short Image mapped to table 'att_image_devshort'
-    """
-
-    __tablename__ = "att_image_devshort"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Image_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_State(Array):
-    """
-    Class that represents a Tango State Array mapped to table 'att_array_devstate'
-    """
-
-    __tablename__ = "att_array_devstate"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INT4RANGE))
-    value_w = Column(ARRAY(INT4RANGE))
-
-    def __repr__(self):
-        return f"<Array_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_State(Array):
-    """
-    Class that represents a Tango State Image mapped to table 'att_image_devstate'
-    """
-
-    __tablename__ = "att_image_devstate"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INT4RANGE))
-    value_w = Column(ARRAY(INT4RANGE))
-
-    def __repr__(self):
-        return f"<Image_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_String(Array):
-    """
-    Class that represents a Tango String Array mapped to table 'att_array_devstring'
-    """
-
-    __tablename__ = "att_array_devstring"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(TEXT))
-    value_w = Column(ARRAY(TEXT))
-
-    def __repr__(self):
-        return f"<Array_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_String(Array):
-    """
-    Class that represents a Tango String Image mapped to table 'att_image_devstring'
-    """
-
-    __tablename__ = "att_image_devstring"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(TEXT))
-    value_w = Column(ARRAY(TEXT))
-
-    def __repr__(self):
-        return f"<Image_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_UChar(Array):
-    """
-    Class that represents a Tango UChar Array mapped to table 'att_array_devuchar'
-    """
-
-    __tablename__ = "att_array_devuchar"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Array_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_UChar(Array):
-    """
-    Class that represents a Tango UChar Image mapped to table 'att_image_devuchar'
-    """
-
-    __tablename__ = "att_image_devuchar"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Image_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_ULong(Array):
-    """
-    Class that represents a Tango ULong Array mapped to table 'att_array_devulong'
-    """
-
-    __tablename__ = "att_array_devulong"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Array_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_ULong(Array):
-    """
-    Class that represents a Tango ULong Image mapped to table 'att_image_devulong'
-    """
-
-    __tablename__ = "att_image_devulong"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Image_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_ULong64(Array):
-    """
-    Class that represents a Tango ULong64 Array mapped to table 'att_array_devulong64'
-    """
-
-    __tablename__ = "att_array_devulong64"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Array_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_ULong64(Array):
-    """
-    Class that represents a Tango ULong64 Image mapped to table 'att_image_devulong64'
-    """
-
-    __tablename__ = "att_image_devulong64"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Image_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Array_UShort(Array):
-    """
-    Class that represents a Tango UShort Array mapped to table 'att_array_devushort'
-    """
-
-    __tablename__ = "att_array_devushort"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Array_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-class Image_UShort(Array):
-    """
-    Class that represents a Tango UShort Image mapped to table 'att_image_devushort'
-    """
-
-    __tablename__ = "att_image_devushort"
-    __table_args__ = {"extend_existing": True}
-    value_r = Column(ARRAY(INTEGER))
-    value_w = Column(ARRAY(INTEGER))
-
-    def __repr__(self):
-        return f"<Image_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
-
-
-def get_class_by_tablename(tablename: str):
-    """
-    Returns the mapper class associated with the given table name.
-    """
-    for mapper in Base.registry.mappers:
-        c = mapper.class_
-        classname = c.__name__
-        if not classname.startswith("_"):
-            if hasattr(c, "__tablename__") and c.__tablename__ == tablename:
-                return c
-    return None
-
-
-def get_viewclass_by_tablename(tablename: str):
-    """
-    Returns the view class associated with an attribute table name.
-    Example: for tablename 'att_array_devdouble', the format ('array') and
-             datatype ('double') are extracted from the string, and the class
-             mapped to the matching view table name ('lofar_array_double') is returned
-    """
-    format = tablename.split("_")[1].lower()
-    datatype = tablename.split("_")[2][3:].lower()  # Remove 'dev' prefix
-    for mapper in Base.registry.mappers:
-        c = mapper.class_
-        classname = c.__name__
-        if not classname.startswith("_"):
-            if hasattr(c, "__tablename__"):
-                if format == "scalar" and c.__tablename__ == f"lofar_scalar_{datatype}":
-                    return c
-                elif format == "array" and c.__tablename__ == f"lofar_array_{datatype}":
-                    return c
-                elif format == "image" and c.__tablename__ == f"lofar_image_{datatype}":
-                    return c
-    return None
-
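-# For example: get_class_by_tablename("att_scalar_devdouble") returns
-# Scalar_Double, while get_viewclass_by_tablename("att_scalar_devdouble")
-# returns Lofar_Scalar_Double.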
-
-def build_array_from_record(rows: List[Array], dim_x: int):
-    """
-    Converts Array database records into Python lists
-    """
-    matrix = numpy.array([])
-    for i in range(0, dim_x):
-        x = numpy.array(
-            [item for item in rows if item.idx == i]
-        )  # group records by array index
-        if i == 0:
-            matrix = numpy.append(matrix, x)  # append first row
-        else:
-            matrix = numpy.vstack([matrix, x])  # stack vertically
-    result = numpy.transpose(
-        matrix
-    )  # transpose -> each row is a distinct array of values
-    list_result = result.tolist()
-    return list_result
-
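-# Illustrative walk-through: with dim_x=2 and rows [r0(idx=0,t0), r1(idx=1,t0),
-# r2(idx=0,t1), r3(idx=1,t1)], the stacked matrix is [[r0, r2], [r1, r3]] and
-# the transpose yields one row per timestamp: [[r0, r1], [r2, r3]].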
-
-def get_values_from_record(data_matrix: List[Array]):
-    """
-    Returns a matrix of values from a matrix of Array records
-    """
-    array_matrix = numpy.array(data_matrix, dtype=object)
-    value_matrix = numpy.empty(array_matrix.shape)
-    for index in range(array_matrix.size):  # for each record
-        # extract the value from the record and put it in the matrix
-        # (avoids the deprecated numpy.matrix and ndarray.itemset APIs)
-        value_matrix.flat[index] = array_matrix.flat[index].value_r
-    return value_matrix
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py
deleted file mode 100644
index 68ddd5cdc3efaa38e853aef337c08beb99c50c4c..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json
deleted file mode 100644
index da8f563b3cfe69d6dff7c985ad6d70ae65618bb6..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
-    "_global_variables": {
-        "development": {
-            "polling_time": "1000",
-            "archive_abs_change": "1",
-            "archive_rel_change": null,    
-            "archive_period": "10000",
-            "event_period": "1000",
-            "strategy": "RUN",
-            "suffixes":[
-                {"attribute": "_error_R",   "archive_period": "10000", "event_period": "1000",  "abs_change": "1", "rel_change": null},
-                {"attribute": "_good_R",    "archive_period": "10000", "event_period": "1000",  "abs_change": "1", "rel_change": null},
-                {"attribute": "_mask_RW",   "archive_period": "10000", "event_period": "1000",  "abs_change": "1", "rel_change": null},
-                {"attribute": "_version_R", "archive_period": "60000", "event_period": "10000", "abs_change": "1", "rel_change": null}
-            ]
-        },
-        "production":{
-            "polling_time": "1000",
-            "archive_abs_change": "1",
-            "archive_rel_change": null,    
-            "archive_period": "3600000",
-            "event_period": "60000",
-            "strategy": "RUN",
-            "infixes":[
-                {"attribute": "/*IOUT*",   "archive_period": "60000",  "event_period": "1000",  "abs_change": null, "rel_change": 5},
-                {"attribute": "/*VOUT*",   "archive_period": "60000",  "event_period": "1000",  "abs_change": null, "rel_change": 5},
-                {"attribute": "/*TEMP*",   "archive_period": "60000",  "event_period": "1000",  "abs_change": 0.5,  "rel_change": 5}
-            ],
-            "suffixes":[
-                {"attribute": "_error_R",   "archive_period": "60000",      "event_period": "1000",  "abs_change": "1", "rel_change": null},
-                {"attribute": "_good_R",    "archive_period": "60000",      "event_period": "1000",  "abs_change": "1", "rel_change": null},
-                {"attribute": "_mask_RW",   "archive_period": "3600000",    "event_period": "1000",  "abs_change": "1", "rel_change": null}
-            ]
-        }     
-    },
-    "devices": {
-        "STAT/Beamlet/1": {
-            "environment": "development",
-            "exclude": [
-                "FPGA_bf_weights_*"
-            ],
-            "include": []
-        },
-        "STAT/Observation/*": {
-            "environment": "development",
-            "exclude": [],
-            "include": []
-        },
-        "STAT/RECV/1": {
-            "environment": "development",
-            "exclude": [],
-            "include": []
-        },
-        "STAT/SDP/1": {
-            "environment": "development",
-            "exclude": [
-                "FPGA_scrap_*",
-                "FPGA_signal_input_*"
-            ],
-            "include": [
-                {"attribute":"FPGA_temp_R",             "archive_period": "10000", "event_period": "1000",   "abs_change": "1", "rel_change": null},
-                {"attribute":"FPGA_firmware_version_R", "archive_period": "60000", "event_period": "10000",  "abs_change": "1", "rel_change": null}
-            ]
-        },
-        "STAT/SST/1": {
-            "environment": "development",
-            "exclude": [
-                "sst_R",
-                "sst_timestamp_R",
-                "integration_interval_R",
-                "subbands_calibrated_R"
-            ],
-            "include": []
-        },
-        "STAT/UNB2/1": {
-            "environment": "development",
-            "exclude": [],
-            "include": []
-        },
-        "STAT/XST/1": {
-            "environment": "development",
-            "exclude": [
-                "xst_*_R",
-                "integration_interval_R"
-            ],
-            "include": []
-        }
-    }
-}
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2_dev.json b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2_dev.json
deleted file mode 100644
index c0802b4b95a1d47b12a1273c718611f31ed58256..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2_dev.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
-    "global": {
-        "polling_time": "1000",
-        "archive_abs_change": "1",
-        "archive_rel_change": null,    
-        "archive_period": "10000",
-        "event_period": "1000",
-        "strategy": "RUN",
-        "infixes":[],
-        "suffixes":[
-            {"attribute": "_error_R",   "archive_period": "10000", "event_period": "1000",  "abs_change": "1", "rel_change": null},
-            {"attribute": "_good_R",    "archive_period": "10000", "event_period": "1000",  "abs_change": "1", "rel_change": null},
-            {"attribute": "_mask_RW",   "archive_period": "10000", "event_period": "1000",  "abs_change": "1", "rel_change": null},
-            {"attribute": "_version_R", "archive_period": "60000", "event_period": "10000", "abs_change": "1", "rel_change": null}
-        ]
-    },
-    "devices": {
-        "STAT/Beamlet/1": {
-            "exclude": ["/*"],
-            "include": []
-        },
-        "STAT/Observation/*": {
-            "exclude": ["/*"],
-            "include": []
-        },
-        "STAT/RECV/1": {
-            "exclude": ["/*"],
-            "include": []
-        },
-        "STAT/SDP/1": {
-            "exclude": ["/*"],
-            "include": [
-                {"attribute":"FPGA_temp_R",             "archive_period": "10000", "event_period": "1000",   "abs_change": "1", "rel_change": null},
-                {"attribute":"FPGA_firmware_version_R", "archive_period": "60000", "event_period": "10000",  "abs_change": "1", "rel_change": null}
-            ]
-        },
-        "STAT/SST/1": {
-            "exclude": ["/*"],
-            "include": []
-        },
-        "STAT/UNB2/1": {
-            "exclude": ["/*"],
-            "include": []
-        },
-        "STAT/XST/1": {
-            "exclude": ["/*"],
-            "include": []
-        }
-    }
-}
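Note that this development configuration excludes `"/*"` for every device: interpreted as a regular expression (as the removed `retrieve_attributes_from_wildcards` helper does), `/*` matches zero or more slashes and therefore matches somewhere in every attribute name. A standalone sketch of that behaviour:

```python
import re

attributes = ["ANT_mask_RW", "RCU_TEMP_R", "FPGA_firmware_version_R"]
exclude_patterns = ["/*"]  # from the config above: exclude everything by default

excluded = [
    a for a in attributes
    if any(re.compile(p).search(a) for p in exclude_patterns)
]
assert excluded == attributes  # "/*" matches (an empty run of '/') in every name
```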
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2_prod.json b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2_prod.json
deleted file mode 100644
index f9a7992194b7aad1319679c27f54621011d32b48..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2_prod.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
-    "global": {
-        "polling_time": "1000",
-        "archive_abs_change": "1",
-        "archive_rel_change": null,    
-        "archive_period": "3600000",
-        "event_period": "60000",
-        "strategy": "RUN",
-        "infixes":[
-            {"attribute": "/*IOUT*",   "archive_period": "60000",  "event_period": "1000",  "abs_change": null, "rel_change": "5"},
-            {"attribute": "/*VOUT*",   "archive_period": "60000",  "event_period": "1000",  "abs_change": null, "rel_change": "5"},
-            {"attribute": "/*TEMP*",   "archive_period": "60000",  "event_period": "1000",  "abs_change": "0.5",  "rel_change": "5"}
-        ],
-        "suffixes":[
-            {"attribute": "_error_R",   "archive_period": "60000",      "event_period": "1000",  "abs_change": "1", "rel_change": null},
-            {"attribute": "_good_R",    "archive_period": "60000",      "event_period": "1000",  "abs_change": "1", "rel_change": null},
-            {"attribute": "_mask_RW",   "archive_period": "3600000",    "event_period": "1000",  "abs_change": "1", "rel_change": null}
-        ]
-    },
-    "devices": {
-        "STAT/Beamlet/1": {
-            "exclude": [
-                "FPGA_bf_weights_*"
-            ],
-            "include": ["/*"]
-        },
-        "STAT/Observation/*": {
-            "exclude": [],
-            "include": ["/*"]
-        },
-        "STAT/RECV/1": {
-            "exclude": [],
-            "include": ["/*"]
-        },
-        "STAT/SDP/1": {
-            "exclude": [
-                "FPGA_scrap_*",
-                "FPGA_signal_input_*"
-            ],
-            "include": ["/*"]
-        },
-        "STAT/SST/1": {
-            "exclude": [
-                "sst_R",
-                "sst_timestamp_R",
-                "integration_interval_R",
-                "subbands_calibrated_R"
-            ],
-            "include": ["/*"]
-        },
-        "STAT/UNB2/1": {
-            "exclude": [],
-            "include": ["/*"]
-        },
-        "STAT/XST/1": {
-            "exclude": [
-                "xst_*_R",
-                "integration_interval_R"
-            ],
-            "include": ["/*"]
-        }
-    }
-}
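The `abs_change` and `rel_change` thresholds above follow the usual Tango archive-event semantics, where `rel_change` is a percentage; a sketch of the trigger condition under that assumption:

```python
from typing import Optional


def triggers_archive_event(old: float, new: float,
                           abs_change: Optional[float],
                           rel_change: Optional[float]) -> bool:
    """Sketch: would a new reading be archived, given the thresholds?"""
    delta = abs(new - old)
    if abs_change is not None and delta >= abs_change:
        return True
    if rel_change is not None and old != 0 and delta / abs(old) * 100 >= rel_change:
        return True
    return False


# A *TEMP* attribute (abs_change 0.5, rel_change 5): 20.0 -> 20.6 triggers
assert triggers_archive_event(20.0, 20.6, 0.5, 5.0)
```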
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py
deleted file mode 100644
index 87bcaf7a5c2f8c42302dd6407519562c443a74c2..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#! /usr/bin/env python3
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-"""
-
-Functions related to the managing of the archiver configuration JSON file
-
-"""
-import logging
-import re
-
-from tango import Database
-from tangostationcontrol.toolkit.archiver_util import (
-    get_attributes_from_suffix,
-    get_attributes_from_infix,
-    retrieve_attributes_from_wildcards,
-)
-
-logger = logging.getLogger()
-
-
-def _get_archiving_parameters(attribute: dict):
-    """Helper function that returns the following archiving parameters defined in a JSON file:
-    archive period [ms], event period [ms], absolute change, relative change
-    """
-    archive_period = int(attribute["archive_period"])
-    event_period = int(attribute["event_period"])
-    abs_change = attribute["abs_change"] and float(attribute["abs_change"])
-    rel_change = attribute["rel_change"] and int(attribute["rel_change"])
-    return archive_period, event_period, abs_change, rel_change
-
-
-def get_parameters_from_attribute(
-    device_name: str, attribute_name: str, config_dict: dict
-):
-    """
-    Return the archiving parameters (see '_get_archiving_parameters')
-    defined in the configuration file for a given attribute
-    """
-    # Search if the attribute parameters are listed inside the device configuration
-    include_node = config_dict["devices"][device_name].get("include", [])
-    if include_node != ["/*"]:
-        for a in include_node:
-            if attribute_name.lower() == a["attribute"].lower():
-                return _get_archiving_parameters(a)
-    # Search if the archiving parameters are listed inside the global infixes attributes
-    infixes = config_dict["global"]["infixes"]
-    for a in infixes:
-        # Match regular expression with attribute name
-        if re.compile(a["attribute"].lower()).search(attribute_name.lower()):
-            return _get_archiving_parameters(a)
-    # Search if the archiving parameters are listed inside the global suffixes attributes
-    suffixes = config_dict["global"]["suffixes"]
-    for a in suffixes:
-        if attribute_name.lower().endswith(a["attribute"].lower()):
-            return _get_archiving_parameters(a)
-    return None, None, None, None
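The lookup above applies a fixed precedence: explicit per-device includes first, then global infixes (matched as regular expressions), then global suffixes. A hypothetical usage, assuming `config_dict` holds the contents of `lofar2_prod.json`:

```python
# "RCU_TEMP_R" matches the "/*TEMP*" infix of lofar2_prod.json
archive_period, event_period, abs_change, rel_change = get_parameters_from_attribute(
    "STAT/RECV/1", "RCU_TEMP_R", config_dict
)
# -> (60000, 1000, 0.5, 5)
```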
-
-
-def get_include_attribute_list(device: str, config_dict: dict):
-    """
-    Return the list of attributes that must be archived from the JSON configuration file
-    """
-    suffixes = config_dict["global"]["suffixes"]
-    infixes = config_dict["global"]["infixes"]
-    # Attributes to be included in the archiving strategy
-    include_att_list = []
-    # Add attributes with defined suffixes
-    include_att_list.extend(get_attributes_from_suffix(device, suffixes))
-    include_att_list.extend(get_attributes_from_infix(device, infixes))
-    # Add attributes explicitly defined in the JSON include list
-    include_node = config_dict["devices"][device].get("include", [])
-    if include_node != ["/*"]:
-        for a in include_node:
-            include_att_list.append(a["attribute"])
-    return [f"{device}/{a}".lower() for a in include_att_list]
-
-
-def get_exclude_attribute_list(device: str, config_dict: dict):
-    """
-    Return the list of attributes that must not be archived from the JSON configuration file
-    """
-    exclude_list = config_dict["devices"][device].get(
-        "exclude", []
-    )  # may contain wildcards
-    exclude_att_list = retrieve_attributes_from_wildcards(device, exclude_list)
-    return exclude_att_list
-
-
-def get_global_env_parameters(config_dict: dict):
-    """Return the following archiving parameters defined in the 'global_variable' section of the JSON configuration file:
-    polling time [ms], absolute change, relative change, archive period [ms], event period [ms] and strategy
-    """
-    var_dict = config_dict["global"]
-    # Archiving parameters retrieved from JSON file
-    polling_time = int(var_dict["polling_time"])
-    archive_abs_change = var_dict["archive_abs_change"] and int(
-        var_dict["archive_abs_change"]
-    )
-    archive_rel_change = var_dict["archive_rel_change"] and int(
-        var_dict["archive_rel_change"]
-    )
-    archive_period = int(var_dict["archive_period"])
-    event_period = int(var_dict["event_period"])
-    strategy = var_dict["strategy"]
-    return (
-        polling_time,
-        archive_abs_change,
-        archive_rel_change,
-        archive_period,
-        event_period,
-        strategy,
-    )
-
-
-def get_multimember_devices(env_dict: dict):
-    """Given a regular expression, return multi-member device configuration if they are stored in TangoDB"""
-    # Get a Tango DB reference
-    tangodb = Database()
-    # Scan configuration dictionary for possible multi-member devices
-    matched_devices_dict = {}
-    for device in env_dict:
-        # Search for asterisk in device names
-        if re.match(".*/.*/[*]", device):
-            # Retrieve the list of member strings for the matched device name (e.g. ['1','2'])
-            members = tangodb.get_device_member(device)
-            # Build the matched device names in the form 'domain/family/member'
-            retrieved_devices = [f"{device[:-1]}{m}" for m in members]
-            # Append the device names and their configuration to the dictionary
-            for d in retrieved_devices:
-                matched_devices_dict[d] = env_dict[device]
-    return matched_devices_dict
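A standalone sketch of the wildcard expansion performed above, with the TangoDB lookup replaced by a hard-coded member list for illustration:

```python
import re

env_dict = {"STAT/Observation/*": {"exclude": [], "include": ["/*"]}}
members = ["1", "2"]  # what tangodb.get_device_member(...) might return

expanded = {}
for device, conf in env_dict.items():
    if re.match(".*/.*/[*]", device):
        for m in members:
            expanded[f"{device[:-1]}{m}"] = conf

assert list(expanded) == ["STAT/Observation/1", "STAT/Observation/2"]
```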
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
deleted file mode 100644
index 3209ed88c99ca5d81d5f8d31ba057647c63924b4..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#! /usr/bin/env python3
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-"""
-   Utility functions for the Archiver functionality.
-"""
-
-import os
-import re
-
-from tango import DeviceProxy, CmdArgType
-
-"""
-A dictionary mapping each Tango datatype to its size in bytes
-See reference https://tango-controls.readthedocs.io/en/latest/development/advanced/reference.html#tango-data-type
-and https://www.tutorialspoint.com/cplusplus/cpp_data_types.htm
-TODO: manage String attributes
-"""
-DATATYPES_SIZE_DICT = {
-    CmdArgType.DevBoolean: 1,
-    CmdArgType.DevShort: 2,
-    CmdArgType.DevLong: 8,
-    CmdArgType.DevFloat: 4,
-    CmdArgType.DevDouble: 8,
-    CmdArgType.DevUShort: 2,
-    CmdArgType.DevULong: 8,
-    CmdArgType.DevString: 20,
-    CmdArgType.DevVarCharArray: None,
-    CmdArgType.DevVarShortArray: None,
-    CmdArgType.DevVarLongArray: None,
-    CmdArgType.DevVarFloatArray: None,
-    CmdArgType.DevVarDoubleArray: None,
-    CmdArgType.DevVarUShortArray: None,
-    CmdArgType.DevVarULongArray: None,
-    CmdArgType.DevVarStringArray: None,
-    CmdArgType.DevVarLongStringArray: None,
-    CmdArgType.DevVarDoubleStringArray: None,
-    CmdArgType.DevState: 3,
-    CmdArgType.ConstDevString: None,
-    CmdArgType.DevVarBooleanArray: None,
-    CmdArgType.DevUChar: 1,
-    CmdArgType.DevLong64: 8,
-    CmdArgType.DevULong64: 8,
-    CmdArgType.DevVarLong64Array: None,
-    CmdArgType.DevVarULong64Array: None,
-    CmdArgType.DevInt: 4,
-    CmdArgType.DevEncoded: None,
-    CmdArgType.DevEnum: None,
-    CmdArgType.DevPipeBlob: None,
-}
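One use of such a size table is a rough storage estimate for a scalar attribute, assuming one stored value per archive period and ignoring database row overhead:

```python
def bytes_per_hour(datatype_size: int, archive_period_ms: int) -> float:
    """Rough archiving data rate for one scalar attribute (sketch)."""
    samples_per_hour = 3_600_000 / archive_period_ms
    return datatype_size * samples_per_hour


# A DevDouble (8 bytes) archived every 10 s: 8 * 360 = 2880 bytes/hour
assert bytes_per_hour(8, 10_000) == 2880
```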
-
-TANGO_HOST = os.environ.get("TANGO_HOST", None)
-
-
-def get_db_config(device_name: str) -> dict:
-    """
-    Retrieve the DB credentials from the Tango properties of Configuration Manager or EventSubscribers
-    """
-    device = DeviceProxy(device_name)
-    # example LibConfiguration property value:
-    # ['connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb', 'host=archiver-timescale', 'libname=libhdb++timescale.so', 'dbname=hdb', 'port=5432', 'user=postgres', 'password=password']
-    config_strs = device.get_property("LibConfiguration")["LibConfiguration"]
-
-    config = dict(config_str.split("=", 1) for config_str in config_strs)
-    return config
-
-
-def get_attribute_from_fqdn(attribute_name: str):
-    """
-    For some operations a Tango attribute name must be reduced from the form 'tango://db:port/domain/family/name/attribute'
-    to the canonical 'domain/family/name/attribute'
-    """
-    if attribute_name.startswith("tango://"):
-        return "/".join(attribute_name.split("/")[3:])
-
-    if len(attribute_name.split("/")) != 4:
-        raise ValueError(
-            f"Expected attribute of format 'domain/family/name/attribute', got {attribute_name}"
-        )
-
-    return attribute_name
-
-
-def device_fqdn(device_name: str, tango_host: str = TANGO_HOST):
-    """
-    For some operations a Tango device name must be expanded from the form 'domain/family/name'
-    to the FQDN 'tango://db:port/domain/family/name'
-    """
-    if device_name.startswith("tango://"):
-        return device_name.lower()
-
-    if len(device_name.split("/")) != 3:
-        raise ValueError(
-            f"Expected device name of format 'domain/family/name', got {device_name}"
-        )
-
-    return f"tango://{tango_host}/{device_name}".lower()
-
-
-def attribute_fqdn(attribute_name: str, tango_host: str = TANGO_HOST):
-    """
-    For some operations a Tango attribute name must be expanded from the form 'domain/family/name/attribute'
-    to the FQDN 'tango://db:port/domain/family/name/attribute'
-    """
-    if attribute_name.startswith("tango://"):
-        return attribute_name.lower()
-
-    if len(attribute_name.split("/")) != 4:
-        raise ValueError(
-            f"Expected attribute name of format 'domain/family/name/attribute', got {attribute_name}"
-        )
-
-    return f"tango://{tango_host}/{attribute_name}".lower()
-
-
-def split_tango_name(tango_fqname: str, tango_type: str):
-    """
-    Helper function to split a device or attribute Tango fully qualified domain name
-    into its components
-    """
-    if tango_type.lower() == "device":
-        try:
-            domain, family, member = tango_fqname.split("/")
-            return domain, family, member
-        except ValueError as e:
-            raise ValueError(
-                f"Could not parse device name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1"
-            ) from e
-    elif tango_type.lower() == "attribute":
-        try:
-            domain, family, member, name = tango_fqname.split("/")
-            return domain, family, member, name
-        except ValueError as e:
-            raise ValueError(
-                f"Could not parse attribute name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute"
-            ) from e
-    else:
-        raise ValueError(
-            f"Invalid value: {tango_type}. Please provide 'device' or 'attribute'."
-        )
-
-
-def get_attributes_from_suffix(device_name: str, suffixes: list):
-    """
-    Return a list of device attributes whose suffix is present in the input suffixes list
-    """
-    device = DeviceProxy(device_name)
-    attribute_list = device.get_attribute_list()
-    result = []
-    for s in suffixes:
-        att_name = s["attribute"]
-        # Search suffix substring in the device attribute list
-        result.extend(
-            [a for a in attribute_list if a.lower().endswith(att_name.lower())]
-        )
-    return result
-
-
-def get_attributes_from_infix(device_name: str, infixes: list):
-    """
-    Return a list of device attributes whose infix is present in the input infixes list
-    """
-    device = DeviceProxy(device_name)
-    attribute_list = device.get_attribute_list()
-    result = []
-    for inf in infixes:
-        att_name = inf["attribute"]
-        # Search infix substring in the device attribute list
-        result.extend(
-            [
-                a
-                for a in attribute_list
-                if re.compile(att_name.lower()).search(a.lower())
-            ]
-        )
-    return result
-
-
-def retrieve_attributes_from_wildcards(device_name: str, matching_list: list):
-    """
-    Return a list of device attributes based on the given wildcards and/or attribute names
-    """
-    device = DeviceProxy(device_name)
-    attribute_list = device.get_attribute_list()
-    matched_list = []
-    for m in matching_list:
-        pattern = re.compile(m)
-        for a in attribute_list:
-            if pattern.search(a):
-                matched_list.append(f"{device_name}/{a}".lower())
-    return matched_list
-
-
-def get_size_from_datatype(datatype: int) -> int:
-    """
-    Return the number of bytes for a given Tango datatype
-    """
-    try:
-        return DATATYPES_SIZE_DICT[datatype]
-    except KeyError:
-        return 1
-
-
-def filter_attribute_list(device_name: str, exclude: list) -> list:
-    """
-    Filter out the attributes in exclude-list
-    """
-    device_attrs_list = DeviceProxy(device_name).get_attribute_list()
-    # Filter out excluded attributes
-    return [a for a in device_attrs_list if f"{device_name}/{a}".lower() not in exclude]
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
deleted file mode 100644
index 16bf752fbc096a566c73323635257b233ae035f5..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
+++ /dev/null
@@ -1,330 +0,0 @@
-#! /usr/bin/env python3
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-import importlib
-from abc import ABC, abstractmethod
-from datetime import datetime, timedelta
-
-from sqlalchemy import create_engine, and_
-from sqlalchemy.orm import sessionmaker
-from sqlalchemy.orm.exc import NoResultFound
-from tangostationcontrol.toolkit.archiver_util import get_db_config, split_tango_name
-
-
-class Retriever(ABC):
-    """
-    The Retriever abstract class implements retrieve operations on a given DBMS
-    """
-
-    def __init__(self):
-        self.session = self.connect_to_archiving_db()
-        self.ab = self.set_archiver_base()
-
-    def create_session(self, creds):
-        """
-        Returns a session factory (sessionmaker) for a DBMS using the given credentials.
-        """
-        libname = creds["libname"]
-        user = creds["user"]
-        password = creds["password"]
-        host = creds["host"]
-        port = creds["port"]
-        dbname = creds["dbname"]
-
-        connection_string = f"{libname}://{user}:{password}@{host}:{port}/{dbname}"
-        engine = create_engine(connection_string)
-        Session = sessionmaker(bind=engine)
-        return Session
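With the example `LibConfiguration` credentials shown in `archiver_util.get_db_config`, the connection string assembled above would read as follows (illustration only):

```python
creds = {
    "libname": "postgresql+psycopg2",  # rewritten by RetrieverTimescale below
    "user": "postgres", "password": "password",
    "host": "archiver-timescale", "port": "5432", "dbname": "hdb",
}
# -> "postgresql+psycopg2://postgres:password@archiver-timescale:5432/hdb"
```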
-
-    @abstractmethod
-    def set_archiver_base(self):
-        return
-
-    @abstractmethod
-    def connect_to_archiving_db(self):
-        return
-
-    def get_all_archived_attributes(self):
-        """
-        Returns a list of the archived attributes in the DB.
-        """
-        attrs = (
-            self.session.query(self.ab.Attribute)
-            .order_by(self.ab.Attribute.att_conf_id)
-            .all()
-        )
-        # Each row is represented as defined by the __repr__ method of the mapper class
-        return attrs
-
-    def get_archived_attributes_by_device(self, device_fqname: str):
-        """
-        Takes as input the fully-qualified name of a device and returns a list of its archived attributes
-        """
-        domain, family, member = split_tango_name(device_fqname, "device")
-        attrs = (
-            self.session.query(self.ab.Attribute)
-            .filter(
-                and_(
-                    self.ab.Attribute.domain == domain,
-                    self.ab.Attribute.family == family,
-                    self.ab.Attribute.member == member,
-                )
-            )
-            .all()
-        )
-        # Each row is represented as defined by the __repr__ method of the mapper class
-        return attrs
-
-    def get_attribute_id(self, attribute_fqname: str):
-        """
-        Takes as input the fully-qualified name of an attribute and returns its id.
-        """
-        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
-        try:
-            result = (
-                self.session.query(self.ab.Attribute.att_conf_id)
-                .filter(
-                    and_(
-                        self.ab.Attribute.domain == domain,
-                        self.ab.Attribute.family == family,
-                        self.ab.Attribute.member == member,
-                        self.ab.Attribute.name == name,
-                    )
-                )
-                .one()
-            )
-            return result[0]
-        except (TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-
-    @abstractmethod
-    def get_attribute_datatype(self, attribute_fqname: str):
-        return
-
-    def get_attribute_value_by_hours(
-        self, attribute_fqname: str, hours: float, tablename: str
-    ):
-        """
-        Takes as input the attribute fully-qualified name and the number of past hours from the current time
-        (e.g. hours=1 retrieves values from the last hour, hours=8.5 from the last eight and a half hours).
-        Returns the matching archived rows, ordered by timestamp
-        """
-        attr_id = self.get_attribute_id(attribute_fqname)
-        # Retrieves the class that maps the DB table given the tablename
-        base_class = self.ab.get_class_by_tablename(tablename)
-        # Retrieves the timestamp
-        time_now = datetime.now()
-        time_delta = time_now - timedelta(hours=hours)
-        # Converts the timestamps in the right format for the query
-        time_now_db = str(time_now.strftime("%Y-%m-%d %X"))
-        time_delta_db = str(time_delta.strftime("%Y-%m-%d %X"))
-        try:
-            result = (
-                self.session.query(base_class)
-                .join(
-                    self.ab.Attribute,
-                    self.ab.Attribute.att_conf_id == base_class.att_conf_id,
-                )
-                .filter(
-                    and_(
-                        self.ab.Attribute.att_conf_id == attr_id,
-                        base_class.quality == 0,
-                        base_class.data_time >= time_delta_db,
-                        base_class.data_time <= time_now_db,
-                    )
-                )
-                .order_by(base_class.data_time)
-                .all()
-            )
-        except (AttributeError, TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-        return result
-
-    def get_attribute_value_by_interval(
-        self,
-        attribute_fqname: str,
-        start_time: datetime,
-        stop_time: datetime,
-        tablename: str,
-    ):
-        """
-        Takes as input the attribute name and a start and stop time.
-        The datetime format is flexible (e.g. "YYYY-MM-dd hh:mm:ss").
-        Returns the matching archived rows, ordered by timestamp
-        """
-        attr_id = self.get_attribute_id(attribute_fqname)
-        # Retrieves the class that maps the DB table given the tablename
-        base_class = self.ab.get_class_by_tablename(tablename)
-        try:
-            result = (
-                self.session.query(base_class)
-                .join(
-                    self.ab.Attribute,
-                    self.ab.Attribute.att_conf_id == base_class.att_conf_id,
-                )
-                .filter(
-                    and_(
-                        self.ab.Attribute.att_conf_id == attr_id,
-                        base_class.quality == 0,
-                        base_class.data_time >= str(start_time),
-                        base_class.data_time <= str(stop_time),
-                    )
-                )
-                .order_by(base_class.data_time)
-                .all()
-            )
-        except (AttributeError, TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-        return result
-
-
-class RetrieverTimescale(Retriever):
-    def __init__(self, cm_name: str = "archiving/hdbppts/confmanager01"):
-        self.cm_name = cm_name
-
-        super().__init__()
-
-    def connect_to_archiving_db(self):
-        """
-        Returns a session to the TimescaleDB using the credentials retrieved from the Configuration Manager.
-        """
-        creds = get_db_config(self.cm_name)
-
-        # Set sqlalchemy library connection
-        if creds["host"] == "archiver-timescale":
-            creds["libname"] = "postgresql+psycopg2"
-        else:
-            raise ValueError(
-                f"Invalid hostname: {creds['host']}, we only support 'archiver-timescale'"
-            )
-
-        Session = self.create_session(creds)
-        return Session()
-
-    def set_archiver_base(self):
-        """
-        Returns the mapper module matching the DBMS connection
-        """
-        return importlib.import_module(".archiver_base_ts", package=__package__)
-
-    def get_attribute_datatype(self, attribute_fqname: str):
-        """
-        Takes as input the fully-qualified name of an attribute and returns its Data-Type.
-        The datatype name indicates the type (e.g. string, int, ...) and the read/write property. It is used
-        as the suffix of the DB table name in which the values are stored.
-        """
-        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
-        try:
-            result = (
-                self.session.query(self.ab.DataType.type)
-                .join(
-                    self.ab.Attribute,
-                    self.ab.Attribute.att_conf_type_id
-                    == self.ab.DataType.att_conf_type_id,
-                )
-                .filter(
-                    and_(
-                        self.ab.Attribute.domain == domain,
-                        self.ab.Attribute.family == family,
-                        self.ab.Attribute.member == member,
-                        self.ab.Attribute.name == name,
-                    )
-                )
-                .one()
-            )
-            return result[0]
-        except (AttributeError, TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-
-    def get_attribute_format(self, attribute_fqname: str):
-        """
-        Takes as input the fully-qualified name of an attribute and returns its format.
-        There are three formats: Scalar, Spectrum and Image.
-        * Works only for POSTGRESQL *
-        """
-        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
-        try:
-            result = (
-                self.session.query(self.ab.Format.format)
-                .join(
-                    self.ab.Attribute,
-                    self.ab.Attribute.att_conf_format_id
-                    == self.ab.Format.att_conf_format_id,
-                )
-                .filter(
-                    and_(
-                        self.ab.Attribute.domain == domain,
-                        self.ab.Attribute.family == family,
-                        self.ab.Attribute.member == member,
-                        self.ab.Attribute.name == name,
-                    )
-                )
-                .one()
-            )
-            return result[0]
-        except (AttributeError, TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-
-    def get_attribute_tablename(self, attribute_fqname: str):
-        """
-        Takes as input the fully-qualified name of an attribute and returns the tablename where it is stored.
-        * Works only for POSTGRESQL *
-        """
-        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
-        try:
-            result = (
-                self.session.query(self.ab.Attribute.table_name)
-                .filter(
-                    and_(
-                        self.ab.Attribute.domain == domain,
-                        self.ab.Attribute.family == family,
-                        self.ab.Attribute.member == member,
-                        self.ab.Attribute.name == name,
-                    )
-                )
-                .one()
-            )
-            return result[0]
-        except (AttributeError, TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-
-    def get_attribute_value_by_hours(self, attribute_fqname: str, hours: float = 1.0):
-        """
-        Takes as input the attribute fully-qualified name and the number of past hours from the current time
-        (e.g. hours=1 retrieves values from the last hour, hours=8.5 from the last eight and a half hours).
-        Returns the matching archived rows, ordered by timestamp
-        """
-        tablename = self.get_attribute_tablename(attribute_fqname)
-        return super().get_attribute_value_by_hours(attribute_fqname, hours, tablename)
-
-    def get_attribute_value_by_interval(
-        self, attribute_fqname: str, start_time: datetime, stop_time: datetime
-    ):
-        """
-        Takes as input the attribute name and a start and stop time.
-        The datetime format is flexible (e.g. "YYYY-MM-dd hh:mm:ss").
-        Returns the matching archived rows, ordered by timestamp
-        """
-        tablename = self.get_attribute_tablename(attribute_fqname)
-        return super().get_attribute_value_by_interval(
-            attribute_fqname, start_time, stop_time, tablename
-        )
-
-    def get_lofar_attribute(self, attribute_fqname: str):
-        """
-        Takes as input the attribute fully-qualified name and queries the customized lofar attribute views
-        Returns a list of rows containing device name, attribute name, timestamp and value
-        """
-        # Retrieves the attribute tablename
-        tablename = self.get_attribute_tablename(attribute_fqname)
-        # Retrieves the class that maps the DB table given the tablename
-        base_class = self.ab.get_viewclass_by_tablename(tablename)
-        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
-        try:
-            result = (
-                self.session.query(base_class).filter(base_class.name == name).all()
-            )
-        except (AttributeError, TypeError, NoResultFound) as e:
-            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
-        return result
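A hedged usage sketch of the retriever above; it assumes a running archiver stack, the default Configuration Manager name, and hdbpp column names `data_time` and `value_r` (verify against the actual schema):

```python
from tangostationcontrol.toolkit.retriever import RetrieverTimescale

retriever = RetrieverTimescale()
rows = retriever.get_attribute_value_by_hours("stat/sdp/1/fpga_temp_r", hours=1.0)
for row in rows:
    print(row.data_time, row.value_r)  # column names depend on the hdbpp schema
```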
diff --git a/tangostationcontrol/test/toolkit/test_archiver_config_file.py b/tangostationcontrol/test/toolkit/test_archiver_config_file.py
deleted file mode 100644
index 34b8b8d548df6f492a171b76226c98f38e9a52f6..0000000000000000000000000000000000000000
--- a/tangostationcontrol/test/toolkit/test_archiver_config_file.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-import json
-
-import pkg_resources
-from test import base
-from tangostationcontrol.toolkit.archiver_configurator import (
-    get_global_env_parameters,
-    get_parameters_from_attribute,
-)
-
-
-class TestArchiverConfigFile(base.TestCase):
-    dev_config_dict = json.load(
-        pkg_resources.resource_stream(
-            "tangostationcontrol.toolkit", f"archiver_config/lofar2_dev.json"
-        )
-    )
-    prod_config_dict = json.load(
-        pkg_resources.resource_stream(
-            "tangostationcontrol.toolkit", f"archiver_config/lofar2_prod.json"
-        )
-    )
-
-    def test_separate_config_files(self):
-        self.assertIsNotNone(self.dev_config_dict)
-        self.assertIsNotNone(self.prod_config_dict)
-
-    def test_get_global_variables(self):
-        config_dicts = [self.dev_config_dict, self.prod_config_dict]
-        for d in config_dicts:
-            (
-                polling_time,
-                archive_abs_change,
-                archive_rel_change,
-                archive_period,
-                event_period,
-                strategy,
-            ) = get_global_env_parameters(d)
-            self.assertEqual(type(polling_time), int)
-            self.assertEqual(type(archive_abs_change), int)
-            self.assertEqual(f"{type(archive_rel_change)}", f"<class 'NoneType'>")
-            self.assertEqual(type(archive_period), int)
-            self.assertEqual(type(event_period), int)
-            self.assertEqual(type(strategy), str)
-
-    def test_get_parameters_from_infixes_list(self):
-        device_name = "STAT/RECV/1"
-        attribute_name = "RCU_TEMP_R"
-        (
-            archive_period,
-            event_period,
-            abs_change,
-            rel_change,
-        ) = get_parameters_from_attribute(
-            device_name, attribute_name, self.prod_config_dict
-        )
-        self.assertEqual(archive_period, 60000)
-        self.assertEqual(event_period, 1000)
-        self.assertEqual(abs_change, 0.5)
-        self.assertEqual(rel_change, 5.0)
-
-    def test_get_parameters_from_suffixes_list(self):
-        device_name = "STAT/RECV/1"
-        attribute_name = "RECVTR_I2C_error_R"
-        (
-            archive_period,
-            event_period,
-            abs_change,
-            rel_change,
-        ) = get_parameters_from_attribute(
-            device_name, attribute_name, self.prod_config_dict
-        )
-        self.assertEqual(archive_period, 60000)
-        self.assertEqual(event_period, 1000)
-        self.assertEqual(abs_change, 1)
-        self.assertEqual(rel_change, None)
diff --git a/tangostationcontrol/test/toolkit/test_archiver_configurator.py b/tangostationcontrol/test/toolkit/test_archiver_configurator.py
deleted file mode 100644
index f9c9d7b6223c61b264821325d49f02eb9829707c..0000000000000000000000000000000000000000
--- a/tangostationcontrol/test/toolkit/test_archiver_configurator.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-import json
-
-import pkg_resources
-from test import base
-from tangostationcontrol.toolkit.archiver_configurator import (
-    get_parameters_from_attribute,
-    get_global_env_parameters,
-)
-
-
-class TestArchiverConfigurator(base.TestCase):
-    DEVICE_NAME = "STAT/RECV/1"
-    ATTRIBUTE_NAME = "ant_mask_rw"
-    PROD_CONFIG_DICT = json.load(
-        pkg_resources.resource_stream(
-            "tangostationcontrol.toolkit", f"archiver_config/lofar2_prod.json"
-        )
-    )
-    DEV_CONFIG_DICT = json.load(
-        pkg_resources.resource_stream(
-            "tangostationcontrol.toolkit", f"archiver_config/lofar2_dev.json"
-        )
-    )
-    DEV_SUFFIXES = DEV_CONFIG_DICT["global"]["suffixes"]
-    PROD_SUFFIXES = PROD_CONFIG_DICT["global"]["suffixes"]
-    PROD_INFIXES = PROD_CONFIG_DICT["global"]["infixes"]
-
-    def test_get_parameters_from_attribute(self):
-        """Test if the attribute archiving parameters are correctly retrieved from the JSON config file"""
-        self.assertIsNotNone(self.DEV_CONFIG_DICT)
-        (
-            archive_period,
-            event_period,
-            abs_change,
-            rel_change,
-        ) = get_parameters_from_attribute(
-            self.DEVICE_NAME, self.ATTRIBUTE_NAME, self.DEV_CONFIG_DICT
-        )
-        self.assertEqual(archive_period, int(self.DEV_SUFFIXES[2]["archive_period"]))
-        self.assertEqual(event_period, int(self.DEV_SUFFIXES[2]["event_period"]))
-        self.assertEqual(abs_change, float(self.DEV_SUFFIXES[2]["abs_change"]))
-        self.assertEqual(
-            rel_change,
-            self.DEV_SUFFIXES[2]["rel_change"]
-            and int(self.DEV_SUFFIXES[2]["rel_change"]),
-        )
-
-        """Test if the attribute archiving parameters are correctly retrieved from the infixes list (production environment)"""
-        attribute_name = "rcu_temp_r"  # 'TEMP' is in the infixes list
-        (
-            archive_period,
-            event_period,
-            abs_change,
-            rel_change,
-        ) = get_parameters_from_attribute(
-            self.DEVICE_NAME, attribute_name, self.PROD_CONFIG_DICT
-        )
-        self.assertEqual(archive_period, int(self.PROD_INFIXES[2]["archive_period"]))
-        self.assertEqual(event_period, int(self.PROD_INFIXES[2]["event_period"]))
-        self.assertEqual(abs_change, float(self.PROD_INFIXES[2]["abs_change"]))
-        self.assertEqual(
-            rel_change,
-            self.PROD_INFIXES[2]["rel_change"]
-            and int(self.PROD_INFIXES[2]["rel_change"]),
-        )
-
-    def test_get_global_env_parameters(self):
-        """Test if the include attribute list is correctly retrieved from the JSON config file"""
-        self.assertIsNotNone(self.PROD_CONFIG_DICT)
-        (
-            polling_time,
-            archive_abs_change,
-            archive_rel_change,
-            archive_period,
-            event_period,
-            strategy,
-        ) = get_global_env_parameters(self.PROD_CONFIG_DICT)
-        self.assertEqual(type(polling_time), int)
-        self.assertEqual(type(archive_abs_change), int)
-        self.assertEqual(f"{type(archive_rel_change)}", f"<class 'NoneType'>")
-        self.assertEqual(type(archive_period), int)
-        self.assertEqual(type(event_period), int)
-        self.assertEqual(type(strategy), str)
diff --git a/tangostationcontrol/test/toolkit/test_archiver_util.py b/tangostationcontrol/test/toolkit/test_archiver_util.py
deleted file mode 100644
index 587ee73ae15d58b7cda42c0a8b95c5ac39872894..0000000000000000000000000000000000000000
--- a/tangostationcontrol/test/toolkit/test_archiver_util.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-from test import base
-from tangostationcontrol.toolkit.archiver_util import (
-    get_attribute_from_fqdn,
-    split_tango_name,
-    device_fqdn,
-    attribute_fqdn,
-    get_size_from_datatype,
-)
-
-
-class TestArchiverUtil(base.TestCase):
-    DEVICE_NAME = "STAT/RECV/1"
-    ATTRIBUTE_NAME = "ant_mask_rw"
-
-    def test_get_attribute_from_fqdn(self):
-        """Test if a Tango attribute name is correctly retrieved from a Tango FQDN"""
-        fqdn = f"tango://databaseds:10000/{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}"
-        self.assertEqual("STAT/RECV/1/ant_mask_rw", get_attribute_from_fqdn(fqdn))
-
-    def test_device_fqdn(self):
-        """Test if a device name is correctly converted in a Tango FQDN"""
-        self.assertEqual(
-            f"tango://databaseds:10000/{self.DEVICE_NAME}".lower(),
-            device_fqdn(self.DEVICE_NAME, "databaseds:10000"),
-        )
-
-    def test_attribute_fqdn(self):
-        """Test if an attribute name is correctly converted in a Tango FQDN"""
-        self.assertEqual(
-            f"tango://databaseds:10000/{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}".lower(),
-            attribute_fqdn(
-                f"{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}", "databaseds:10000"
-            ),
-        )
-        self.assertRaises(ValueError, lambda: attribute_fqdn(self.ATTRIBUTE_NAME))
-
-    def test_split_tango_name(self):
-        """Test if the Tango full qualified domain names are correctly splitted"""
-        self.assertEqual(
-            ("STAT", "RECV", "1"), split_tango_name(self.DEVICE_NAME, "device")
-        )
-        self.assertEqual(
-            ("STAT", "RECV", "1", "ant_mask_rw"),
-            split_tango_name(f"{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}", "attribute"),
-        )
-
-    def test_get_size_from_datatype(self):
-        """Test if the bytesize of a certain datatype is correctly retrieved"""
-        datatype_boolean = 1  # 1 byte
-        self.assertEqual(1, get_size_from_datatype(datatype_boolean))
-        datatype_double = 5  # 8 bytes
-        self.assertEqual(8, get_size_from_datatype(datatype_double))