Skip to content
Snippets Groups Projects
Unverified Commit 8e7e0e6e authored by dlacoste-esrf's avatar dlacoste-esrf Committed by GitHub
Browse files

Revert "Enum support"

parent b3909cd2
Branches
No related tags found
No related merge requests found
Showing
with 1288 additions and 552 deletions
...@@ -7,18 +7,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ...@@ -7,18 +7,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
## [Unreleased] ## [Unreleased]
### Changed
- Moved some system documentation to hdbpp-timescale-project (the consolidated project).
- Consolidated remaining build/install instructions into README
- Modified build system to use fetch libhdbpp and include it when requested. This is an aid to development.
- Supported LIBHDBPP_PROJECT_BUILD flag, that is injected into the build from hdbpp-timescale-project
- Made compatible with new libhdbpp (namespace, function and path changes)
### Removed
- Removed the embedded version of libhdbpp (the build can now source it at build time)
## [0.11.2] - 2020-01-23 ## [0.11.2] - 2020-01-23
### Fixed ### Fixed
......
...@@ -9,7 +9,7 @@ if ( ${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR} ) ...@@ -9,7 +9,7 @@ if ( ${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR} )
endif() endif()
# Start Build Config ----------------------------------- # Start Build Config -----------------------------------
cmake_minimum_required(VERSION 3.11) cmake_minimum_required(VERSION 3.6)
set(CMAKE_SKIP_RPATH true) set(CMAKE_SKIP_RPATH true)
set(CMAKE_VERBOSE_MAKEFILE ON) set(CMAKE_VERBOSE_MAKEFILE ON)
set(CMAKE_COLOR_MAKEFILE ON) set(CMAKE_COLOR_MAKEFILE ON)
...@@ -18,9 +18,9 @@ set(CMAKE_COLOR_MAKEFILE ON) ...@@ -18,9 +18,9 @@ set(CMAKE_COLOR_MAKEFILE ON)
set(LIBHDBPP_TIMESCALE_NAME "libhdb++timescale") set(LIBHDBPP_TIMESCALE_NAME "libhdb++timescale")
# Versioning # Versioning
set(VERSION_MAJOR "1") set(VERSION_MAJOR "0")
set(VERSION_MINOR "0") set(VERSION_MINOR "11")
set(VERSION_PATCH "0") set(VERSION_PATCH "2")
set(VERSION_METADATA "") set(VERSION_METADATA "")
set(VERSION_STRING ${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_PATCH}) set(VERSION_STRING ${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_PATCH})
...@@ -38,9 +38,6 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON) ...@@ -38,9 +38,6 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF) set(CMAKE_CXX_EXTENSIONS OFF)
# Build options # Build options
set(FETCH_LIBHDBPP_TAG "exp-refactor" CACHE STRING "Libhdbpp branch/tag to clone 'master'")
option(FETCH_LIBHDBPP "Download and build using a local copy of libhdb++" ON)
option(FETCH_LIBHDBPP_TAG "When FETCH_LIBHDBPP is enabled, this is the tag fetch ('master')")
option(BUILD_UNIT_TESTS "Build unit tests" OFF) option(BUILD_UNIT_TESTS "Build unit tests" OFF)
option(BUILD_BENCHMARK_TESTS "Build benchmarking tests (Forces RELEASE build)" OFF) option(BUILD_BENCHMARK_TESTS "Build benchmarking tests (Forces RELEASE build)" OFF)
option(ENABLE_CLANG "Enable clang code and layout analysis" OFF) option(ENABLE_CLANG "Enable clang code and layout analysis" OFF)
...@@ -69,6 +66,12 @@ list(APPEND CMAKE_PREFIX_PATH "/usr") ...@@ -69,6 +66,12 @@ list(APPEND CMAKE_PREFIX_PATH "/usr")
# Find Dependencies --------------------- # Find Dependencies ---------------------
include(cmake/FindLibraries.cmake) include(cmake/FindLibraries.cmake)
# Attempt to find the various libraries the project is dependent on
if(TDB_LIBRARIES)
find_libraries(LIBRARIES ${TDB_LIBRARIES} SEARCH_PATHS ${LIBRARY_PATHS})
set(TDB_FOUND_LIBRARIES ${FOUND_LIBRARIES})
endif(TDB_LIBRARIES)
# First find tango if it has not already been found. Returns an interface library # First find tango if it has not already been found. Returns an interface library
# called TangoInterfaceLibrary # called TangoInterfaceLibrary
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_SOURCE_DIR}/cmake") set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_SOURCE_DIR}/cmake")
...@@ -77,12 +80,6 @@ find_package(Tango) ...@@ -77,12 +80,6 @@ find_package(Tango)
set(THREADS_PREFER_PTHREAD_FLAG ON) set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED) find_package(Threads REQUIRED)
# Attempt to find the various libraries the project is dependent on
if(TDB_LIBRARIES)
find_libraries(LIBRARIES ${TDB_LIBRARIES} SEARCH_PATHS ${LIBRARY_PATHS})
set(TDB_FOUND_LIBRARIES ${FOUND_LIBRARIES})
endif(TDB_LIBRARIES)
# Thirdparty Integration ----------------------------------- # Thirdparty Integration -----------------------------------
# build google benchmark (target: benchmark) # build google benchmark (target: benchmark)
...@@ -94,6 +91,7 @@ add_subdirectory(thirdparty/google/benchmark EXCLUDE_FROM_ALL) ...@@ -94,6 +91,7 @@ add_subdirectory(thirdparty/google/benchmark EXCLUDE_FROM_ALL)
add_subdirectory(thirdparty/google/googletest/googletest EXCLUDE_FROM_ALL) add_subdirectory(thirdparty/google/googletest/googletest EXCLUDE_FROM_ALL)
# Include the thirdparty projects # Include the thirdparty projects
add_subdirectory(thirdparty/libhdbpp EXCLUDE_FROM_ALL)
add_subdirectory(thirdparty/libpqxx EXCLUDE_FROM_ALL) add_subdirectory(thirdparty/libpqxx EXCLUDE_FROM_ALL)
add_subdirectory(thirdparty/spdlog EXCLUDE_FROM_ALL) add_subdirectory(thirdparty/spdlog EXCLUDE_FROM_ALL)
add_subdirectory(thirdparty/Catch2 EXCLUDE_FROM_ALL) add_subdirectory(thirdparty/Catch2 EXCLUDE_FROM_ALL)
...@@ -139,7 +137,7 @@ add_subdirectory(src) ...@@ -139,7 +137,7 @@ add_subdirectory(src)
add_library(libhdbpp_timescale_shared_library SHARED ${SRC_FILES}) add_library(libhdbpp_timescale_shared_library SHARED ${SRC_FILES})
target_link_libraries(libhdbpp_timescale_shared_library target_link_libraries(libhdbpp_timescale_shared_library
PUBLIC ${TDB_FOUND_LIBRARIES} pqxx_static spdlog::spdlog_header_only Threads::Threads PUBLIC ${TDB_FOUND_LIBRARIES} pqxx_static libhdbpp_headers spdlog::spdlog_header_only Threads::Threads
PRIVATE TangoInterfaceLibrary) PRIVATE TangoInterfaceLibrary)
target_include_directories(libhdbpp_timescale_shared_library target_include_directories(libhdbpp_timescale_shared_library
...@@ -148,7 +146,6 @@ target_include_directories(libhdbpp_timescale_shared_library ...@@ -148,7 +146,6 @@ target_include_directories(libhdbpp_timescale_shared_library
$<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/include> $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/include>
PRIVATE PRIVATE
$<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/src> $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/src>
${INCLUDE_PATHS}
"${PROJECT_BINARY_DIR}") "${PROJECT_BINARY_DIR}")
set_target_properties(libhdbpp_timescale_shared_library set_target_properties(libhdbpp_timescale_shared_library
...@@ -173,7 +170,7 @@ target_compile_options(libhdbpp_timescale_shared_library ...@@ -173,7 +170,7 @@ target_compile_options(libhdbpp_timescale_shared_library
add_library(libhdbpp_timescale_static_library STATIC EXCLUDE_FROM_ALL ${SRC_FILES}) add_library(libhdbpp_timescale_static_library STATIC EXCLUDE_FROM_ALL ${SRC_FILES})
target_link_libraries(libhdbpp_timescale_static_library target_link_libraries(libhdbpp_timescale_static_library
PUBLIC ${TDB_FOUND_LIBRARIES} pqxx_static spdlog Threads::Threads PUBLIC ${TDB_FOUND_LIBRARIES} pqxx_static libhdbpp_headers spdlog Threads::Threads
PRIVATE TangoInterfaceLibrary) PRIVATE TangoInterfaceLibrary)
target_include_directories(libhdbpp_timescale_static_library target_include_directories(libhdbpp_timescale_static_library
......
...@@ -3,29 +3,13 @@ ...@@ -3,29 +3,13 @@
[![TangoControls](https://img.shields.io/badge/-Tango--Controls-7ABB45.svg?style=flat&logo=%20data%3Aimage%2Fpng%3Bbase64%2CiVBORw0KGgoAAAANSUhEUgAAACAAAAAkCAYAAADo6zjiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEwAACxMBAJqcGAAAAsFJREFUWIXtl01IFVEYht9zU%2FvTqOxShLowlOgHykWUGEjUKqiocB1FQURB0KJaRdGiaFM7gzZRLWpTq2olhNQyCtpYCP1gNyIoUTFNnxZzRs8dzvw4Q6564XLnfOf73vedc2a%2BmZEKALgHrC3CUUR8CxZFeEoFalsdM4uLmMgFoIlZLJp3A9ZE4S2oKehhlaR1BTnyg2ocnW%2FxsxEDhbYij4EPVncaeASMAavnS%2FwA8NMaqACNQCew3f4as3KZOYh2SuqTVJeQNiFpn6QGSRVjTH9W%2FiThvcCn6H6n4BvQDvQWFT%2BSIDIFDAKfE3KOAQeBfB0XGPeQvgE67P8ZoB44DvTHmFgJdOQRv%2BUjc%2BavA9siNTWemgfA3TwGquCZ3w8szFIL1ALngIZorndvgJOR0GlP2gtJkzH%2Bd0fGFxW07NqY%2FCrx5QRXcYjbCbmxF1dkBSbi8kpACah3Yi2Sys74cVyxMWY6bk5BTwgRe%2BYlSzLmxNpU3aBeJogk4XWWpJKUeiap3RJYCpQj4QWZDQCuyIAk19Auj%2BAFYGZZjTGjksaBESB8P9iaxUBIaJzjZcCQcwHdj%2BS2Al0xPOeBYYKHk4vfmQ3Y8YkIwRUb7wQGU7j2ePrA1URx93ayd8UpD8klyPbSQfCOMIO05MbI%2BDvwBbjsMdGTwlX21AAMZzEerkaI9zFkP4AeYCPBg6gNuEb6I%2FthFgN1KSQupqzoRELOSed4DGiJala1UmOMr2U%2Bl%2FTWEy9Japa%2Fy41IWi%2FJ3d4%2FkkaAw0Bz3AocArqApwTvet3O3GbgV8qqjAM7bf4N4KMztwTodcYVyelywKSCD5V3xphNXoezuTskNSl4bgxJ6jPGVJJqbN0aSV%2Bd0M0aO7FCs19Jo2lExphXaTkxdRVgQFK7DZVDZ8%2BcpdmQh3wuILh7ut3AEyt%2B51%2BL%2F0cUfwFOX0t0StltmQAAAABJRU5ErkJggg%3D%3D)](http://www.tango-controls.org) [![License: GPL v3](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0) [![](https://img.shields.io/github/release/tango-controls-hdbpp/libhdbpp-timescale.svg)](https://github.com/tango-controls-hdbpp/libhdbpp-timescale/releases) 
[![TangoControls](https://img.shields.io/badge/-Tango--Controls-7ABB45.svg?style=flat&logo=%20data%3Aimage%2Fpng%3Bbase64%2CiVBORw0KGgoAAAANSUhEUgAAACAAAAAkCAYAAADo6zjiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEwAACxMBAJqcGAAAAsFJREFUWIXtl01IFVEYht9zU%2FvTqOxShLowlOgHykWUGEjUKqiocB1FQURB0KJaRdGiaFM7gzZRLWpTq2olhNQyCtpYCP1gNyIoUTFNnxZzRs8dzvw4Q6564XLnfOf73vedc2a%2BmZEKALgHrC3CUUR8CxZFeEoFalsdM4uLmMgFoIlZLJp3A9ZE4S2oKehhlaR1BTnyg2ocnW%2FxsxEDhbYij4EPVncaeASMAavnS%2FwA8NMaqACNQCew3f4as3KZOYh2SuqTVJeQNiFpn6QGSRVjTH9W%2FiThvcCn6H6n4BvQDvQWFT%2BSIDIFDAKfE3KOAQeBfB0XGPeQvgE67P8ZoB44DvTHmFgJdOQRv%2BUjc%2BavA9siNTWemgfA3TwGquCZ3w8szFIL1ALngIZorndvgJOR0GlP2gtJkzH%2Bd0fGFxW07NqY%2FCrx5QRXcYjbCbmxF1dkBSbi8kpACah3Yi2Sys74cVyxMWY6bk5BTwgRe%2BYlSzLmxNpU3aBeJogk4XWWpJKUeiap3RJYCpQj4QWZDQCuyIAk19Auj%2BAFYGZZjTGjksaBESB8P9iaxUBIaJzjZcCQcwHdj%2BS2Al0xPOeBYYKHk4vfmQ3Y8YkIwRUb7wQGU7j2ePrA1URx93ayd8UpD8klyPbSQfCOMIO05MbI%2BDvwBbjsMdGTwlX21AAMZzEerkaI9zFkP4AeYCPBg6gNuEb6I%2FthFgN1KSQupqzoRELOSed4DGiJala1UmOMr2U%2Bl%2FTWEy9Japa%2Fy41IWi%2FJ3d4%2FkkaAw0Bz3AocArqApwTvet3O3GbgV8qqjAM7bf4N4KMztwTodcYVyelywKSCD5V3xphNXoezuTskNSl4bgxJ6jPGVJJqbN0aSV%2Bd0M0aO7FCs19Jo2lExphXaTkxdRVgQFK7DZVDZ8%2BcpdmQh3wuILh7ut3AEyt%2B51%2BL%2F0cUfwFOX0t0StltmQAAAABJRU5ErkJggg%3D%3D)](http://www.tango-controls.org) [![License: GPL v3](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0) [![](https://img.shields.io/github/release/tango-controls-hdbpp/libhdbpp-timescale.svg)](https://github.com/tango-controls-hdbpp/libhdbpp-timescale/releases)
- [libhdbpp-timescale](#libhdbpp-timescale) - [libhdbpp-timescale](#libhdbpp-timescale)
- [v0.9.0 To v0.10.0 Update](#v090-to-v0100-update) - [v0.9.0 To v0.10.0 Update](#v090-To-v0100-Update)
- [Cloning](#cloning) - [Cloning](#Cloning)
- [Bug Reports + Feature Requests](#bug-reports--feature-requests) - [Bug Reports + Feature Requests](#Bug-Reports--Feature-Requests)
- [Documentation](#documentation) - [Documentation](#Documentation)
- [Building](#building) - [Building](#Building)
- [Dependencies](#dependencies) - [Installing](#Installing)
- [Toolchain Dependencies](#toolchain-dependencies) - [License](#License)
- [Build Dependencies](#build-dependencies)
- [Building Process](#building-process)
- [Ubuntu](#ubuntu)
- [Build Flags](#build-flags)
- [Standard CMake Flags](#standard-cmake-flags)
- [Project Flags](#project-flags)
- [Running Tests](#running-tests)
- [Unit Tests](#unit-tests)
- [Benchmark Tests](#benchmark-tests)
- [Installing](#installing)
- [System Dependencies](#system-dependencies)
- [Installation](#installation)
- [Configuration](#configuration)
- [Library Configuration Parameters](#library-configuration-parameters)
- [Configuration Example](#configuration-example)
- [License](#license)
HDB++ backend library for the TimescaleDb extension to Postgresql. This library is loaded by libhdbpp to archive events from a Tango Controls system. Currently in a pre v1 release phase. HDB++ backend library for the TimescaleDb extension to Postgresql. This library is loaded by libhdbpp to archive events from a Tango Controls system. Currently in a pre v1 release phase.
...@@ -62,215 +46,11 @@ Please file the bug reports and feature requests in the issue tracker ...@@ -62,215 +46,11 @@ Please file the bug reports and feature requests in the issue tracker
## Building ## Building
To build the shared library please read the following. See [build.md](doc/build.md) in the doc folder
### Dependencies
The project has two types of dependencies, those required by the toolchain, and those to do the actual build. Other dependencies are integrated directly into the project as submodules. The following thirdparty modules exists:
* libpqxx - Modern C++ Postgresql library (submodule)
* spdlog - Logging system (submodule)
* Catch2 - Unit test subsystem (submodule)
* libhdbpp - Configuration can now fetch [original](https://github.com/tango-controls-hdbpp/libhdbpp) to aid development. See build flags.
#### Toolchain Dependencies
If wishing to build the project, ensure the following dependencies are met:
* CMake 3.11 or higher (for FetchContent)
* C++14 compatible compiler (code base is using c++14)
#### Build Dependencies
Ensure the development version of the dependencies are installed. These are as follows:
* Tango Controls 9 or higher development headers and libraries
* omniORB release 4 or higher development headers and libraries
* libzmq3-dev or libzmq5-dev
* libpq-dev - Postgres C development library
### Building Process
To compile this library, first ensure it has been recursively cloned so all submodules are present in /thirdparty. The build system uses pkg-config to find some dependencies, for example Tango. If Tango is not installed to a standard location, set PKG_CONFIG_PATH, i.e.
```bash
export PKG_CONFIG_PATH=/non/standard/tango/install/location
```
Then to build just the library:
```bash
mkdir -p build
cd build
cmake ..
make
```
The pkg-config path can also be set with the cmake argument CMAKE_PREFIX_PATH. This can be set on the command line at configuration time, i.e.:
```bash
...
cmake -DCMAKE_PREFIX_PATH=/non/standard/tango/install/location ..
...
```
#### Ubuntu
When using Postgres from the Ubuntu repositories, it appears to install its development libraries in a slightly different location. Some info on this issue [here](https://gitlab.kitware.com/cmake/cmake/issues/17223). In this case, we set the PostgreSQL_TYPE_INCLUDE_DIR variable directly when calling cmake:
```
cmake -DPostgreSQL_TYPE_INCLUDE_DIR=/usr/include/postgresql/ ..
```
This should replace the call to cmake in the previous section.
### Build Flags
The following build flags are available
#### Standard CMake Flags
The following is a list of common useful CMake flags and their use:
| Flag | Setting | Description |
|------|-----|-----|
| CMAKE_INSTALL_PREFIX | PATH | Standard CMake flag to modify the install prefix. |
| CMAKE_INCLUDE_PATH | PATH[S] | Standard CMake flag to add include paths to the search path. |
| CMAKE_LIBRARY_PATH | PATH[S] | Standard CMake flag to add paths to the library search path |
| CMAKE_BUILD_TYPE | Debug/Release | Build type to produce |
#### Project Flags
| Flag | Setting | Default | Description |
|------|-----|-----|-----|
| BUILD_UNIT_TESTS | ON/OFF | OFF | Build unit tests |
| BUILD_BENCHMARK_TESTS | ON/OFF | OFF | Build benchmark tests (Forces a Release build) |
| ENABLE_CLANG | ON/OFF | OFF | Clang code static analysis, readability, and cppcore guideline enforcement |
| FETCH_LIBHDBPP | ON/OFF | OFF | Enable to have the build fetch and use a local version of libhdbpp |
| FETCH_LIBHDBPP_TAG | | master | When FETCH_LIBHDBPP is enabled, this is the git tag to fetch |
### Running Tests
#### Unit Tests
The project has extensive unit tests to ensure it is functioning as expected. Build the project with testing enabled:
```bash
mkdir -p build
cd build
cmake -DBUILD_UNIT_TESTS=ON ..
make
```
To run all unit tests, a postgresql database node is required with the project schema loaded up. There is a default connection string inside test/TestHelpers.hpp:
```
user=postgres host=localhost port=5432 dbname=hdb password=password
```
If you run the hdb timescale docker image associated with this project locally then this will connect automatically. If you wish to use a different database, edit the string in test/TestHelpers.hpp.
To run all tests:
```bash
./test/unit-tests
```
To look at the available tests and tags, should you wish to run a subset of the test suite (for example, you do not have a postgresql node to test against), then tests can be listed:
```bash
./bin/unit-tests --list-tests
```
Or:
```bash
./bin/unit-tests --list-tags
```
To see more options for the unit-test command line binary:
```bash
./bin/unit-tests --help
```
#### Benchmark Tests
These are a work in progress to explore future optimisation points. If built, they can be run as follows:
```bash
mkdir -p build
cd build
cmake -DBUILD_BENCHMARK_TESTS=ON ..
make
```
```bash
./benchmark/benchmark-tests
```
## Installing ## Installing
All submodules are combined into the final library for ease of deployment. This means just the libhdbpp-timescale.so binary needs deploying to the target system. See [install.md](doc/install.md) in the doc folder
### System Dependencies
The running system requires libpq5 installed to support the calls Postgresql. On Debian/Ubuntu this can be deployed as follows:
```bash
sudo apt-get install libpq5
```
### Installation
After the build has completed, simply run:
```
sudo make install
```
The shared library will be installed to /usr/local/lib on Debian/Ubuntu systems.
## Configuration
### Library Configuration Parameters
Configuration parameters are as follows:
| Parameter | Mandatory | Default | Description |
|------|-----|-----|-----|
| libname | true | None | Must be "libhdb++timescale.so" |
| connect_string | true | None | Postgres connection string, eg user=postgres host=localhost port=5432 dbname=hdb password=password |
| logging_level | false | error | Logging level. See table below |
| log_file | false | false | Enable logging to file |
| log_console | false | false | Enable logging to the console |
| log_syslog | false | false | Enable logging to syslog |
| log_file_name | false | None | When logging to file, this is the path and name of file to use. Ensure the path exists otherwise this is an error conditions. |
The logging_level parameter is case insensitive. Logging levels are as follows:
| Level | Description |
|------|-----|
| error | Log only error level events (recommended unless debugging) |
| warning | Log only warning level events |
| info | Log info level events and above |
| debug | Log debug level events and above. Good for early install debugging |
| trace | Trace level logging. Excessive level of debug, good for involved debugging |
| disabled | Disable logging subsystem |
### Configuration Example
Short example LibConfiguration property value on an EventSubscriber or ConfigManager. You will HAVE to change the various parts to match your system:
```bash
connect_string=user=hdb-user password=password host=hdb-database port=5432 dbname=hdb
logging_level=debug
log_file=true
log_syslog=false
log_console=false
libname=libhdb++timescale.so
log_file_name=/tmp/hdb/es-name.log
```
## License ## License
......
-- Configure the CLUSTER index for every attribute value table so that the
-- physical row order follows the (att_conf_id, data_time) composite index,
-- which matches the access pattern used when reading back archived history.
-- ALTER TABLE ... CLUSTER ON only records the index; it does not move rows.
ALTER TABLE att_scalar_devboolean CLUSTER ON att_scalar_devboolean_att_conf_id_data_time_idx;
ALTER TABLE att_array_devboolean CLUSTER ON att_array_devboolean_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devuchar CLUSTER ON att_scalar_devuchar_att_conf_id_data_time_idx;
ALTER TABLE att_array_devuchar CLUSTER ON att_array_devuchar_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devshort CLUSTER ON att_scalar_devshort_att_conf_id_data_time_idx;
ALTER TABLE att_array_devshort CLUSTER ON att_array_devshort_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devushort CLUSTER ON att_scalar_devushort_att_conf_id_data_time_idx;
ALTER TABLE att_array_devushort CLUSTER ON att_array_devushort_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devlong CLUSTER ON att_scalar_devlong_att_conf_id_data_time_idx;
ALTER TABLE att_array_devlong CLUSTER ON att_array_devlong_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devulong CLUSTER ON att_scalar_devulong_att_conf_id_data_time_idx;
ALTER TABLE att_array_devulong CLUSTER ON att_array_devulong_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devlong64 CLUSTER ON att_scalar_devlong64_att_conf_id_data_time_idx;
ALTER TABLE att_array_devlong64 CLUSTER ON att_array_devlong64_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devulong64 CLUSTER ON att_scalar_devulong64_att_conf_id_data_time_idx;
ALTER TABLE att_array_devulong64 CLUSTER ON att_array_devulong64_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devfloat CLUSTER ON att_scalar_devfloat_att_conf_id_data_time_idx;
ALTER TABLE att_array_devfloat CLUSTER ON att_array_devfloat_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devdouble CLUSTER ON att_scalar_devdouble_att_conf_id_data_time_idx;
ALTER TABLE att_array_devdouble CLUSTER ON att_array_devdouble_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devstring CLUSTER ON att_scalar_devstring_att_conf_id_data_time_idx;
ALTER TABLE att_array_devstring CLUSTER ON att_array_devstring_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devstate CLUSTER ON att_scalar_devstate_att_conf_id_data_time_idx;
ALTER TABLE att_array_devstate CLUSTER ON att_array_devstate_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devencoded CLUSTER ON att_scalar_devencoded_att_conf_id_data_time_idx;
ALTER TABLE att_array_devencoded CLUSTER ON att_array_devencoded_att_conf_id_data_time_idx;
ALTER TABLE att_scalar_devenum CLUSTER ON att_scalar_devenum_att_conf_id_data_time_idx;
ALTER TABLE att_array_devenum CLUSTER ON att_array_devenum_att_conf_id_data_time_idx;
-- Perform the initial physical reordering of each table using the CLUSTER
-- index configured above. CLUSTER takes an ACCESS EXCLUSIVE lock on each
-- table while it rewrites it, so run this during a maintenance window.
CLUSTER att_scalar_devboolean;
CLUSTER att_array_devboolean;
CLUSTER att_scalar_devuchar;
CLUSTER att_array_devuchar;
CLUSTER att_scalar_devshort;
CLUSTER att_array_devshort;
CLUSTER att_scalar_devushort;
CLUSTER att_array_devushort;
CLUSTER att_scalar_devlong;
CLUSTER att_array_devlong;
CLUSTER att_scalar_devulong;
CLUSTER att_array_devulong;
CLUSTER att_scalar_devlong64;
CLUSTER att_array_devlong64;
CLUSTER att_scalar_devulong64;
CLUSTER att_array_devulong64;
CLUSTER att_scalar_devfloat;
CLUSTER att_array_devfloat;
CLUSTER att_scalar_devdouble;
CLUSTER att_array_devdouble;
CLUSTER att_scalar_devstring;
CLUSTER att_array_devstring;
CLUSTER att_scalar_devstate;
CLUSTER att_array_devstate;
CLUSTER att_scalar_devencoded;
CLUSTER att_array_devencoded;
CLUSTER att_scalar_devenum;
CLUSTER att_array_devenum;
\ No newline at end of file
This diff is collapsed.
-- Role and user setup for the hdb database.
-- Creates two group roles (readonly, readwrite) with table/sequence
-- privileges on the public schema, then creates the login users used by
-- the HDB++ tools and grants them the appropriate group role.
-- Roles
CREATE ROLE readonly;
CREATE ROLE readwrite;
-- Permissions - readonly
GRANT CONNECT ON DATABASE hdb TO readonly;
GRANT USAGE ON SCHEMA public TO readonly;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO readonly;
-- Also cover tables created in the future, not just existing ones
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO readonly;
-- Permissions - readwrite
GRANT CONNECT ON DATABASE hdb TO readwrite;
GRANT USAGE ON SCHEMA public TO readwrite;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO readwrite;
-- Sequence access is needed so inserts can consume serial/identity values
GRANT USAGE ON ALL SEQUENCES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE ON SEQUENCES TO readwrite;
GRANT ALL ON SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO readwrite;
-- Users
-- NOTE(review): the 'hdbpp' passwords below are placeholders — change them
-- for any real deployment.
CREATE ROLE hdb_cfg_man WITH LOGIN PASSWORD 'hdbpp';
GRANT readwrite TO hdb_cfg_man;
CREATE ROLE hdb_event_sub WITH LOGIN PASSWORD 'hdbpp';
GRANT readwrite TO hdb_event_sub;
CREATE ROLE hdb_java_reporter WITH LOGIN PASSWORD 'hdbpp';
GRANT readonly TO hdb_java_reporter;
\ No newline at end of file
# Table of Contents
The documentation is purely about getting the shared library running on a correctly configured database. Setup of the TimescaleDb cluster and its stack is left to the user.
- [Table of Contents](#Table-of-Contents)
- [About](#About)
- [Building and Installation](#Building-and-Installation)
- [DB Schema](#DB-Schema)
- [Configuration](#Configuration)
## About
The overview is in the main project [README](../README.md).
## Building and Installation
* [Build](build.md) instructions.
* [Installation](install.md) guidelines.
## DB Schema
* [Schema](db-schema-config) guidelines and setup.
## Configuration
* [Configuration](configuration) parameter details.
\ No newline at end of file
# Build Instructions
To build the shared library please read the following.
## Dependencies
The project has two types of dependencies, those required by the toolchain, and those to do the actual build. Other dependencies are integrated directly into the project as submodules. The following thirdparty modules exists:
* libpqxx - Modern C++ Postgresql library (Submodule)
* spdlog - Logging system (Submodule)
* Catch2 - Unit test subsystem (Submodule)
* libhdbpp - Part of the hdb++ library loading chain (Modified version of [original](https://github.com/tango-controls-hdbpp/libhdbpp) project. This will be pushed back to the original repository in time)
### Toolchain Dependencies
If wishing to build the project, ensure the following dependencies are met:
* CMake 3.6 or higher
* C++14 compatible compiler (code base is using c++14)
### Build Dependencies
Ensure the development version of the dependencies are installed. These are as follows:
* Tango Controls 9 or higher development headers and libraries
* omniORB release 4 or higher development headers and libraries
* libzmq3-dev or libzmq5-dev
* libpq-dev - Postgres C development library
## Building and Installation
To compile this library, first ensure it has been recursively cloned so all submodules are present in /thirdparty. The build system uses pkg-config to find some dependencies, for example Tango. If Tango is not installed to a standard location, set PKG_CONFIG_PATH, i.e.
```bash
export PKG_CONFIG_PATH=/non/standard/tango/install/location
```
Then to build just the library:
```bash
mkdir -p build
cd build
cmake ..
make
```
The pkg-config path can also be set with the cmake argument CMAKE_PREFIX_PATH. This can be set on the command line at configuration time, i.e.:
```bash
...
cmake -DCMAKE_PREFIX_PATH=/non/standard/tango/install/location ..
...
```
## Build Flags
The following build flags are available
### Standard CMake Flags
The following is a list of common useful CMake flags and their use:
| Flag | Setting | Description |
|------|-----|-----|
| CMAKE_INSTALL_PREFIX | PATH | Standard CMake flag to modify the install prefix. |
| CMAKE_INCLUDE_PATH | PATH[S] | Standard CMake flag to add include paths to the search path. |
| CMAKE_LIBRARY_PATH | PATH[S] | Standard CMake flag to add paths to the library search path |
| CMAKE_BUILD_TYPE | Debug/Release | Build type to produce |
### Project Flags
| Flag | Setting | Default | Description |
|------|-----|-----|-----|
| BUILD_UNIT_TESTS | ON/OFF | OFF | Build unit tests |
| BUILD_BENCHMARK_TESTS | ON/OFF | OFF | Build benchmark tests (Forces a Release build) |
| ENABLE_CLANG | ON/OFF | OFF | Clang code static analysis, readability, and cppcore guideline enforcement |
## Running Tests
### Unit Tests
The project has extensive unit tests to ensure it is functioning as expected. Build the project with testing enabled:
```bash
mkdir -p build
cd build
cmake -DBUILD_UNIT_TESTS=ON ..
make
```
To run all unit tests, a postgresql database node is required with the project schema loaded up. There is a default connection string inside test/TestHelpers.hpp:
```
user=postgres host=localhost port=5432 dbname=hdb password=password
```
If you run the hdb timescale docker image associated with this project locally then this will connect automatically. If you wish to use a different database, edit the string in test/TestHelpers.hpp.
To run all tests:
```bash
./test/unit-tests
```
To look at the available tests and tags, should you wish to run a subset of the test suite (for example, you do not have a postgresql node to test against), then tests can be listed:
```bash
./bin/unit-tests --list-tests
```
Or:
```bash
./bin/unit-tests --list-tags
```
To see more options for the unit-test command line binary:
```bash
./bin/unit-tests --help
```
### Benchmark Tests
These are a work in progress to explore future optimisation points. If built, they can be run as follows:
```bash
mkdir -p build
cd build
cmake -DBUILD_BENCHMARK_TESTS=ON ..
make
```
```bash
./benchmark/benchmark-tests
```
\ No newline at end of file
# Configuration
## Library Configuration Parameters
Configuration parameters are as follows:
| Parameter | Mandatory | Default | Description |
|------|-----|-----|-----|
| libname | true | None | Must be "libhdb++timescale.so" |
| connect_string | true | None | Postgres connection string, eg user=postgres host=localhost port=5432 dbname=hdb password=password |
| logging_level | false | error | Logging level. See table below |
| log_file | false | false | Enable logging to file |
| log_console | false | false | Enable logging to the console |
| log_syslog | false | false | Enable logging to syslog |
| log_file_name | false | None | When logging to file, this is the path and name of file to use. Ensure the path exists otherwise this is an error conditions. |
The logging_level parameter is case insensitive. Logging levels are as follows:
| Level | Description |
|------|-----|
| error | Log only error level events (recommended unless debugging) |
| warning | Log only warning level events |
| info | Log info level events and above |
| debug | Log debug level events and above. Good for early install debugging |
| trace | Trace level logging. Excessive level of debug, good for involved debugging |
| disabled | Disable logging subsystem |
## Configuration Example
Short example LibConfiguration property value on an EventSubscriber or ConfigManager. You will HAVE to change the various parts to match your system:
```
connect_string=user=hdb-user password=password host=hdb-database port=5432 dbname=hdb
logging_level=debug
log_file=true
log_syslog=false
log_console=false
libname=libhdb++timescale.so
log_file_name=/tmp/hdb/es-name.log
```
\ No newline at end of file
# Database Schema Configuration
Schema setup and management is a very important aspect to running the HDB++ system with TimescaleDb. The following presents guidelines and a setup plan, but it is not exhaustive and additional information is welcome.
Some of the information assumes familiarity with TimescaleDb terms and technologies. Please refer to the TimescaleDb [documentation](https://www.timescale.com/) for more information.
- [Database Schema Configuration](#Database-Schema-Configuration)
- [Hyperchunk Sizes](#Hyperchunk-Sizes)
- [Schema Import](#Schema-Import)
- [Admin User](#Admin-User)
- [Table Creation](#Table-Creation)
- [Users](#Users)
- [Clean-up](#Clean-up)
- [Clustering](#Clustering)
## Hyperchunk Sizes
The [schema](../db-schema/schema.sql) file has default values set for all hyper table chunk sizes. It is assumed initial deployment data load will be smaller than the final fully operational system, so chunk sizes are as follows:
- 28 days for all data tables, except:
- 14 days for att_scalar_devdouble, since this appears to be used more often than other tables.
These values can, and should be, adjusted to the deployment situation. Please see the TimescaleDb [documentation](https://www.timescale.com/) for information on choosing chunk sizes.
Important: These are initial values, the expectation is the database will be monitored and values adjusted as it takes on its full load.
## Schema Import
General setup steps.
### Admin User
Rather than create and manage the tables via a superuser, we create an admin user and have them create the tables:
```sql
CREATE ROLE hdb_admin WITH LOGIN PASSWORD 'hdbpp';
ALTER USER hdb_admin CREATEDB;
ALTER USER hdb_admin CREATEROLE;
ALTER USER hdb_admin SUPERUSER;
```
Note the SUPERUSER role will be stripped after the tables are set up.
### Table Creation
Now import schema.sql as the hdb_admin user, using psql:
```bash
psql -U hdb_admin -h HOST -p PORT -f schema.sql -d template1
```
Note: we use database template1 since hdb_admin currently has no database to connect to.
We should now have a hdb database owned by hdb_admin.
### Users
Next we need to set up the users (this may require some improvements, pull requests welcome). Connect as a superuser and create two roles, a readonly and a readwrite role:
```sql
-- Roles
CREATE ROLE readonly;
CREATE ROLE readwrite;
-- Permissions - readonly
GRANT CONNECT ON DATABASE hdb TO readonly;
GRANT USAGE ON SCHEMA public TO readonly;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO readonly;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO readonly;
-- Permissions - readwrite
GRANT CONNECT ON DATABASE hdb TO readwrite;
GRANT USAGE ON SCHEMA public TO readwrite;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO readwrite;
GRANT USAGE ON ALL SEQUENCES IN SCHEMA public TO readwrite;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT USAGE ON SEQUENCES TO readwrite;
GRANT ALL ON SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO readwrite;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO readwrite;
-- Users
CREATE ROLE hdb_cfg_man WITH LOGIN PASSWORD 'hdbpp';
GRANT readwrite TO hdb_cfg_man;
CREATE ROLE hdb_event_sub WITH LOGIN PASSWORD 'hdbpp';
GRANT readwrite TO hdb_event_sub;
CREATE ROLE hdb_java_reporter WITH LOGIN PASSWORD 'hdbpp';
GRANT readonly TO hdb_java_reporter;
```
Here we created three users that external applications will use to connect to the database. You may create as many users, in whatever roles, as you want.
## Clean-up
Finally, strip the SUPERUSER trait from hdb_admin:
```sql
ALTER USER hdb_admin NOSUPERUSER;
```
## Clustering
To get the levels of performance required to make the solution viable we MUST cluster on the composite index of each data table. The file [cluster.sql](../db-schema/cluster.sql) contains the commands that must be run after the database has been set up.
Without this step, select performance will degrade on large tables.
As data is added, the tables will require the new data to be clustered on the index. You may choose the period and time when to do this. The process does lock the tables. Options:
- Manually
- Cron job
TimescaleDb supports a more fine grained cluster process. A tool is being developed to utilize this; it will run as a process that clusters on the index at regular intervals.
# Installation Instructions
All submodules are combined into the final library for ease of deployment. This means just the libhdbpp-timescale.so binary needs deploying to the target system.
## System Dependencies
The running system requires libpq5 to be installed to support calls to PostgreSQL. On Debian/Ubuntu this can be deployed as follows:
```bash
sudo apt-get install libpq5
```
## Installation
After the build has completed, simply run:
```
sudo make install
```
The shared library will be installed to /usr/local/lib on Debian/Ubuntu systems.
\ No newline at end of file
/* Copyright (C) 2014-2017
Elettra - Sincrotrone Trieste S.C.p.A.
Strada Statale 14 - km 163,5 in AREA Science Park
34149 Basovizza, Trieste, Italy.
This file is part of libhdb++.
libhdb++ is free software: you can redistribute it and/or modify
it under the terms of the Lesser GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libhdb++ is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Lesser
GNU General Public License for more details.
You should have received a copy of the Lesser GNU General Public License
along with libhdb++. If not, see <http://www.gnu.org/licenses/>. */
#ifndef _HDBPP_ABSTRACTDB_H
#define _HDBPP_ABSTRACTDB_H
#include <tango.h>
#include <tuple>
#include <vector>
namespace hdbpp
{
#define DB_INSERT 0
#define DB_START 1
#define DB_STOP 2
#define DB_REMOVE 3
#define DB_INSERT_PARAM 4
#define DB_PAUSE 5
#define DB_UPDATETTL 6
#define DB_ADD 7
// Data struct used to pass information to the backend: describes the Tango
// type configuration of the attribute an event belongs to.
typedef struct HdbEventDataType_
{
    // fully qualified attribute name
    std::string attr_name;
    // maximum x/y dimensions of the attribute data (spectrum/image sizing)
    int max_dim_x;
    int max_dim_y;
    // Tango data type of the attribute (a Tango::CmdArgType value -- confirm with backend usage)
    int data_type;
    // scalar, spectrum or image
    Tango::AttrDataFormat data_format;
    // read/write access of the attribute (a Tango::AttrWriteType value -- confirm with backend usage)
    int write_type;
} HdbEventDataType;
// Optional features a backend may offer. Query support at runtime via
// AbstractDB::supported() before relying on a feature.
enum class HdbppFeatures
{
    // Time to live feature. Attributes can be timed out by the database based
    // on the configured ttl value
    TTL,
    // Backend supports passing of multiple events and batching them into
    // the database. This is a performance improvement.
    BATCH_INSERTS,
};
// Abstract base class that backends are required to implement when offering
// a storage backend to the hdb++ system
class AbstractDB
{
public:
    virtual ~AbstractDB() {}

    // Inserts an attribute archive event for the EventData into the database. If the attribute
    // does not exist in the database, then an exception will be raised. If the attr_value
    // field of the data parameter is empty, then the attribute is in an error state
    // and the error message will be archived.
    virtual void insert_event(Tango::EventData *event, const HdbEventDataType &data_type) = 0;

    // Insert multiple attribute archive events. Any attributes that do not exist will
    // cause an exception. On failure the fall back is to insert events individually.
    virtual void insert_events(std::vector<std::tuple<Tango::EventData *, HdbEventDataType>> events) = 0;

    // Inserts the attribute configuration data (Tango Attribute Configuration event data)
    // into the database. The attribute must be configured to be stored in HDB++,
    // otherwise an exception will be thrown.
    virtual void insert_param_event(Tango::AttrConfEventData *param_event, const HdbEventDataType &data_type) = 0;

    // Add an attribute to the database. Trying to add an attribute that already exists will
    // cause an exception.
    virtual void add_attribute(const std::string &name, int type, int format, int write_type) = 0;

    // Update the attribute ttl. The attribute must have been configured to be stored in
    // HDB++, otherwise an exception is raised.
    virtual void update_ttl(const std::string &name, unsigned int ttl) = 0;

    // Inserts a history event (one of the DB_* defines above, e.g. DB_START)
    // for the attribute name passed to the function. The attribute
    // must have been configured to be stored in HDB++, otherwise an exception is raised.
    virtual void insert_history_event(const std::string &name, unsigned char event) = 0;

    // Check what hdbpp features this library supports.
    virtual bool supported(HdbppFeatures feature) = 0;
};
// Abstract factory class that a backend must implement to help create an
// instance of the storage class deriving from AbstractDB
class DBFactory
{
public:
    // Create a backend database object and return it as a raw pointer;
    // ownership passes to the caller.
    //
    // @param id Identifier for the backend instance
    // @param configuration Backend specific configuration strings
    //
    // NOTE: std::string is fully qualified here; a public header must not
    // rely on a "using namespace std" leaking in from other includes.
    virtual AbstractDB *create_db(const std::string &id, const std::vector<std::string> &configuration) = 0;

    virtual ~DBFactory() {}
};
} // namespace hdbpp
// Plugin entry point. extern "C" disables C++ name mangling so the host
// process can resolve getDBFactory() by name from the shared library
// (presumably via dlopen()/dlsym() -- confirm against the loader code) and
// obtain the factory used to create AbstractDB instances.
extern "C"
{
    // function pointer type matching getDBFactory, convenient for casting
    // the result of a symbol lookup
    typedef hdbpp::DBFactory *getDBFactory_t();
    hdbpp::DBFactory *getDBFactory();
}
#endif // _HDBPP_ABSTRACTDB_H
/* Copyright (C) : 2014-2019
European Synchrotron Radiation Facility
BP 220, Grenoble 38043, FRANCE
This file is part of libhdb++timescale.
libhdb++timescale is free software: you can redistribute it and/or modify
it under the terms of the Lesser GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libhdb++timescale is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Lesser
GNU General Public License for more details.
You should have received a copy of the Lesser GNU General Public License
along with libhdb++timescale. If not, see <http://www.gnu.org/licenses/>. */
#ifndef _HDBPP_TIMESCALE_CLIENT_HPP
#define _HDBPP_TIMESCALE_CLIENT_HPP
#include "hdb++/AbstractDB.h"
#include <memory>
#include <string>
#include <vector>
namespace hdbpp
{
// Thin client facade over a backend implementation: every call delegates to
// the AbstractDB instance held in _db.
class HdbClient : public AbstractDB
{
public:
    // Construct the client; id and configuration select and configure the
    // underlying backend.
    //
    // NOTE: std::string is fully qualified here; a public header must not
    // rely on a "using namespace std" leaking in from other includes.
    HdbClient(const std::string &id, const std::vector<std::string> &configuration);
    virtual ~HdbClient() {}

    // Inserts an attribute archive event for the EventData into the database. If the attribute
    // does not exist in the database, then an exception will be raised. If the attr_value
    // field of the data parameter is empty, then the attribute is in an error state
    // and the error message will be archived.
    void insert_event(Tango::EventData *event, const HdbEventDataType &data_type) override;

    // Insert multiple attribute archive events. Any attributes that do not exist will
    // cause an exception. On failure the fall back is to insert events individually.
    void insert_events(std::vector<std::tuple<Tango::EventData *, HdbEventDataType>> events) override;

    // Inserts the attribute configuration data (Tango Attribute Configuration event data)
    // into the database. The attribute must be configured to be stored in HDB++,
    // otherwise an exception will be thrown.
    void insert_param_event(Tango::AttrConfEventData *data, const HdbEventDataType &data_type) override;

    // Add an attribute to the database. Trying to add an attribute that already exists will
    // cause an exception.
    void add_attribute(const std::string &name, int type, int format, int write_type) override;

    // Update the attribute ttl. The attribute must have been configured to be stored in
    // HDB++, otherwise an exception is raised.
    void update_ttl(const std::string &name, unsigned int ttl) override;

    // Inserts a history event for the attribute name passed to the function. The attribute
    // must have been configured to be stored in HDB++, otherwise an exception is raised.
    void insert_history_event(const std::string &name, unsigned char event) override;

    // Check what hdbpp features this library supports.
    bool supported(HdbppFeatures feature) override;

private:
    // concrete backend implementation; all calls above delegate to this object
    std::unique_ptr<AbstractDB> _db;
};
} // namespace hdbpp
#endif // _HDBPP_TIMESCALE_CLIENT_HPP
...@@ -20,17 +20,106 @@ ...@@ -20,17 +20,106 @@
#ifndef _HDBPP_TIMESCALE_HPP #ifndef _HDBPP_TIMESCALE_HPP
#define _HDBPP_TIMESCALE_HPP #define _HDBPP_TIMESCALE_HPP
#include <hdb++/AbstractDB.h> #include <libhdb++/LibHdb++.h>
#include <string> #include <string>
#include <tango.h>
#include <vector> #include <vector>
namespace hdbpp namespace hdbpp
{ {
class HdbppTimescaleDb : public AbstractDB
{
public:
/**
* @brief HdbppTimescaleDb constructor
*
* The configuration parameters must contain the following strings:
*
* @param configuration A list of configuration parameters to start the driver with.
*/
HdbppTimescaleDb(const std::vector<std::string> &configuration);
/**
* @brief Destroy the HdbppTimescaleDb library object
*/
virtual ~HdbppTimescaleDb();
/**
* @brief Insert an attribute archive event into the database
*
* Inserts an attribute archive event for the EventData into the database. If the attribute
* does not exist in the database, then an exception will be raised. If the attr_value
* field of the data parameter if empty, then the attribute is in an error state
* and the error message will be archived.
*
* @param event_data Tango event data about the attribute.
* @param event_data_type HDB event data for the attribute.
* @throw Tango::DevFailed
*/
virtual void insert_Attr(Tango::EventData *event_data, HdbEventDataType event_data_type);
/**
* @brief Inserts the attribute configuration data.
*
* Inserts the attribute configuration data (Tango Attribute Configuration event data)
* into the database. The attribute must be configured to be stored in HDB++,
* otherwise an exception will be thrown.
*
* @param conf_event_data Tango event data about the attribute.
* @param event_data_type HDB event data for the attribute.
* @throw Tango::DevFailed
*/
virtual void insert_param_Attr(Tango::AttrConfEventData *conf_event_data, HdbEventDataType /* event_data_type */);
/**
* @brief Add and configure an attribute in the database.
*
* Trying to reconfigure an existing attribute will result in an exception, and if an
* attribute already exists with the same configuration then the ttl will be updated if
* different.
*
* @param fqdn_attr_name Fully qualified attribute name
* @param type The type of the attribute.
* @param format The format of the attribute.
* @param write_type The read/write access of the type.
* @param ttl The time to live in hour, 0 for infinity
* @throw Tango::DevFailed
*/
virtual void configure_Attr(
std::string fqdn_attr_name, int type, int format, int write_type, unsigned int ttl);
/**
* @brief Update the ttl value for an attribute.
*
* The attribute must have been configured to be stored in HDB++, otherwise an exception
* is raised
*
* @param fqdn_attr_name Fully qualified attribute nam
* @param ttl The time to live in hours, 0 for infinity
* @throw Tango::DevFailed
*/
virtual void updateTTL_Attr(std::string fqdn_attr_name, unsigned int ttl);
/**
* @brief Record a start, Stop, Pause or Remove history event for an attribute.
*
* Inserts a history event for the attribute name passed to the function. The attribute
* must have been configured to be stored in HDB++, otherwise an exception is raised.
* This function will also insert an additional CRASH history event before the START
* history event if the given event parameter is DB_START and if the last history event
* stored was also a START event.
*
* @param fqdn_attr_name Fully qualified attribute name
* @param event
* @throw Tango::DevFailed
*/
virtual void event_Attr(std::string fqdn_attr_name, unsigned char event);
};
class HdbppTimescaleDbFactory : public DBFactory class HdbppTimescaleDbFactory : public DBFactory
{ {
public: public:
// return a new HdbppTimescaleDb object virtual AbstractDB *create_db(std::vector<std::string> configuration);
virtual AbstractDB *create_db(const string &id, const std::vector<std::string> &configuration);
}; };
} // namespace hdbpp } // namespace hdbpp
......
...@@ -59,14 +59,14 @@ void AttributeName::clear() noexcept ...@@ -59,14 +59,14 @@ void AttributeName::clear() noexcept
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::tangoHost() -> const std::string & const string &AttributeName::tangoHost()
{ {
validate(); validate();
if (_tango_host_cache.empty()) if (_tango_host_cache.empty())
{ {
// if tango:// exists on the std::string, strip it off by moving the start in 8 characters // if tango:// exists on the string, strip it off by moving the start in 8 characters
auto start = _fqdn_attr_name.find("tango://") == std::string::npos ? 0 : 8; auto start = _fqdn_attr_name.find("tango://") == string::npos ? 0 : 8;
auto end = _fqdn_attr_name.find('/', start); auto end = _fqdn_attr_name.find('/', start);
_tango_host_cache = _fqdn_attr_name.substr(start, end - start); _tango_host_cache = _fqdn_attr_name.substr(start, end - start);
} }
...@@ -76,17 +76,17 @@ auto AttributeName::tangoHost() -> const std::string & ...@@ -76,17 +76,17 @@ auto AttributeName::tangoHost() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::tangoHostWithDomain() -> const std::string & const string &AttributeName::tangoHostWithDomain()
{ {
validate(); validate();
if (_tango_host_with_domain_cache.empty()) if (_tango_host_with_domain_cache.empty())
{ {
std::string tango_host = tangoHost(); string tango_host = tangoHost();
if (tango_host.find('.') == std::string::npos) if (tango_host.find('.') == string::npos)
{ {
std::string server_name_with_domain; string server_name_with_domain;
auto server_name = tango_host.substr(0, tango_host.find(':', 0)); auto server_name = tango_host.substr(0, tango_host.find(':', 0));
struct addrinfo hints = {}; struct addrinfo hints = {};
...@@ -107,24 +107,18 @@ auto AttributeName::tangoHostWithDomain() -> const std::string & ...@@ -107,24 +107,18 @@ auto AttributeName::tangoHostWithDomain() -> const std::string &
if (result == nullptr) if (result == nullptr)
{ {
spdlog::error("Error: Unable to add domain to tango host {}: getaddrinfo didn't return the canonical " spdlog::error("Error: Unable to add domain to tango host {}: getaddrinfo didn't return the canonical name (result == nullptr)", tango_host);
"name (result == nullptr)",
tango_host);
return tangoHost(); return tangoHost();
} }
if (result->ai_canonname == nullptr) if (result->ai_canonname == nullptr)
{ {
spdlog::error("Error: Unable to add domain to tango host {}: getaddrinfo didn't return the canonical " spdlog::error("Error: Unable to add domain to tango host {}: getaddrinfo didn't return the canonical name (result->ai_canonname == nullptr)", tango_host);
"name (result->ai_canonname == nullptr)",
tango_host);
freeaddrinfo(result); freeaddrinfo(result);
return tangoHost(); return tangoHost();
} }
server_name_with_domain = std::string(result->ai_canonname) + tango_host.substr(tango_host.find(':', 0)); server_name_with_domain = string(result->ai_canonname) + tango_host.substr(tango_host.find(':', 0));
freeaddrinfo(result); // all done with this structure freeaddrinfo(result); // all done with this structure
_tango_host_with_domain_cache = server_name_with_domain; _tango_host_with_domain_cache = server_name_with_domain;
...@@ -140,14 +134,14 @@ auto AttributeName::tangoHostWithDomain() -> const std::string & ...@@ -140,14 +134,14 @@ auto AttributeName::tangoHostWithDomain() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::fullAttributeName() -> const std::string & const string &AttributeName::fullAttributeName()
{ {
validate(); validate();
if (_full_attribute_name_cache.empty()) if (_full_attribute_name_cache.empty())
{ {
// if tango:// exists on the std::string, strip it off by moving the start in 8 characters // if tango:// exists on the string, strip it off by moving the start in 8 characters
auto start = _fqdn_attr_name.find("tango://") == std::string::npos ? 0 : 8; auto start = _fqdn_attr_name.find("tango://") == string::npos ? 0 : 8;
start = _fqdn_attr_name.find('/', start); start = _fqdn_attr_name.find('/', start);
start++; start++;
_full_attribute_name_cache = _fqdn_attr_name.substr(start); _full_attribute_name_cache = _fqdn_attr_name.substr(start);
...@@ -158,7 +152,7 @@ auto AttributeName::fullAttributeName() -> const std::string & ...@@ -158,7 +152,7 @@ auto AttributeName::fullAttributeName() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::domain() -> const std::string & const std::string &AttributeName::domain()
{ {
validate(); validate();
...@@ -170,7 +164,7 @@ auto AttributeName::domain() -> const std::string & ...@@ -170,7 +164,7 @@ auto AttributeName::domain() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::family() -> const std::string & const std::string &AttributeName::family()
{ {
validate(); validate();
...@@ -182,7 +176,7 @@ auto AttributeName::family() -> const std::string & ...@@ -182,7 +176,7 @@ auto AttributeName::family() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::member() -> const std::string & const std::string &AttributeName::member()
{ {
validate(); validate();
...@@ -194,7 +188,7 @@ auto AttributeName::member() -> const std::string & ...@@ -194,7 +188,7 @@ auto AttributeName::member() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::name() -> const std::string & const std::string &AttributeName::name()
{ {
validate(); validate();
...@@ -206,31 +200,31 @@ auto AttributeName::name() -> const std::string & ...@@ -206,31 +200,31 @@ auto AttributeName::name() -> const std::string &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
void AttributeName::setDomainFamilyMemberName(const std::string &full_attr_name) void AttributeName::setDomainFamilyMemberName(const string &full_attr_name)
{ {
auto first_slash = full_attr_name.find('/'); auto first_slash = full_attr_name.find('/');
if (first_slash == std::string::npos) if (first_slash == string::npos)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". There is no slash in attribute name"}; string msg {"Invalid attribute name: " + full_attr_name + ". There is no slash in attribute name"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
auto second_slash = full_attr_name.find('/', first_slash + 1); auto second_slash = full_attr_name.find('/', first_slash + 1);
if (second_slash == std::string::npos) if (second_slash == string::npos)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". There is only one slash in attribute name"}; string msg {"Invalid attribute name: " + full_attr_name + ". There is only one slash in attribute name"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
auto third_slash = full_attr_name.find('/', second_slash + 1); auto third_slash = full_attr_name.find('/', second_slash + 1);
if (third_slash == std::string::npos) if (third_slash == string::npos)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". There are only two slashes in attribute name"}; string msg {"Invalid attribute name: " + full_attr_name + ". There are only two slashes in attribute name"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
...@@ -239,35 +233,35 @@ void AttributeName::setDomainFamilyMemberName(const std::string &full_attr_name) ...@@ -239,35 +233,35 @@ void AttributeName::setDomainFamilyMemberName(const std::string &full_attr_name)
if (last_slash != third_slash) if (last_slash != third_slash)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". Too many slashes provided in attribute name"}; string msg {"Invalid attribute name: " + full_attr_name + ". Too many slashes provided in attribute name"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
if (first_slash == 0) if (first_slash == 0)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". Empty domain"}; string msg {"Invalid attribute name: " + full_attr_name + ". Empty domain"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
if (second_slash - first_slash - 1 == 0) if (second_slash - first_slash - 1 == 0)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". Empty family"}; string msg {"Invalid attribute name: " + full_attr_name + ". Empty family"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
if (third_slash - second_slash - 1 == 0) if (third_slash - second_slash - 1 == 0)
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". Empty member"}; string msg {"Invalid attribute name: " + full_attr_name + ". Empty member"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
if (third_slash + 1 == full_attr_name.length()) if (third_slash + 1 == full_attr_name.length())
{ {
std::string msg {"Invalid attribute name: " + full_attr_name + ". Empty name"}; string msg {"Invalid attribute name: " + full_attr_name + ". Empty name"};
spdlog::error("Error: {}", msg); spdlog::error("Error: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
...@@ -286,7 +280,7 @@ void AttributeName::validate() ...@@ -286,7 +280,7 @@ void AttributeName::validate()
// it means we just tried to execute a complex operation // it means we just tried to execute a complex operation
if (empty()) if (empty())
{ {
std::string msg {"AttributeName is empty."}; string msg {"AttributeName is empty."};
spdlog::error("Failed validation for attribute: {}", msg); spdlog::error("Failed validation for attribute: {}", msg);
Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO); Tango::Except::throw_exception("Invalid Argument", msg, LOCATION_INFO);
} }
...@@ -301,7 +295,7 @@ void AttributeName::print(ostream &os) const ...@@ -301,7 +295,7 @@ void AttributeName::print(ostream &os) const
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::operator=(const AttributeName &other) -> AttributeName & AttributeName &AttributeName::operator=(const AttributeName &other)
{ {
// clear the cache // clear the cache
clear(); clear();
...@@ -313,7 +307,7 @@ auto AttributeName::operator=(const AttributeName &other) -> AttributeName & ...@@ -313,7 +307,7 @@ auto AttributeName::operator=(const AttributeName &other) -> AttributeName &
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeName::operator=(AttributeName &&other) noexcept -> AttributeName & AttributeName &AttributeName::operator=(AttributeName &&other) noexcept
{ {
// clear the cache // clear the cache
clear(); clear();
......
...@@ -45,49 +45,49 @@ public: ...@@ -45,49 +45,49 @@ public:
AttributeName(const AttributeName &attr_name) { *this = attr_name; } AttributeName(const AttributeName &attr_name) { *this = attr_name; }
AttributeName(const std::string &fqdn_attr_name); AttributeName(const std::string &fqdn_attr_name);
auto fqdnAttributeName() const noexcept -> const std::string & { return _fqdn_attr_name; } const std::string &fqdnAttributeName() const noexcept { return _fqdn_attr_name; }
auto fullAttributeName() -> const std::string &; const std::string &fullAttributeName();
// tango host info // tango host info
auto tangoHost() -> const std::string &; const std::string &tangoHost();
auto tangoHostWithDomain() -> const std::string &; const std::string &tangoHostWithDomain();
// attribute name elements // attribute name elements
auto domain() -> const std::string &; const std::string &domain();
auto family() -> const std::string &; const std::string &family();
auto member() -> const std::string &; const std::string &member();
auto name() -> const std::string &; const std::string &name();
// utility functions // utility functions
void set(const std::string &fqdn_attr_name); void set(const std::string &fqdn_attr_name);
void clear() noexcept; void clear() noexcept;
auto empty() const noexcept -> bool { return _fqdn_attr_name.empty(); } bool empty() const noexcept { return _fqdn_attr_name.empty(); }
void print(std::ostream &os) const; void print(std::ostream &os) const;
auto operator==(const AttributeName &other) -> bool { return _fqdn_attr_name == other._fqdn_attr_name; } bool operator==(const AttributeName &other) const { return _fqdn_attr_name == other._fqdn_attr_name; }
auto operator!=(const AttributeName &other) -> bool { return !(_fqdn_attr_name == other._fqdn_attr_name); } bool operator!=(const AttributeName &other) const { return !(_fqdn_attr_name == other._fqdn_attr_name); }
auto operator=(const AttributeName &other) -> AttributeName &; AttributeName &operator=(const AttributeName &other);
auto operator=(AttributeName &&other) noexcept -> AttributeName &; AttributeName &operator=(AttributeName &&other) noexcept;
private: private:
// extract the full attribute name, i.e. domain/family/member/name // extract the full attribute name, i.e. domain/family/member/name
auto getFullAttributeName(const std::string &fqdn_attr_name) -> std::string; std::string getFullAttributeName(const std::string &fqdn_attr_name);
// takes the fqdn and breaks out the various component parts, such // takes the fqdn and breaks out the various component parts, such
// as domain, family etc // as domain, family etc
void setDomainFamilyMemberName(const std::string &full_attr_name); void setDomainFamilyMemberName(const std::string &full_attr_name);
// combine the local domain and tango host as a std::string // combine the local domain and tango host as a string
auto addDomainToTangoHost(const std::string &tango_host) -> std::string; std::string addDomainToTangoHost(const std::string &tango_host);
// check if the AttributeName is empty before executing a complex // check if the AttributeName is empty before executing a complex
// operation, such as returning the tango host // operation, such as returning the tango host
void validate(); void validate();
// the fully qualified domain name std::string // the fully qualified domain name string
std::string _fqdn_attr_name; std::string _fqdn_attr_name;
// each std::string is a cache, and generated only once to save // each string is a cache, and generated only once to save
// on performance // on performance
std::string _full_attribute_name_cache; std::string _full_attribute_name_cache;
std::string _tango_host_cache; std::string _tango_host_cache;
......
...@@ -25,7 +25,7 @@ namespace hdbpp_internal ...@@ -25,7 +25,7 @@ namespace hdbpp_internal
{ {
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
auto AttributeTraits::isValid() const noexcept -> bool bool AttributeTraits::isValid() const noexcept
{ {
// ensure all the type information is valid // ensure all the type information is valid
return _attr_write_type != Tango::WT_UNKNOWN && _attr_format != Tango::FMT_UNKNOWN && return _attr_write_type != Tango::WT_UNKNOWN && _attr_format != Tango::FMT_UNKNOWN &&
......
...@@ -48,42 +48,44 @@ public: ...@@ -48,42 +48,44 @@ public:
~AttributeTraits() = default; ~AttributeTraits() = default;
AttributeTraits(Tango::AttrWriteType write_type, Tango::AttrDataFormat format, Tango::CmdArgType data_type) : AttributeTraits(Tango::AttrWriteType write_type, Tango::AttrDataFormat format, Tango::CmdArgType data_type) :
_attr_write_type(write_type), _attr_format(format), _attr_type(data_type) _attr_write_type(write_type),
_attr_format(format),
_attr_type(data_type)
{} {}
// general validation // general validation
auto isValid() const noexcept -> bool; bool isValid() const noexcept;
auto isInvalid() const noexcept -> bool { return !isValid(); } bool isInvalid() const noexcept { return !isValid(); }
// format type information // format type information
auto isArray() const noexcept -> bool { return _attr_format == Tango::SPECTRUM; } bool isArray() const noexcept { return _attr_format == Tango::SPECTRUM; }
auto isScalar() const noexcept -> bool { return _attr_format == Tango::SCALAR; } bool isScalar() const noexcept { return _attr_format == Tango::SCALAR; }
auto isImage() const noexcept -> bool { return _attr_format == Tango::IMAGE; } bool isImage() const noexcept { return _attr_format == Tango::IMAGE; }
// write type information // write type information
auto isReadOnly() const noexcept -> bool { return _attr_write_type == Tango::READ; } bool isReadOnly() const noexcept { return _attr_write_type == Tango::READ; }
auto isWriteOnly() const noexcept -> bool { return _attr_write_type == Tango::WRITE; } bool isWriteOnly() const noexcept { return _attr_write_type == Tango::WRITE; }
auto isReadWrite() const noexcept -> bool { return _attr_write_type == Tango::READ_WRITE; } bool isReadWrite() const noexcept { return _attr_write_type == Tango::READ_WRITE; }
auto isReadWithWrite() const noexcept -> bool { return _attr_write_type == Tango::READ_WITH_WRITE; } bool isReadWithWrite() const noexcept { return _attr_write_type == Tango::READ_WITH_WRITE; }
auto hasReadData() const noexcept -> bool { return isReadOnly() || isReadWrite() || isReadWithWrite(); } bool hasReadData() const noexcept { return isReadOnly() || isReadWrite() || isReadWithWrite(); }
auto hasWriteData() const noexcept -> bool { return isWriteOnly() || isReadWrite() || isReadWithWrite(); } bool hasWriteData() const noexcept { return isWriteOnly() || isReadWrite() || isReadWithWrite(); }
// type access // type access
auto type() const noexcept -> Tango::CmdArgType { return _attr_type; } Tango::CmdArgType type() const noexcept { return _attr_type; }
auto writeType() const noexcept -> Tango::AttrWriteType { return _attr_write_type; } Tango::AttrWriteType writeType() const noexcept { return _attr_write_type; }
auto formatType() const noexcept -> Tango::AttrDataFormat { return _attr_format; } Tango::AttrDataFormat formatType() const noexcept { return _attr_format; }
// various utilities // various utilities
auto operator=(const AttributeTraits &) -> AttributeTraits & = default; AttributeTraits &operator=(const AttributeTraits &) = default;
auto operator=(AttributeTraits &&) -> AttributeTraits & = default; AttributeTraits &operator=(AttributeTraits &&) = default;
auto operator==(const AttributeTraits &other) const -> bool bool operator==(const AttributeTraits &other) const
{ {
return _attr_write_type == other.writeType() && _attr_format == other.formatType() && return _attr_write_type == other.writeType() && _attr_format == other.formatType() &&
_attr_type == other.type(); _attr_type == other.type();
} }
auto operator!=(const AttributeTraits &other) const -> bool { return !(*this == other); } bool operator!=(const AttributeTraits &other) const { return !(*this == other); }
void print(std::ostream &os) const noexcept; void print(std::ostream &os) const noexcept;
......
cmake_minimum_required(VERSION 3.6) cmake_minimum_required(VERSION 3.6)
# source files # source files
set(LOCAL_SRC_FILES set(SRC_FILES ${SRC_FILES}
${CMAKE_CURRENT_SOURCE_DIR}/AttributeName.cpp ${CMAKE_CURRENT_SOURCE_DIR}/AttributeName.cpp
${CMAKE_CURRENT_SOURCE_DIR}/AttributeName.hpp ${CMAKE_CURRENT_SOURCE_DIR}/AttributeName.hpp
${CMAKE_CURRENT_SOURCE_DIR}/AttributeTraits.cpp ${CMAKE_CURRENT_SOURCE_DIR}/AttributeTraits.cpp
${CMAKE_CURRENT_SOURCE_DIR}/HdbppTimescaleDbApi.cpp ${CMAKE_CURRENT_SOURCE_DIR}/HdbppTimescaleDb.cpp
${CMAKE_CURRENT_SOURCE_DIR}/LibUtils.cpp ${CMAKE_CURRENT_SOURCE_DIR}/LibUtils.cpp
${CMAKE_CURRENT_SOURCE_DIR}/DbConnection.cpp ${CMAKE_CURRENT_SOURCE_DIR}/DbConnection.cpp
${CMAKE_CURRENT_SOURCE_DIR}/QueryBuilder.cpp ${CMAKE_CURRENT_SOURCE_DIR}/QueryBuilder.cpp
${CMAKE_CURRENT_SOURCE_DIR}/PqxxExtension.cpp) ${CMAKE_CURRENT_SOURCE_DIR}/PqxxExtension.cpp
if(NOT BYPASS_LIBHDBPP)
set(LOCAL_SRC_FILES ${LOCAL_SRC_FILES}
${CMAKE_CURRENT_SOURCE_DIR}/HdbppTimescaleDb.cpp)
endif()
if(BYPASS_LIBHDBPP)
set(LOCAL_SRC_FILES ${LOCAL_SRC_FILES}
${CMAKE_CURRENT_SOURCE_DIR}/HdbClient.cpp)
endif()
set(SRC_FILES
${SRC_FILES}
${LOCAL_SRC_FILES}
PARENT_SCOPE) PARENT_SCOPE)
\ No newline at end of file
...@@ -44,13 +44,13 @@ namespace pqxx_conn ...@@ -44,13 +44,13 @@ namespace pqxx_conn
// query if the reference has a value, if its not cached it will be // query if the reference has a value, if its not cached it will be
// loaded from the database // loaded from the database
auto valueExists(const TRef &reference) -> bool; bool valueExists(const TRef &reference);
// get the value associated with the reference, throws and exception if it does not // get the value associated with the reference, throws and exception if it does not
// exist either in the cache or database. The caller can check valueExists() // exist either in the cache or database. The caller can check valueExists()
// before calling this function to know if its valid to attempt to return // before calling this function to know if its valid to attempt to return
// the value // the value
auto value(const TRef &reference) -> TValue; TValue value(const TRef &reference);
// cache a value in the internal maps // cache a value in the internal maps
void cacheValue(const TValue &value, const TRef &reference); void cacheValue(const TValue &value, const TRef &reference);
...@@ -60,7 +60,7 @@ namespace pqxx_conn ...@@ -60,7 +60,7 @@ namespace pqxx_conn
// utility functions // utility functions
void clear() noexcept { _values.clear(); } void clear() noexcept { _values.clear(); }
auto size() const noexcept -> int { return _values.size(); } int size() const noexcept { return _values.size(); }
void print(std::ostream &os) const noexcept; void print(std::ostream &os) const noexcept;
private: private:
...@@ -157,7 +157,7 @@ namespace pqxx_conn ...@@ -157,7 +157,7 @@ namespace pqxx_conn
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
template<typename TValue, typename TRef> template<typename TValue, typename TRef>
auto ColumnCache<TValue, TRef>::valueExists(const TRef &reference) -> bool bool ColumnCache<TValue, TRef>::valueExists(const TRef &reference)
{ {
assert(_conn != nullptr); assert(_conn != nullptr);
...@@ -231,7 +231,7 @@ namespace pqxx_conn ...@@ -231,7 +231,7 @@ namespace pqxx_conn
//============================================================================= //=============================================================================
//============================================================================= //=============================================================================
template<typename TValue, typename TRef> template<typename TValue, typename TRef>
auto ColumnCache<TValue, TRef>::value(const TRef &reference) -> TValue TValue ColumnCache<TValue, TRef>::value(const TRef &reference)
{ {
assert(_conn != nullptr); assert(_conn != nullptr);
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment