diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index 0bb4ae996caa6c6400f9141b3ebe8ff4d780bef9..07ec1d19cbc03ba3345083ab1743bbf5cb8b1a70 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -17,20 +17,12 @@
                                     "600000"
                                 ]
                             },
-                            "HBA_element_beamformer_delays_R": {
+                            "CLK_Enable_PWR_R": {
                                 "archive_period": [
                                     "600000"
-                                ],
-                                "archive_rel_change": [
-                                    "-1",
-                                    "1"
-                                ],
-                                "rel_change": [
-                                    "-1",
-                                    "1"
                                 ]
                             },
-                            "HBA_element_beamformer_delays_RW": {
+                            "CLK_I2C_STATUS_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -43,20 +35,17 @@
                                     "1"
                                 ]
                             },
-                            "HBA_element_led_R": {
+                            "CLK_PLL_error_R": {
                                 "archive_period": [
                                     "600000"
-                                ],
-                                "archive_rel_change": [
-                                    "-1",
-                                    "1"
-                                ],
-                                "rel_change": [
-                                    "-1",
-                                    "1"
                                 ]
                             },
-                            "HBA_element_led_RW": {
+                            "CLK_PLL_locked_R": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
+                            "CLK_monitor_rate_RW": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -69,7 +58,12 @@
                                     "1"
                                 ]
                             },
-                            "HBA_element_pwr_R": {
+                            "CLK_translator_busy_R": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
+                            "HBA_element_LNA_pwr_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -82,7 +76,7 @@
                                     "1"
                                 ]
                             },
-                            "HBA_element_pwr_RW": {
+                            "HBA_element_LNA_pwr_RW": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -95,7 +89,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_ADC_CML_R": {
+                            "HBA_element_beamformer_delays_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -108,7 +102,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_ADC_JESD_R": {
+                            "HBA_element_beamformer_delays_RW": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -121,7 +115,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_ADC_SYNC_R": {
+                            "HBA_element_led_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -134,7 +128,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_ADC_lock_R": {
+                            "HBA_element_led_RW": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -147,7 +141,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_ID_R": {
+                            "HBA_element_pwr_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -160,7 +154,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_LED0_R": {
+                            "HBA_element_pwr_RW": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -173,7 +167,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_LED0_RW": {
+                            "RCU_ADC_lock_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -186,7 +180,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_OUT1_R": {
+                            "RCU_I2C_STATUS_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -199,7 +193,7 @@
                                     "1"
                                 ]
                             },
-                            "RCU_OUT2_R": {
+                            "RCU_ID_R": {
                                 "archive_period": [
                                     "600000"
                                 ],
@@ -212,17 +206,29 @@
                                     "1"
                                 ]
                             },
+                            "RCU_LED0_R": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
+                            "RCU_LED0_RW": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
+                            "RCU_LED1_R": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
+                            "RCU_LED1_RW": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
                             "RCU_Pwr_dig_R": {
                                 "archive_period": [
                                     "600000"
-                                ],
-                                "archive_rel_change": [
-                                    "-1",
-                                    "1"
-                                ],
-                                "rel_change": [
-                                    "-1",
-                                    "1"
                                 ]
                             },
                             "RCU_attenuator_R": {
@@ -295,11 +301,6 @@
                                     "1.0"
                                 ]
                             },
-                            "RCU_state_R": {
-                                "archive_period": [
-                                    "600000"
-                                ]
-                            },
                             "RCU_temperature_R": {
                                 "archive_period": [
                                     "600000"
@@ -313,6 +314,11 @@
                                     "1.0"
                                 ]
                             },
+                            "RCU_translator_busy_R": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            },
                             "RCU_version_R": {
                                 "archive_period": [
                                     "600000"
@@ -333,19 +339,6 @@
                                 "event_period": [
                                     "0"
                                 ]
-                            },
-                            "uC_ID_R": {
-                                "archive_period": [
-                                    "600000"
-                                ],
-                                "archive_rel_change": [
-                                    "-1",
-                                    "1"
-                                ],
-                                "rel_change": [
-                                    "-1",
-                                    "1"
-                                ]
                             }
                         },
                         "properties": {
@@ -365,14 +358,8 @@
                                 "1000",
                                 "ant_mask_rw",
                                 "1000",
-                                "rcu_adc_cml_r",
-                                "1000",
-                                "rcu_adc_jesd_r",
-                                "1000",
                                 "rcu_adc_lock_r",
                                 "1000",
-                                "rcu_adc_sync_r",
-                                "1000",
                                 "rcu_attenuator_r",
                                 "1000",
                                 "rcu_attenuator_rw",
@@ -391,10 +378,6 @@
                                 "1000",
                                 "rcu_monitor_rate_rw",
                                 "1000",
-                                "rcu_out1_r",
-                                "1000",
-                                "rcu_out2_r",
-                                "1000",
                                 "rcu_pwr_dig_r",
                                 "1000",
                                 "rcu_temperature_r",
@@ -413,9 +396,29 @@
                                 "1000",
                                 "hba_element_pwr_rw",
                                 "1000",
-                                "rcu_state_r",
+                                "clk_enable_pwr_r",
+                                "1000",
+                                "clk_i2c_status_r",
+                                "1000",
+                                "clk_monitor_rate_rw",
+                                "1000",
+                                "clk_pll_error_r",
+                                "1000",
+                                "clk_pll_locked_r",
+                                "1000",
+                                "clk_translator_busy_r",
+                                "1000",
+                                "hba_element_lna_pwr_r",
+                                "1000",
+                                "hba_element_lna_pwr_rw",
+                                "1000",
+                                "rcu_i2c_status_r",
+                                "1000",
+                                "rcu_led1_r",
+                                "1000",
+                                "rcu_led1_rw",
                                 "1000",
-                                "uc_id_r",
+                                "rcu_translator_busy_r",
                                 "1000"
                             ]
                         }
diff --git a/CDB/thijs_ConfigDb.json b/CDB/thijs_ConfigDb.json
new file mode 100644
index 0000000000000000000000000000000000000000..b7e508732bf03b8919683b3cf2e34a52765a5ca3
--- /dev/null
+++ b/CDB/thijs_ConfigDb.json
@@ -0,0 +1,140 @@
+{
+    "servers": {
+        "PCC": {
+            "1": {
+                "PCC": {
+                    "LTS/PCC/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "host.docker.internal"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "SDP": {
+            "1": {
+                "SDP": {
+                    "LTS/SDP/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "dop36.astron.nl"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "example_device": {
+            "1": {
+                "example_device": {
+                    "LTS/example_device/1": {
+                         "attribute_properties": {
+                            "Ant_mask_RW": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            }
+						},
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "host.docker.internal"
+                            ],
+                            "OPC_Server_Port": [
+                                "4842"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "ini_device": {
+            "1": {
+                "ini_device": {
+                    "LTS/ini_device/1": {
+                         "attribute_properties": {
+                            "Ant_mask_RW": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            }
+						},
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "host.docker.internal"
+                            ],
+                            "OPC_Server_Port": [
+                                "4844"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "APSCTL": {
+            "1": {
+                "APSCTL": {
+                    "LTS/APSCTL/1": {
+                         "attribute_properties": {
+                            "Ant_mask_RW": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            }
+						},
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "ltspi.astron.nl"
+                            ],
+                            "OPC_Server_Port": [
+                                "4844"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "SNMP": {
+            "1": {
+                "SNMP": {
+                    "LTS/SNMP/1": {
+                         "attribute_properties": {
+                            "Ant_mask_RW": {
+                                "archive_period": [
+                                    "600000"
+                                ]
+                            }
+						},
+                        "properties": {
+                            "SNMP_community": [
+                                "public"
+                            ],
+                            "SNMP_host": [
+                                "192.168.178.17"
+                            ],
+                            "SNMP_timeout": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/bootstrap/bin/updatePythonEnv.sh b/bootstrap/bin/updatePythonEnv.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0f0d1becdcc2db08a6a7c3f93682df63124a9c67
--- /dev/null
+++ b/bootstrap/bin/updatePythonEnv.sh
@@ -0,0 +1,36 @@
+#! /usr/bin/env bash
+
+venv=${VIRTUAL_ENV:?This is currently not a Python3 venv!  Will not upgrade venv packages.}
+echo -e "\nFound a Python3 venv in \"${VIRTUAL_ENV}\".\nWill now proceed with package upgrades.\n"
+
+pip="python3 -m pip"
+upgrade_command="${pip} install --upgrade"
+
+if [ -z ${OS} ]; then
+    OS=$(uname)
+fi
+if [ ${OS} = Darwin ]; then
+    find=gfind
+else
+    find=find
+fi
+
+function upgrade()
+{
+    if [ ${#} -ge 1 ]; then
+        echo "Upgrading ${@}..."
+        ${upgrade_command} ${@}
+        if [ ${?} -eq 0 ]; then
+            echo -e "\nUpgrading ${@} done.\n\nNow \"deactivate\" and \"source ${venv}/bin/activate\".\n"
+        else
+            echo -e "\nSomething went wrong during the upgrade.\nCheck the output!\n"
+        fi
+    else
+        echo -e "\nNo package for upgrading given.\n"
+    fi
+}
+
+upgrade pip wheel
+
+installed_packages=$(for i in $(python3 -m pip freeze | cut -d'=' -f1); do printf "%s " ${i}; done)
+upgrade ${installed_packages}
diff --git a/bootstrap/etc/lofar20rc.sh b/bootstrap/etc/lofar20rc.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e3c75df48457342a6ba9d8e061a322e324ee17c4
--- /dev/null
+++ b/bootstrap/etc/lofar20rc.sh
@@ -0,0 +1,56 @@
+# Set up the LOFAR2.0 environment.
+# For the time being it is assumed that the LOFAR2.0 environment has to
+# co-exist with a LOFAR1 environment.
+
+# Set these for the host where you run SKA's Tango Docker images.
+# And export those directories for LOFAR in Tango Docker images.
+
+# Pass a directory as first parameter to this script.  This will
+# then be used as LOFAR20_DIR.  Otherwise the current directory will
+# be used.
+export LOFAR20_DIR=${1:-${PWD}}
+
+# This needs to be modified for a development environment.
+# Example:  ~/lofar2.0/tango
+# The current setting is for a production environment.
+export TANGO_LOFAR_LOCAL_DIR=${LOFAR20_DIR}/
+
+export TANGO_LOFAR_CONTAINER_DIR=${LOFAR20_DIR}/
+export TANGO_LOFAR_CONTAINER_MOUNT=${TANGO_LOFAR_LOCAL_DIR}:${TANGO_LOFAR_CONTAINER_DIR}:rw
+
+# This needs to be modified for a development environment.
+# In case you run multiple Docker networks on the same host in parallel, you need to specify a unique
+# network name for each of them.
+export NETWORK_MODE=lofar
+
+# It is assumed that the Tango host, the computer that runs the TangoDB, is this host.
+# If this is not true, then modify to the Tango host's FQDN and port.
+# Example:  export TANGO_HOST=station-xk25.astron.nl:10000
+export TANGO_HOST=$(hostname):10000
+
+
+#
+# NO MODIFICATION BEYOND THIS POINT!
+#
+
+# Provide the -v parameters for Docker and the -e ENV variables.
+export TANGO_CONTAINER_ENV="-e TANGO_LOFAR_CONTAINER_DIR=${TANGO_LOFAR_CONTAINER_DIR}"
+
+# Remove all LOFAR1 related environment modifications
+function remove_lofar()
+{
+    tmp=${1//:/ }
+    echo "$(for new in $(for i in ${tmp}; do printf "%s\n" ${i}; done | egrep -v '/opt/lofar/|/opt/WinCC|/opt/stationtest|/opt/operations'); do printf "%s:" ${new}; done)"
+}
+
+unset LOFARROOT
+export PATH=$(remove_lofar ${PATH})
+export LD_LIBRARY_PATH=$(remove_lofar ${LD_LIBRARY_PATH})
+export PYTHON_PATH=$(remove_lofar ${PYTHON_PATH})
+
+
+# Allow read access for everybody to allow Docker the forwarding of X11.
+chmod a+r ~/.Xauthority
+
+# Source the LOFAR2.0 Python3 venv if it exists.
+[ -z ${VIRTUAL_ENV} ] && [ -d ${LOFAR20_DIR}/lofar2.0_venv ] && source ${LOFAR20_DIR}/lofar2.0_venv/bin/activate
diff --git a/bootstrap/etc/requirements.txt b/bootstrap/etc/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5502737a6308c9939be7a2fa4981707f965918ac
--- /dev/null
+++ b/bootstrap/etc/requirements.txt
@@ -0,0 +1,9 @@
+astropy
+jupyter
+matplotlib
+numpy
+opcua-client
+pyqtgraph
+PyQt5
+asyncua
+dataclasses
diff --git a/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh b/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh
new file mode 100755
index 0000000000000000000000000000000000000000..eec4919f44e0b2c36faae7ebc796b4ce41c12315
--- /dev/null
+++ b/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh
@@ -0,0 +1,43 @@
+#! /usr/bin/env bash
+
+# Clean out local dirs and then clone a shallow copy
+# of LOFAR2.0 repos.
+
+# 2020-10-13, thomas
+# Currently correct.
+astron_gitlab=https://git.astron.nl/
+repo_dir=lofar2.0/
+branch=master
+
+for repo in lmc-base-classes ska-logging ska-docker opcua tango pypcc; do
+    cd ${repo}
+    # This will return 2 if the repo is clean.
+    clean="$(git status -u | wc -l)"
+    cd -
+    if [ "${clean}" = "2" ]; then
+        # The repo is clean, no new files, no modifications.
+        rm -rf ${repo}
+        git clone --depth 1 --branch ${branch} ${astron_gitlab}${repo_dir}${repo}.git
+    else
+        echo -e "*********\n\tThe repository ${repo} contains modifications.\n\tRestore the original content first before continuing.\nAborting now.\n"
+        exit -1
+    fi
+done
+
+# 2020-10-13, thomas
+# I need to move this repo to lofar2.0.
+repo=crossecho
+repo_dir=jurges
+rm -rf crossecho && git clone --depth 1 --branch add_simulation_mode_plus_patches ${astron_gitlab}${repo_dir}/${repo}.git
+
+if [ ! -s ska_logging -o $(readlink ska_logging) != ska-logging ]; then
+    echo "Creating symlink ska-logging -> ska_logging"
+    rm -f ska_logging
+    ln -s ska-logging ska_logging
+fi
+
+if [ ! -s skabase -o $(readlink skabase) != lmc-base-classes ]; then
+    echo "Creating symlink lmc-base-classes -> skabase"
+    rm -f skabase
+    ln -s lmc-base-classes skabase
+fi
diff --git a/bootstrap/sbin/delete_all_docker_images.sh b/bootstrap/sbin/delete_all_docker_images.sh
new file mode 100755
index 0000000000000000000000000000000000000000..7cccb90c3ede668f9e97eeb9956eac82176ac9a9
--- /dev/null
+++ b/bootstrap/sbin/delete_all_docker_images.sh
@@ -0,0 +1,31 @@
+#! /usr/bin/env bash
+
+function help()
+{
+    echo "You need to add a parameter \"YES_DELETE_ALL\" in order to really remove all Docker images."
+    exit 0
+}
+
+if [ ${#} -ne 1 ]; then
+    help
+elif [ "${1}" != "YES_DELETE_ALL" ]; then
+    help
+fi
+
+read -p "If you are certain that you want to delete all Docker images, then enter now \"YES_DO_IT\" " reply
+if [ "${reply}" != "YES_DO_IT" ]; then
+    echo "You really need to enter \"YES_DO_IT\" in order to confirm."
+    exit 0
+else
+    images="$(for i in $(docker images | egrep -v "REPOSITORY" | awk '{printf "%s:%s\n", $1, $2}'); do printf "%s " ${i}; done)"
+    if [ ${#images} -eq 0 ]; then
+        echo "There are no images to delete."
+    else
+        echo -e "Will now delete the following Docker images:"
+        for image in ${images}; do
+            printf "\t%s\n" "${image}"
+        done
+        docker rmi ${images}
+    fi
+fi
+
diff --git a/bootstrap/sbin/rebuild_system_from_scratch.sh b/bootstrap/sbin/rebuild_system_from_scratch.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8840c5a11d1863abe2e94cdda6268b9801d14d10
--- /dev/null
+++ b/bootstrap/sbin/rebuild_system_from_scratch.sh
@@ -0,0 +1,152 @@
+#! /usr/bin/env bash -e
+
+HOME_DIR=${LOFAR20_DIR:-${PWD}}
+if [ ! -d ${HOME_DIR}/bootstrap ]; then
+    # HOME_DIR/bootstrap needs to exist for this script to work.
+    echo -e "$(basename ${0}):\n\nERROR\n\tCannot run this script because the \"bootstrap\" cannot be found!\n\n"
+    exit 1
+fi
+
+# Save for the moment when tango will be cloned.
+OLD_HOME_DIR=${HOME_DIR}.old
+
+trap ' exit ${?} ' ABRT EXIT HUP INT TERM QUIT ERR
+
+
+function pull_images()
+{
+    (cd ${HOME_DIR}/docker-compose && make pull)
+}
+
+function build_lofar_images()
+{
+    (cd ${HOME_DIR}/docker-compose && make build)
+}
+
+function move_tango_dir_out_of_the_way()
+{
+    mv ${HOME_DIR} ${OLD_HOME_DIR}
+}
+
+function remove_images()
+{
+    ${HOME_DIR}/bootstrap/sbin/delete_all_docker_images.sh YES_DELETE_ALL
+}
+
+function pull_tango()
+{
+    git clone https://git.astron.nl/lofar2.0/tango.git ${HOME_DIR}
+    # Now remove the old tango dir.
+    rm -rf ${OLD_HOME_DIR}
+}
+
+function clean_images()
+{
+    # This can return a non-0 return code if the system
+    # has already been shut down.
+    # Therefore disable Bash's exit on error flag
+    set +e
+    (cd ${HOME_DIR}/docker-compose && make clean)
+    # And enable it again.
+    set -e
+}
+
+function start_minimal_tango()
+{
+    (cd ${HOME_DIR}/docker-compose
+    make minimal
+    # There is an issue with the dsconfig container that it every
+    # other time just exits without telling why.
+    # Therefore start dsconfig, then wait for 10s to allow it
+    # to die and just start it again.
+    # Don't they say that the second time is always a charm?
+    make start dsconfig
+    echo -e "\tWaiting for dsconfig to settle down..."
+    sleep 10
+    echo -e "\tDone.\n\tStarting dsconfig again."
+    make start dsconfig)
+}
+
+function configure_tango_db()
+{
+    # This somehow returns with a non-0 return code, make Bash happy.
+    ${HOME_DIR}/sbin/update_ConfigDb.sh ${HOME_DIR}/CDB/LOFAR_ConfigDb.json || true
+}
+
+function configure_elk()
+{
+    (cd ${HOME_DIR}/docker-compose && make start elk-configure-host)
+}
+
+function start_support_images()
+{
+    (cd ${HOME_DIR}/docker-compose && make start elk
+    make start jupyter)
+}
+
+function start_lofar_images()
+{
+    (cd ${HOME_DIR}/docker-compose
+    make start device-pcc
+    make start device-sdp)
+}
+
+
+# Clean out the Docker volumes.
+echo "-> Stopping Docker images and cleaning up their volumes..."
+clean_images
+echo -e "\tDone.\n"
+
+# Move the tango repo dir out of the way.
+echo "-> Moving the tango directory ${HOME_DIR} out of the way..."
+move_tango_dir_out_of_the_way
+echo -e "\tDone.\n"
+
+# Now pull the repo.
+echo "-> Cloning tango's master branch to ${HOME_DIR}..."
+pull_tango
+echo -e "\tDone.\n"
+
+# Now clean out the docker images.
+echo "-> Deleting all Docker images from this host..."
+remove_images
+echo -e "\tDone.\n"
+
+# Pull SKA's Docker images.
+echo "-> Installing latest SKA Docker images on this host..."
+pull_images
+echo -e "\tDone.\n"
+
+# Build all of the LOFAR Docker images.
+echo "-> Building LOFAR2.0 Docker images..."
+build_lofar_images
+echo -e "\tDone.\n"
+
+# Now start the basic Tango system, including dsconfig.
+echo "-> Starting up a minimal Tango session..."
+start_minimal_tango
+echo -e "\tDone.\n"
+
+# Load LOFAR's TangoDB.
+echo "-> Configuring the Tango DB for LOFAR2.0..."
+configure_tango_db
+echo -e "\tDone.\n"
+
+# Now configure the ELK container.
+echo "-> Configuring the ELK container for this host..."
+configure_elk
+echo -e "\tDone.\n"
+
+# Here I start ELK & Jupyter.
+echo "-> Start LOFAR2.0 support containers (ELK, Jupyter, etc.)..."
+start_support_images
+echo -e "\tDone.\n"
+
+# And finally start all the LOFAR images.
+echo "-> Start LOFAR2.0 containers..."
+start_lofar_images
+echo -e "\tDone.\n"
+
+# Now the system should be ready to use.
+# For instance the cold start script could now be executed.
+echo -e "-> LOFAR2.0 system rebuild was successful.\n\n"
diff --git a/devices/APSCTL.py b/devices/APSCTL.py
index e4c4dd38eaa9ab3aa18665093275749a54f3d94f..b9d47680875422b60bc124917e342dbf177d7ac2 100644
--- a/devices/APSCTL.py
+++ b/devices/APSCTL.py
@@ -13,20 +13,22 @@
 
 # PyTango imports
 from tango.server import run
-from tango.server import device_property
+from tango.server import device_property, attribute
 from tango import AttrWriteType
-
-#attribute extention and hardware device imports
-from src.attribute_wrapper import attribute_wrapper
-from src.hardware_device import hardware_device
-import numpy
 # Additional import
 
 from clients.opcua_connection import OPCUAConnection
+from util.attribute_wrapper import attribute_wrapper
+from util.hardware_device import hardware_device
 
+from util.lofar_logging import device_logging_to_python, log_exceptions
+from util.lofar_git import get_version
+
+import numpy
 
 __all__ = ["APSCTL", "main"]
 
+@device_logging_to_python({"device": "APSCTL"})
 class APSCTL(hardware_device):
     """
 
@@ -63,76 +65,76 @@ class APSCTL(hardware_device):
     # ----------
     # Attributes
     # ----------
+
+    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
+
     N_unb = 2
     N_fpga = 4
     N_ddr = 2
     N_qsfp = 6
 
-
     # Central CP per Uniboard
-    UNB2_Power_ON_OFF_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Power_ON_OFF_RW"], datatype=numpy.bool_, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
+    UNB2_FPGA_DDR4_SLOT_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_DDR4_SLOT_TEMP_R"], datatype=numpy.double, dims=((N_unb * N_ddr), N_fpga))
+    UNB2_I2C_bus_QSFP_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_QSFP_STATUS_R"], datatype=numpy.int64, dims=((N_unb * N_fpga), N_qsfp))
+    UNB2_I2C_bus_DDR4_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_DDR4_STATUS_R"], datatype=numpy.int64, dims=(N_ddr, N_fpga))
+    UNB2_I2C_bus_FPGA_PS_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_FPGA_PS_STATUS_R"], datatype=numpy.int64, dims=(N_unb * N_fpga,))
+    UNB2_translator_busy_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_translator_busy_R"], datatype=numpy.bool_)
+
     UNB2_Front_Panel_LED_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Front_Panel_LED_RW"], datatype=numpy.uint8, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
-    UNB2_Mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Mask_RW"], datatype=numpy.bool_, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
-    # Central MP per Uniboard
-    UNB2_I2C_bus_OK_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_OK_R"], datatype=numpy.bool_, dims=(N_unb,))
     UNB2_Front_Panel_LED_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Front_Panel_LED_R"], datatype=numpy.uint8, dims=(N_unb,))
     UNB2_EEPROM_Serial_Number_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_EEPROM_Serial_Number_R"], datatype=numpy.str, dims=(N_unb,))
     UNB2_EEPROM_Unique_ID_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_EEPROM_Unique_ID_R"], datatype=numpy.uint32, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_VIN_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_VIN_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_QSFP_N01_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_QSFP_N01_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_QSFP_N01_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_QSFP_N23_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_QSFP_N23_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_QSFP_N23_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_FPGA_DDR4_SLOT_PART_NUMBER_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_DDR4_SLOT_PART_NUMBER_R"], datatype=numpy.str, dims=(N_unb * N_qsfp, N_fpga))
+    UNB2_monitor_rate_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_monitor_rate_RW"], datatype=numpy.double, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
+    UNB2_I2C_bus_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_STATUS_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_I2C_bus_PS_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_PS_STATUS_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_mask_RW"], datatype=numpy.double, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
+    UNB2_Power_ON_OFF_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Power_ON_OFF_R"], datatype=numpy.double, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
+
+    UNB2_FPGA_QSFP_CAGE_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_TEMP_R"], datatype=numpy.double, dims=(N_unb * N_qsfp,N_fpga))
+    UNB2_FPGA_QSFP_CAGE_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_LOS_R"], datatype=numpy.uint8, dims=(N_unb * N_qsfp,N_fpga))
+    UNB2_FPGA_POL_HGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_HGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_HGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_HGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_HGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_HGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_PGM_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_PGM_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_PGM_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_PGM_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_PGM_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_PGM_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_RXGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_RXGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_RXGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_TXGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_TXGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_POL_FPGA_TXGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_FPGA_TXGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_POL_FPGA_CORE_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_FPGA_CORE_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_CORE_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_CORE_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_ERAM_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_ERAM_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_FPGA_POL_ERAM_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_POL_CLOCK_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_CLOCK_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_CLOCK_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
     UNB2_POL_SWITCH_1V2_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_1V2_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
     UNB2_POL_SWITCH_1V2_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_1V2_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
     UNB2_POL_SWITCH_1V2_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_1V2_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
     UNB2_POL_SWITCH_PHY_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_PHY_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
     UNB2_POL_SWITCH_PHY_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_PHY_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
     UNB2_POL_SWITCH_PHY_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_PHY_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_CLOCK_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_CLOCK_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
-    UNB2_POL_CLOCK_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
-
-    # monitor points per FPGA
-    UNB2_FPGA_DDR4_SLOT_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_DDR4_SLOT_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_DDR4_SLOT_PART_NUMBER_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_DDR4_SLOT_PART_NUMBER_R"], datatype=numpy.str, dims=(N_unb * N_qsfp,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_0_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_0_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_1_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_1_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_2_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_2_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_3_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_3_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_4_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_4_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_5_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_5_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_0_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_0_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_1_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_1_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_2_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_2_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_3_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_3_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_4_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_4_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga))
-    UNB2_FPGA_QSFP_CAGE_5_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_5_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_CORE_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_CORE_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_CORE_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_ERAM_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_ERAM_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_ERAM_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_RXGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_RXGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_RXGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_TXGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_TXGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_TXGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_HGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_HGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_HGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_HGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_HGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_HGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_PGM_VOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_PGM_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_PGM_IOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_PGM_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
-    UNB2_FPGA_POL_PGM_TEMP_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_PGM_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga))
+    UNB2_POL_QSFP_N01_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_QSFP_N01_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_QSFP_N01_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_QSFP_N23_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_QSFP_N23_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_POL_QSFP_N23_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_DC_DC_48V_12V_VIN_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_VIN_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_DC_DC_48V_12V_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_VOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_DC_DC_48V_12V_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_IOUT_R"], datatype=numpy.double, dims=(N_unb,))
+    UNB2_DC_DC_48V_12V_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_TEMP_R"], datatype=numpy.double, dims=(N_unb,))
 
 
+    # QualifiedName(2: UNB2_on)
+    # QualifiedName(2: UNB2_off)
+    @log_exceptions()
     def delete_device(self):
         """Hook to delete resources allocated in init_device.
 
@@ -148,13 +150,17 @@ class APSCTL(hardware_device):
     # --------
     # overloaded functions
     # --------
-    def off(self):
+    @log_exceptions()
+    def configure_for_off(self):
         """ user code here. is called when the state is set to OFF """
-
         # Stop keep-alive
-        self.opcua_connection.stop()
+        try:
+            self.opcua_connection.stop()
+        except Exception as e:
+            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
 
-    def initialise(self):
+    @log_exceptions()
+    def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
         """Initialises the attributes and properties of the PCC."""
 
@@ -165,8 +171,10 @@ class APSCTL(hardware_device):
         for i in self.attr_list():
             try:
                 i.set_comm_client(self.OPCua_client)
-            except:
-                self.debug_stream("error in getting APSCTL attribute: {} from client".format(i))
+            except Exception as e:
+                # fall back to the no-op pass function if setting the read/write functions fails
+                i.set_pass_func()
+                self.warn_stream("error while setting the APSCTL attribute {} read/write function. {}".format(i, e))
 
         self.OPCua_client.start()
 
diff --git a/devices/PCC.py b/devices/PCC.py
index 266725ab410b9b56c6d326a61f22cd8e332d54d6..37acfc1d3fa48cb3db4ce1dd4ebdd7a0ffa6b667 100644
--- a/devices/PCC.py
+++ b/devices/PCC.py
@@ -14,7 +14,7 @@
 # PyTango imports
 from tango import DebugIt
 from tango.server import run, command
-from tango.server import device_property
+from tango.server import device_property, attribute
 from tango import AttrWriteType
 import numpy
 # Additional import
@@ -25,6 +25,7 @@ from clients.opcua_connection import OPCUAConnection
 from util.attribute_wrapper import attribute_wrapper
 from util.hardware_device import hardware_device
 from util.lofar_logging import device_logging_to_python, log_exceptions
+from util.lofar_git import get_version
 
 __all__ = ["PCC", "main"]
 
@@ -69,37 +70,40 @@ class PCC(hardware_device):
     # ----------
     # Attributes
     # ----------
-    RCU_mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_mask_RW"], datatype=numpy.bool_, dims=(32,), access=AttrWriteType.READ_WRITE)
+    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
     Ant_mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:Ant_mask_RW"], datatype=numpy.bool_, dims=(3, 32), access=AttrWriteType.READ_WRITE)
+    CLK_Enable_PWR_R = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_Enable_PWR_R"], datatype=numpy.bool_)
+    CLK_I2C_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_I2C_STATUS_R"], datatype=numpy.int64)
+    CLK_PLL_error_R = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_PLL_error_R"], datatype=numpy.bool_)
+    CLK_PLL_locked_R = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_PLL_locked_R"], datatype=numpy.bool_)
+    CLK_monitor_rate_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_monitor_rate_RW"], datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
+    CLK_translator_busy_R = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_translator_busy_R"], datatype=numpy.bool_)
+    HBA_element_beamformer_delays_R = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_beamformer_delays_R"], datatype=numpy.int64, dims=(32, 96))
+    HBA_element_beamformer_delays_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_beamformer_delays_RW"], datatype=numpy.int64, dims=(32, 96), access=AttrWriteType.READ_WRITE)
+    HBA_element_led_R = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_led_R"], datatype=numpy.int64, dims=(32, 96))
+    HBA_element_led_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_led_RW"], datatype=numpy.int64, dims=(32, 96), access=AttrWriteType.READ_WRITE)
+    HBA_element_LNA_pwr_R = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_LNA_pwr_R"], datatype=numpy.int64, dims=(32, 96))
+    HBA_element_LNA_pwr_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_LNA_pwr_RW"], datatype=numpy.int64, dims=(32, 96), access=AttrWriteType.READ_WRITE)
+    HBA_element_pwr_R = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_pwr_R"], datatype=numpy.int64, dims=(32, 96))
+    HBA_element_pwr_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_pwr_RW"], datatype=numpy.int64, dims=(32, 96), access=AttrWriteType.READ_WRITE)
+    RCU_ADC_lock_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_ADC_lock_R"], datatype=numpy.int64, dims=(3, 32))
     RCU_attenuator_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_attenuator_R"], datatype=numpy.int64, dims=(3, 32))
-    RCU_attenuator_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_attenuator_RW"], datatype=numpy.int64, dims=(3, 32),
-                                          access=AttrWriteType.READ_WRITE)
+    RCU_attenuator_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_attenuator_RW"], datatype=numpy.int64, dims=(3, 32),  access=AttrWriteType.READ_WRITE)
     RCU_band_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_band_R"], datatype=numpy.int64, dims=(3, 32))
     RCU_band_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_band_RW"], datatype=numpy.int64, dims=(3, 32), access=AttrWriteType.READ_WRITE)
-    RCU_temperature_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_temperature_R"], datatype=numpy.float64, dims=(32,))
-    RCU_Pwr_dig_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_Pwr_dig_R"], datatype=numpy.int64, dims=(32,))
-    RCU_LED0_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_LED0_R"], datatype=numpy.int64, dims=(32,))
-    RCU_LED0_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_LED0_RW"], datatype=numpy.int64, dims=(32,), access=AttrWriteType.READ_WRITE)
-    RCU_ADC_lock_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_ADC_lock_R"], datatype=numpy.int64, dims=(3, 32))
-    RCU_ADC_SYNC_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_ADC_SYNC_R"], datatype=numpy.int64, dims=(3, 32))
-    RCU_ADC_JESD_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_ADC_JESD_R"], datatype=numpy.int64, dims=(3, 32))
-    RCU_ADC_CML_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_ADC_CML_R"], datatype=numpy.int64, dims=(3, 32))
-    RCU_OUT1_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_OUT1_R"], datatype=numpy.int64, dims=(3, 32))
-    RCU_OUT2_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_OUT2_R"], datatype=numpy.int64, dims=(3, 32))
+    RCU_I2C_STATUS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_I2C_STATUS_R"], datatype=numpy.int64, dims=(32,))
     RCU_ID_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_ID_R"], datatype=numpy.int64, dims=(32,))
+    RCU_LED0_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_LED0_R"], datatype=numpy.bool_, dims=(32,))
+    RCU_LED0_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_LED0_RW"], datatype=numpy.bool_, dims=(32,), access=AttrWriteType.READ_WRITE)
+    RCU_LED1_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_LED1_R"], datatype=numpy.bool_, dims=(32,))
+    RCU_LED1_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_LED1_RW"], datatype=numpy.bool_, dims=(32,), access=AttrWriteType.READ_WRITE)
+    RCU_mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_mask_RW"], datatype=numpy.bool_, dims=(32,), access=AttrWriteType.READ_WRITE)
+    RCU_monitor_rate_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_monitor_rate_RW"], datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
+    RCU_Pwr_dig_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_Pwr_dig_R"], datatype=numpy.bool_, dims=(32,))
+    RCU_temperature_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_temperature_R"], datatype=numpy.float64, dims=(32,))
+    RCU_translator_busy_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_translator_busy_R"], datatype=numpy.bool_)
     RCU_version_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_version_R"], datatype=numpy.str_, dims=(32,))
 
-    HBA_element_beamformer_delays_R = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_beamformer_delays_R"], datatype=numpy.int64,
-                                                        dims=(32, 96))
-    HBA_element_beamformer_delays_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_beamformer_delays_RW"], datatype=numpy.int64,
-                                                         dims=(32, 96), access=AttrWriteType.READ_WRITE)
-    HBA_element_pwr_R = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_pwr_R"], datatype=numpy.int64, dims=(32, 96))
-    HBA_element_pwr_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:HBA_element_pwr_RW"], datatype=numpy.int64, dims=(32, 96),
-                                           access=AttrWriteType.READ_WRITE)
-
-    RCU_monitor_rate_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_monitor_rate_RW"], datatype=numpy.float64,
-                                            access=AttrWriteType.READ_WRITE)
-
     @log_exceptions()
     def delete_device(self):
         """Hook to delete resources allocated in init_device.
@@ -117,35 +121,37 @@ class PCC(hardware_device):
     # overloaded functions
     # --------
     @log_exceptions()
-    def off(self):
+    def configure_for_off(self):
         """ user code here. is called when the state is set to OFF """
         # Stop keep-alive
-        self.OPCua_client.stop()
+        try:
+            self.opcua_connection.stop()
+        except Exception as e:
+            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
 
     @log_exceptions()
-    def initialise(self):
+    def configure_for_initialise(self):
         """ user code here. is called when the state is set to INIT """
 
         # Init the dict that contains function to OPC-UA function mappings.
         self.function_mapping = {}
         self.function_mapping["RCU_on"] = {}
         self.function_mapping["RCU_off"] = {}
-        self.function_mapping["ADC_on"] = {}
-        self.function_mapping["RCU_update"] = {}
         self.function_mapping["CLK_on"] = {}
         self.function_mapping["CLK_off"] = {}
-        self.function_mapping["CLK_PLL_setup"] = {}
 
         # set up the OPC ua client
         self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu",
                                             self.OPC_Time_Out, self.Fault, self)
 
-        # map the attributes to the OPC ua comm client
+        # map each attribute to the OPC-UA comm client
         for i in self.attr_list():
             try:
                 i.set_comm_client(self.OPCua_client)
-            except:
-                pass
+            except Exception as e:
+                # fall back to the no-op pass function if setting the read/write functions fails
+                i.set_pass_func()
+                self.warn_stream("error while setting the PCC attribute {} read/write function. {}".format(i, e))
 
         self.OPCua_client.start()
 
diff --git a/devices/SDP.py b/devices/SDP.py
index 25e1accc6e308212fe931a9bb261c428e74732f5..c74c2849d6adc785e05ba2631fe302bed9ccd77e 100644
--- a/devices/SDP.py
+++ b/devices/SDP.py
@@ -13,7 +13,7 @@
 
 # PyTango imports
 from tango.server import run
-from tango.server import device_property
+from tango.server import device_property, attribute
 from tango import AttrWriteType
 # Additional import
 
@@ -22,6 +22,7 @@ from util.attribute_wrapper import attribute_wrapper
 from util.hardware_device import hardware_device
 
 from util.lofar_logging import device_logging_to_python, log_exceptions
+from util.lofar_git import get_version
 
 import numpy
 
@@ -64,19 +65,46 @@ class SDP(hardware_device):
     # ----------
     # Attributes
     # ----------
-    fpga_mask_RW = attribute_wrapper(comms_annotation=["1:fpga_mask_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
-    fpga_scrap_R = attribute_wrapper(comms_annotation=["1:fpga_scrap_R"], datatype=numpy.int32, dims=(2048,))
-    fpga_scrap_RW = attribute_wrapper(comms_annotation=["1:fpga_scrap_RW"], datatype=numpy.int32, dims=(2048,), access=AttrWriteType.READ_WRITE)
-    fpga_status_R = attribute_wrapper(comms_annotation=["1:fpga_status_R"], datatype=numpy.bool_, dims=(16,))
-    fpga_temp_R = attribute_wrapper(comms_annotation=["1:fpga_temp_R"], datatype=numpy.float_, dims=(16,))
-    fpga_version_R = attribute_wrapper(comms_annotation=["1:fpga_version_R"], datatype=numpy.str_, dims=(16,))
-    fpga_weights_R = attribute_wrapper(comms_annotation=["1:fpga_weights_R"], datatype=numpy.int16, dims=(16, 12 * 488 * 2))
-    fpga_weights_RW = attribute_wrapper(comms_annotation=["1:fpga_weights_RW"], datatype=numpy.int16, dims=(16, 12 * 488 * 2), access=AttrWriteType.READ_WRITE)
-    tr_busy_R = attribute_wrapper(comms_annotation=["1:tr_busy_R"], datatype=numpy.bool_)
-    # NOTE: typo in node name is 'tr_reload_W' should be 'tr_reload_RW'
-    tr_reload_RW = attribute_wrapper(comms_annotation=["1:tr_reload_W"], datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
-    tr_tod_R = attribute_wrapper(comms_annotation=["1:tr_tod_R"], datatype=numpy.uint64)
-    tr_uptime_R = attribute_wrapper(comms_annotation=["1:tr_uptime_R"], datatype=numpy.uint64)
+
+    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
+
+    # SDP will switch from fpga_mask_RW to tr_fpga_mask_RW; offer both for now as it's a critical flag
+    tr_fpga_mask_RW = attribute_wrapper(comms_annotation=["2:tr_fpga_mask_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_mask_RW = attribute_wrapper(comms_annotation=["2:fpga_mask_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_scrap_R = attribute_wrapper(comms_annotation=["2:fpga_scrap_R"], datatype=numpy.int32, dims=(2048,))
+    fpga_scrap_RW = attribute_wrapper(comms_annotation=["2:fpga_scrap_RW"], datatype=numpy.int32, dims=(2048,), access=AttrWriteType.READ_WRITE)
+    fpga_status_R = attribute_wrapper(comms_annotation=["2:fpga_status_R"], datatype=numpy.bool_, dims=(16,))
+    fpga_temp_R = attribute_wrapper(comms_annotation=["2:fpga_temp_R"], datatype=numpy.float_, dims=(16,))
+    fpga_version_R = attribute_wrapper(comms_annotation=["2:fpga_version_R"], datatype=numpy.str_, dims=(16,))
+    fpga_weights_R = attribute_wrapper(comms_annotation=["2:fpga_weights_R"], datatype=numpy.int16, dims=(16, 12 * 488 * 2))
+    fpga_weights_RW = attribute_wrapper(comms_annotation=["2:fpga_weights_RW"], datatype=numpy.int16, dims=(16, 12 * 488 * 2), access=AttrWriteType.READ_WRITE)
+    fpga_processing_enable_RW = attribute_wrapper(comms_annotation=["2:fpga_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_processing_enable_R = attribute_wrapper(comms_annotation=["2:fpga_processing_enable_R"], datatype=numpy.bool_, dims=(16,))
+    fpga_sst_offload_enable_RW = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sst_offload_enable_R = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_enable_R"], datatype=numpy.bool_, dims=(16,))
+    fpga_sst_offload_dest_mac_RW = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_dest_mac_RW"], datatype=numpy.str_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sst_offload_dest_mac_R = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_dest_mac_R"], datatype=numpy.str_, dims=(16,))
+    fpga_sst_offload_dest_ip_RW = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_dest_ip_RW"], datatype=numpy.str_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sst_offload_dest_ip_R = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_dest_ip_R"], datatype=numpy.str_, dims=(16,))
+    fpga_sst_offload_dest_port_RW = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_dest_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sst_offload_dest_port_R = attribute_wrapper(comms_annotation=["2:fpga_sst_offload_dest_port_R"], datatype=numpy.uint16, dims=(16,))
+    fpga_sdp_info_station_id_RW = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_station_id_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sdp_info_station_id_R = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_station_id_R"], datatype=numpy.uint16, dims=(16,))
+    fpga_sdp_info_observation_id_RW = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_observation_id_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sdp_info_observation_id_R = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_observation_id_R"], datatype=numpy.uint32, dims=(16,))
+    fpga_sdp_info_nyquist_sampling_zone_index_RW = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_nyquist_sampling_zone_index_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
+    fpga_sdp_info_nyquist_sampling_zone_index_R = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_nyquist_sampling_zone_index_R"], datatype=numpy.uint16, dims=(16,))
+    fpga_sdp_info_subband_calibrated_flag_R = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_subband_calibrated_flag_R"], datatype=numpy.uint16, dims=(16,)) 
+    fpga_sdp_info_beamlet_scale_R = attribute_wrapper(comms_annotation=["2:fpga_sdp_info_beamlet_scale_R"], datatype=numpy.uint16, dims=(16,)) 
+
+    tr_busy_R = attribute_wrapper(comms_annotation=["2:tr_busy_R"], datatype=numpy.bool_)
+    tr_reload_RW = attribute_wrapper(comms_annotation=["2:tr_reload_RW"], datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    tr_tod_R = attribute_wrapper(comms_annotation=["2:tr_tod_R"], datatype=numpy.uint64)
+    tr_uptime_R = attribute_wrapper(comms_annotation=["2:tr_uptime_R"], datatype=numpy.uint64)
+
+    fpga_firmware_version_R = attribute_wrapper(comms_annotation=["2:fpga_firmware_version_R"], datatype=numpy.str_, dims=(16,))
+    fpga_hardware_version_R = attribute_wrapper(comms_annotation=["2:fpga_hardware_version_R"], datatype=numpy.str_, dims=(16,))
+    tr_software_version_R = attribute_wrapper(comms_annotation=["2:tr_software_version_R"], datatype=numpy.str_)
 
     def always_executed_hook(self):
         """Method always executed before any TANGO command is executed."""
@@ -99,14 +127,17 @@ class SDP(hardware_device):
     # overloaded functions
     # --------
     @log_exceptions()
-    def off(self):
+    def configure_for_off(self):
         """ user code here. is called when the state is set to OFF """
 
         # Stop keep-alive
-        self.opcua_connection.stop()
+        try:
+            self.opcua_connection.stop()
+        except Exception as e:
+            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
 
     @log_exceptions()
-    def initialise(self):
+    def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
         """Initialises the attributes and properties of the SDP."""
 
@@ -115,7 +146,13 @@ class SDP(hardware_device):
 
         # map an access helper class
         for i in self.attr_list():
-            i.set_comm_client(self.OPCua_client)
+            try:
+                i.set_comm_client(self.OPCua_client)
+            except Exception as e:
+                # if setting the read/write functions fails, use a pass function instead
+                i.set_pass_func()
+                self.warn_stream("error while setting the SDP attribute {} read/write function. {}".format(i, e))
+                pass
 
         self.OPCua_client.start()
 
@@ -133,4 +170,3 @@ def main(args=None, **kwargs):
 
 if __name__ == '__main__':
     main()
-
diff --git a/devices/SNMP.py b/devices/SNMP.py
new file mode 100644
index 0000000000000000000000000000000000000000..db748639ef3c784792b0d5e2211019b50bac1e9f
--- /dev/null
+++ b/devices/SNMP.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the PCC project
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" SNMP Device for LOFAR2.0
+
+"""
+
+# PyTango imports
+from tango.server import run
+from tango.server import device_property
+from tango import AttrWriteType
+
+# Additional import
+from clients.SNMP_client import SNMP_client
+from util.attribute_wrapper import attribute_wrapper
+from util.hardware_device import hardware_device
+
+import numpy
+
+__all__ = ["SNMP", "main"]
+
+
+class SNMP(hardware_device):
+    """
+
+    **Properties:**
+
+    - Device Property
+        SNMP_community
+        - Type:'DevString'
+        SNMP_host
+        - Type:'DevULong'
+        SNMP_timeout
+        - Type:'DevDouble'
+        """
+
+    # -----------------
+    # Device Properties
+    # -----------------
+
+    SNMP_community = device_property(
+        dtype='DevString',
+        mandatory=True
+    )
+
+    SNMP_host = device_property(
+        dtype='DevString',
+        mandatory=True
+    )
+
+    SNMP_timeout = device_property(
+        dtype='DevDouble',
+        mandatory=True
+    )
+
+    # ----------
+    # Attributes
+    # ----------
+
+    sys_description_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.1.0"}, datatype=numpy.str_)
+    sys_objectID_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.2.0", "type": "OID"}, datatype=numpy.str_)
+    sys_uptime_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.3.0", "type": "TimeTicks"}, datatype=numpy.int64)
+    sys_name_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.5.0"}, datatype=numpy.str_)
+    ip_route_mask_127_0_0_1_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.4.21.1.11.127.0.0.1", "type": "IpAddress"}, datatype=numpy.str_)
+    TCP_active_open_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.6.5.0", "type": "Counter32"}, datatype=numpy.int64)
+
+    sys_contact_RW = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.4.0"}, datatype=numpy.str_, access=AttrWriteType.READ_WRITE)
+    sys_contact_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.4.0"}, datatype=numpy.str_)
+
+    TCP_Curr_estab_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.6.9.0", "type": "Gauge"}, datatype=numpy.int64)
+
+    # inferred spectrum
+    if_index_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.2.2.1.1"}, dims=(10,), datatype=numpy.int64)
+
+
+    # --------
+    # overloaded functions
+    # --------
+    def configure_for_initialise(self):
+        """ user code here. is called when the state is set to INIT """
+
+        # set up the SNMP ua client
+        self.snmp_manager = SNMP_client(self.SNMP_community, self.SNMP_host, self.SNMP_timeout, self.Fault, self)
+
+        # map an access helper class
+        for i in self.attr_list():
+            try:
+                i.set_comm_client(self.snmp_manager)
+            except Exception as e:
+                # if setting the read/write functions fails, use a pass function instead
+                i.set_pass_func()
+                self.warn_stream("error while setting the SNMP attribute {} read/write function. {}".format(i, e))
+
+        self.snmp_manager.start()
+
+
+# --------
+# Commands
+# --------
+
+
+# ----------
+# Run server
+# ----------
+def main(args=None, **kwargs):
+    """Main function of the SNMP module."""
+    return run((SNMP,), args=args, **kwargs)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/devices/clients/SNMP_client.py b/devices/clients/SNMP_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..6230386aa300efbf9bf23ac1491b6a10354cba2f
--- /dev/null
+++ b/devices/clients/SNMP_client.py
@@ -0,0 +1,160 @@
+from util.comms_client import CommClient
+import snmp
+import numpy
+import traceback
+
+__all__ = ["SNMP_client"]
+
+
+snmp_to_numpy_dict = {
+    snmp.types.INTEGER: numpy.int64,
+    snmp.types.TimeTicks: numpy.int64,
+    snmp.types.OCTET_STRING: numpy.str_,
+    snmp.types.OID: numpy.str_,
+    snmp.types.Counter32: numpy.int64,
+    snmp.types.Gauge32: numpy.int64,
+    snmp.types.IpAddress: numpy.str_,
+}
+
+snmp_types = {
+    "Integer": numpy.int64,
+    "Gauge": numpy.int64,
+    "TimeTick": numpy.int64,
+    "Counter32": numpy.int64,
+    "OctetString": numpy.str_,
+    "IpAddress": numpy.str_,
+    "OID": numpy.str_,
+}
+
+
+class SNMP_client(CommClient):
+    """
+        messages to keep a check on the connection. On connection failure, reconnects once.
+    """
+
+    def start(self):
+        super().start()
+
+    def __init__(self, community, host, timeout, fault_func, streams, try_interval=2):
+        """
+        Create the SNMP and connect() to it
+        """
+        super().__init__(fault_func, streams, try_interval)
+
+        self.community = community
+        self.host = host
+        self.manager = snmp.Manager(community=bytes(community, "utf8"))
+
+        # Explicitly connect
+        if not self.connect():
+            # hardware or infra is down -- needs fixing first
+            fault_func()
+            return
+
+    def connect(self):
+        """
+        Try to connect to the client
+        """
+        self.streams.debug_stream("Connecting to community: %s, host: %s", self.community, self.host)
+
+        self.connected = True
+        return True
+
+    def ping(self):
+        """
+        ping the client to make sure the connection with the client is still functional.
+        """
+        pass
+
+    def _setup_annotation(self, annotation):
+        """
+        Process the comms annotation: returns the oid(s) and, optionally, the SNMP type to use on writes
+        """
+
+        if isinstance(annotation, dict):
+            # check if required path inarg is present
+            if annotation.get('oids') is None:
+                raise ValueError("SNMP get attributes require an oid")
+            oids = annotation.get("oids")  # required
+        else:
+            raise TypeError("SNMP attributes require a dict with oid(s)")
+            return
+
+        dtype = annotation.get('type', None)
+
+        return oids, dtype
+
+    def setup_value_conversion(self, attribute):
+        """
+        gives the client access to the attribute_wrapper object in order to access all data it could potentially need.
+        """
+
+        dim_x = attribute.dim_x
+        dim_y = attribute.dim_y
+        dtype = attribute.numpy_type
+
+        return dim_x, dim_y, dtype
+
+    def get_oids(self, x, y, in_oid):
+
+        if x == 0:
+            x = 1
+        if y == 0:
+            y = 1
+
+        nof_oids = x * y
+
+        if nof_oids == 1:
+            # is scalar
+            if type(in_oid) is str:
+                # for ease of handling put single oid in a 1 element list
+                in_oid = [in_oid]
+            return in_oid
+
+        elif type(in_oid) is list and len(in_oid) == nof_oids:
+            # already is an array and of the right length
+            return in_oid
+        elif type(in_oid) is list and len(in_oid) != nof_oids:
+            # already is an array but the wrong length. Unable to handle this
+            raise ValueError("SNMP oids need to either be a single value or an array the size of the attribute dimensions. got: {} expected: {}x{}={}".format(len(in_oid),x,y,x*y))
+        else:
+
+            return ["{}.{}".format(in_oid, i + 1) for i in range(nof_oids)]
+
+
+    def setup_attribute(self, annotation, attribute):
+        """
+        MANDATORY function: is used by the attribute wrapper to get read/write functions. must return the read and write functions
+        """
+
+        # process the annotation
+        oids, dtype = self._setup_annotation(annotation)
+
+        # get all the necessary data to set up the read/write functions from the attribute_wrapper
+        dim_x, dim_y, numpy_type = self.setup_value_conversion(attribute)
+        oids = self.get_oids(dim_x, dim_y, oids)
+
+        def _read_function():
+            vars = self.manager.get(self.host, *oids)
+            return [snmp_to_numpy_dict[type(i.value)](str(i.value)) for i in vars]
+
+        if dtype is not None:
+            def _write_function(value):
+                if len(oids) == 1 and type(value) != list:
+                    value = [value]
+
+                for i in range(len(oids)):
+                    self.manager.set(self.host, oids[i], snmp_types[dtype](value[i]))
+        else:
+            def _write_function(value):
+                if len(oids) == 1 and type(value) != list:
+                    value = [value]
+
+                for i in range(len(oids)):
+                    self.manager.set(self.host, oids[i], value[i])
+
+
+        # return the read/write functions
+        return _read_function, _write_function
+
+
diff --git a/devices/clients/ini_client.py b/devices/clients/ini_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f4d714b57dd57d327795fe59fd6edf43eb4c9fa
--- /dev/null
+++ b/devices/clients/ini_client.py
@@ -0,0 +1,192 @@
+from util.comms_client import CommClient
+import configparser
+import numpy
+
+__all__ = ["ini_client"]
+
+
+numpy_to_ini_dict = {
+    numpy.int64: int,
+    numpy.double: float,
+    numpy.float64: float,
+    numpy.bool_: bool,
+    str: str
+}
+
+numpy_to_ini_get_dict = {
+    numpy.int64: configparser.ConfigParser.getint,
+    numpy.double: configparser.ConfigParser.getfloat,
+    numpy.float64: configparser.ConfigParser.getfloat,
+    numpy.bool_: configparser.ConfigParser.getboolean,
+    str: str
+}
+
+ini_to_numpy_dict = {
+    int: numpy.int64,
+    float: numpy.float64,
+    bool: numpy.bool_,
+    str: numpy.str_
+}
+
+import os
+
+class ini_client(CommClient):
+    """
+    this class provides an example implementation of a comms_client.
+    During initialisation it creates a correctly shaped zero-filled value. On read that value is returned and on write it is modified.
+    """
+
+    def start(self):
+        super().start()
+
+    def __init__(self, filename, fault_func, streams, try_interval=2):
+        """
+        initialises the class and tries to connect to the client.
+        """
+        self.config = configparser.ConfigParser()
+        self.filename = filename
+
+        super().__init__(fault_func, streams, try_interval)
+
+        # Explicitly connect
+        if not self.connect():
+            # hardware or infra is down -- needs fixing first
+            fault_func()
+            return
+
+    def connect(self):
+        self.config_file = open(self.filename, "r")
+
+        self.connected = True  # set connected to true
+        return True  # if successful, return true. otherwise return false
+
+    def disconnect(self):
+        self.connected = False  # always force a reconnect, regardless of a successful disconnect
+        self.streams.debug_stream("disconnected from the 'client' ")
+
+    def _setup_annotation(self, annotation):
+        """
+        this function gives the client access to the comm client annotation data given to the attribute wrapper.
+        The annotation data can be used to provide whatever extra data is necessary in order to find/access the monitor/control point.
+
+        the annotation can be in whatever format may be required. it is up to the user to handle its content
+        example annotation may include:
+        - a file path and file line/location
+        - COM object path
+
+        Annotations:
+            name: Required, the name of the ini variable
+            section: Required,  the section of the ini variable
+
+        """
+
+        # as this is an example, just print the annotation
+        self.streams.debug_stream("annotation: {}".format(annotation))
+        name = annotation.get('name')
+        if name is None:
+            raise ValueError("ini client requires a variable `name` in the annotation to set/get")
+        section = annotation.get('section')
+        if section is None:
+            raise ValueError("requires a `section` specified in the annotation to open")
+
+        return section, name
+
+
+    def _setup_value_conversion(self, attribute):
+        """
+        gives the client access to the attribute_wrapper object in order to access all
+        necessary data such as dimensionality and data type
+        """
+
+        dim_y = attribute.dim_y
+        dim_x = attribute.dim_x
+
+        dtype = attribute.numpy_type
+
+        return dim_y, dim_x, dtype
+
+    def _setup_mapping(self, name, section, dtype, dim_y, dim_x):
+        """
+        takes all gathered data to configure and return the correct read and write functions
+        """
+
+        def read_function():
+            self.config.read_file(self.config_file)
+            value = self.config.get(section, name)
+
+            value = data_handler(value, dtype)
+
+            if dim_y > 1:
+                # if data is an image, slice it according to the y dimensions
+                value = numpy.array(numpy.split(value, indices_or_sections=dim_y))
+
+            return value
+
+        def write_function(value):
+
+            if type(value) is list:
+                write_value = ", ".join([str(v) for v in value])
+
+            else:
+                write_value = str(value)
+
+            self.config.read_file(self.config_file)
+            self.config.set(section, name, write_value)
+            with open(self.filename, 'w') as fp:
+                self.config.write(fp)
+
+        return read_function, write_function
+
+    def setup_attribute(self, annotation=None, attribute=None):
+        """
+        MANDATORY function: is used by the attribute wrapper to get read/write functions.
+        must return the read and write functions
+        """
+
+        # process the comms_annotation
+        section, name = self._setup_annotation(annotation)
+
+        # get all the necessary data to set up the read/write functions from the attribute_wrapper
+        dim_y, dim_x, dtype = self._setup_value_conversion(attribute)
+
+        # configure and return the read/write functions
+        read_function, write_function = self._setup_mapping(name, section, dtype, dim_y, dim_x)
+
+        # return the read/write functions
+        return read_function, write_function
+
+def data_handler(string, dtype):
+    value = []
+
+    if dtype is numpy.bool_:
+        # Handle special case for Bools
+        for i in string.split(","):
+            i = i.strip(" ")
+            if "True" == i:
+                value.append(True)
+            elif "False" == i:
+                value.append(False)
+            else:
+                raise ValueError("String to bool failed. String is not True/False, but is: '{}'".format(i))
+
+        value = dtype(value)
+
+    elif dtype is numpy.str_:
+        for i in string.split(","):
+            val = numpy.str_(i)
+            value.append(val)
+
+        value = numpy.array(value)
+
+    else:
+        # regular case, go through the separator
+        for i in string.split(","):
+            i = i.replace(" ", "")
+            val = dtype(i)
+            value.append(val)
+
+
+        # convert values from buildin type to numpy type
+        value = dtype(value)
+
+    return value
diff --git a/devices/clients/opcua_connection.py b/devices/clients/opcua_connection.py
index f55922df8dba4ca5dbb6c78db5600a7287d5f9ad..85afdfa08ef2849b8434d95bbd5c38e467a91b6c 100644
--- a/devices/clients/opcua_connection.py
+++ b/devices/clients/opcua_connection.py
@@ -51,6 +51,7 @@ class OPCUAConnection(CommClient):
             fault_func()
             return
 
+
         # determine namespace used
         try:
             if type(namespace) is str:
@@ -64,6 +65,7 @@ class OPCUAConnection(CommClient):
             self.name_space_index = 2
 
         self.obj = self.client.get_objects_node()
+        self.check_nodes()
 
     def _servername(self):
         return self.client.server_url.geturl()
@@ -83,6 +85,21 @@ class OPCUAConnection(CommClient):
             self.streams.error_stream("Could not connect to server %s: %s", self._servername(), e)
             raise Exception("Could not connect to server %s", self._servername()) from e
 
+    def check_nodes(self):
+        """
+        function purely for debugging/development only. Simply lists all top level nodes and the nodes below that
+        """
+
+        for i in self.obj.get_children():
+            print(i.get_browse_name())
+            for j in i.get_children():
+                try:
+                    print(j.get_browse_name(), j.get_data_type_as_variant_type())
+                except:
+                    print(j.get_browse_name())
+                finally:
+                    pass
+
 
     def disconnect(self):
         """
@@ -160,7 +177,6 @@ class OPCUAConnection(CommClient):
             self.streams.debug_stream("connected OPC ua node {} of type {} to attribute with dimensions: {} x {} ".format(str(node_name)[:len(node_name)-1], str(ua_type)[len("VariantType."):], dim_x, dim_y))
         except:
             pass
-
         # return the read/write functions
         return prot_attr.read_function, prot_attr.write_function
 
@@ -182,12 +198,19 @@ class ProtocolAttribute:
         """
         value = numpy.array(self.node.get_value())
 
-        if self.dim_y != 0:
+        if self.dim_y + self.dim_x == 1:
+            return numpy.array([value])
+        elif self.dim_y != 0:
             value = numpy.array(numpy.split(value, indices_or_sections=self.dim_y))
+        # NOTE(review): a duplicate `elif self.dim_y + self.dim_x == 1:` branch stood
+        # here; it repeated the first `if` condition and was unreachable (dead code).
         else:
             value = numpy.array(value)
+
         return value
 
+
+
     def write_function(self, value):
         """
         write_RW function
diff --git a/devices/examples/HW_device_template.py b/devices/examples/HW_device_template.py
index dd6cad99bd5824bd1b8a37a471aeb7796f32a05d..66b6bdb19c0b00703805dc2f76cce0d8208a36b2 100644
--- a/devices/examples/HW_device_template.py
+++ b/devices/examples/HW_device_template.py
@@ -17,6 +17,7 @@ from tango import AttrWriteType
 from util.attribute_wrapper import attribute_wrapper
 from util.hardware_device import hardware_device
 
+
 __all__ = ["HW_dev"]
 
 
@@ -55,24 +56,24 @@ class HW_dev(hardware_device):
     # --------
     # overloaded functions
     # --------
-    def fault(self):
+    def configure_for_fault(self):
         """ user code here. is called when the state is set to FAULT """
         pass
 
-    def off(self):
+    def configure_for_off(self):
         """ user code here. is called when the state is set to OFF """
         pass
 
-    def on(self):
+    def configure_for_on(self):
         """ user code here. is called when the state is set to ON """
 
         pass
 
-    def standby(self):
+    def configure_for_standby(self):
         """ user code here. is called when the state is set to STANDBY """
         pass
 
-    def initialise(self):
+    def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
         pass
 
diff --git a/devices/ini_device.py b/devices/ini_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbc6e6159409449cfa7f5577e06eaa84e0620a06
--- /dev/null
+++ b/devices/ini_device.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+#
+# This file wraps around a tango device class and provides a number of abstractions useful for hardware devices. It works together
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+"""
+
+"""
+
+# PyTango imports
+from tango.server import run
+from tango.server import device_property
+from tango import AttrWriteType
+from tango import DevState
+# Additional import
+from util.attribute_wrapper import attribute_wrapper
+from util.hardware_device import hardware_device
+
+
+import configparser
+import numpy
+
+from clients.ini_client import *
+
+
+__all__ = ["ini_device"]
+
+
+def write_ini_file(filename):
+    with open(filename, 'w') as configfile:
+
+        config = configparser.ConfigParser()
+        config['scalar'] = {}
+        config['scalar']['double_scalar_R'] = '1.2'
+        config['scalar']['bool_scalar_R'] = 'True'
+        config['scalar']['int_scalar_R'] = '5'
+        config['scalar']['str_scalar_R'] = 'this is a test'
+
+        config['spectrum'] = {}
+        config['spectrum']['double_spectrum_R'] = '1.2, 2.3, 3.4, 4.5'
+        config['spectrum']['bool_spectrum_R'] = 'True, True, False, False'
+        config['spectrum']['int_spectrum_R'] = '1, 2, 3, 4'
+        config['spectrum']['str_spectrum_R'] = '"a", "b", "c", "d"'
+
+        config['image'] = {}
+        config['image']['double_image_R'] = '1.2, 2.3, 3.4, 4.5, 5.6, 6.7'
+        config['image']['bool_image_R'] = 'True, True, False, False, True, False'
+        config['image']['int_image_R'] = '1, 2, 3, 4, 5, 6'
+        config['image']['str_image_R'] = '"a", "b", "c", "d", "e", "f"'
+
+        config.write(configfile)
+
+
+
+class ini_device(hardware_device):
+    """
+    This class is the minimal (read empty) implementation of a class using 'hardware_device'
+    """
+
+    # ----------
+    # Attributes
+    # ----------
+    """
+    attribute wrapper objects can be declared here. All attribute wrapper objects will get automatically put in a list (attr_list) for easy access
+
+    example = attribute_wrapper(comms_annotation="this is an example", datatype=numpy.double, dims=(8, 2), access=AttrWriteType.READ_WRITE)
+    ...
+
+    """
+    double_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "double_scalar_RW"}, datatype=numpy.double, access=AttrWriteType.READ_WRITE)
+    double_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "double_scalar_R"}, datatype=numpy.double)
+    bool_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_RW"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    bool_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_R"}, datatype=numpy.bool_)
+    int_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "int_scalar_RW"}, datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
+    int_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "int_scalar_R"}, datatype=numpy.int64)
+    str_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_RW"}, datatype=numpy.str_, access=AttrWriteType.READ_WRITE)
+    str_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_R"}, datatype=numpy.str_)
+
+    double_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_RW"}, datatype=numpy.double, dims=(4,), access=AttrWriteType.READ_WRITE)
+    double_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_R"}, datatype=numpy.double, dims=(4,))
+    bool_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_RW"}, datatype=numpy.bool_, dims=(4,), access=AttrWriteType.READ_WRITE)
+    bool_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_R"}, datatype=numpy.bool_, dims=(4,))
+    int_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_RW"}, datatype=numpy.int64, dims=(4,), access=AttrWriteType.READ_WRITE)
+    int_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_R"}, datatype=numpy.int64, dims=(4,))
+    str_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_RW"}, datatype=numpy.str_, dims=(4,), access=AttrWriteType.READ_WRITE)
+    str_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_R"}, datatype=numpy.str_, dims=(4,))
+
+    double_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "double_image_RW"}, datatype=numpy.double, dims=(3, 2), access=AttrWriteType.READ_WRITE)
+    double_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "double_image_R"}, datatype=numpy.double, dims=(3, 2))
+    bool_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "bool_image_RW"}, datatype=numpy.bool_, dims=(3, 2), access=AttrWriteType.READ_WRITE)
+    bool_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "bool_image_R"}, datatype=numpy.bool_, dims=(3, 2))
+    int_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "int_image_RW"}, datatype=numpy.int64, dims=(3, 2), access=AttrWriteType.READ_WRITE)
+    int_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "int_image_R"}, datatype=numpy.int64, dims=(3, 2))
+    str_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_RW"}, datatype=numpy.str_, dims=(3, 2), access=AttrWriteType.READ_WRITE)
+    str_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_R"}, datatype=numpy.str_, dims=(3, 2))
+
+    # --------
+    # overloaded functions
+    # --------
+    def configure_for_initialise(self):
+        """ user code here. is called when the state is set to INIT """
+        """Initialises the attributes and properties of the ini device."""
+
+        # set up the OPC ua client
+        self.ini_client = ini_client("example.ini", self.Fault, self)
+
+        # map an access helper class
+        for i in self.attr_list():
+            try:
+                i.set_comm_client(self.ini_client)
+            except Exception as e:
+                # if setting the read/write functions fails, use a pass function instead
+                i.set_pass_func()
+
+                self.warn_stream("error while setting the ini attribute {} read/write function. {}".format(i, e))
+
+        self.ini_client.start()
+
+
+# ----------
+# Run server
+# ----------
+def main(args=None, **kwargs):
+    write_ini_file("example.ini")
+
+
+    """Main function of the hardware device module."""
+    return run((ini_device,), args=args, **kwargs)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/devices/test_device.py b/devices/test_device.py
index 5659da595706cded1e786a479a12238d723e5f1c..6a62907112ea1cf081436285aa0d21532ba24d0a 100644
--- a/devices/test_device.py
+++ b/devices/test_device.py
@@ -64,7 +64,7 @@ class test_device(hardware_device):
     # --------
     # overloaded functions
     # --------
-    def initialise(self):
+    def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
         """Initialises the attributes and properties of the PCC."""
 
diff --git a/devices/util/archiver.py b/devices/util/archiver.py
old mode 100755
new mode 100644
diff --git a/devices/util/attribute_wrapper.py b/devices/util/attribute_wrapper.py
index b27187cda57a7d7238a677a085867011ad0ccb23..d4584f01b78b22d0ee634d91963d2b80adcd29f4 100644
--- a/devices/util/attribute_wrapper.py
+++ b/devices/util/attribute_wrapper.py
@@ -135,12 +135,15 @@ class attribute_wrapper(attribute):
         try:
             self.read_function, self.write_function = client.setup_attribute(self.comms_annotation, self)
         except Exception as e:
-            def pass_func(value=None):
-                pass
 
-            logger.error("Exception while setting %s attribute with annotation: '%s' read/write functions. using pass function instead to to keep running", client.__class__.__name__, self.comms_annotation)
+            logger.error("Exception while setting {} attribute with annotation: '{}' {}".format(client.__class__.__name__, self.comms_annotation, e))
+            raise Exception("Exception while setting %s attribute with annotation: '%s'", client.__class__.__name__, self.comms_annotation) from e
 
-            self.read_function = pass_func
-            self.write_function = pass_func
+    def set_pass_func(self):
+        def pass_func(value=None):
+            pass
 
-            raise Exception("Exception while setting comm_client read/write functions. using pass function instead. %s") from e
+        logger.debug("using pass function for attribute with annotation: {}".format(self.comms_annotation))
+
+        self.read_function = pass_func
+        self.write_function = pass_func
diff --git a/devices/util/get_internal_attribute_history.py b/devices/util/get_internal_attribute_history.py
old mode 100755
new mode 100644
diff --git a/devices/util/hardware_device.py b/devices/util/hardware_device.py
index 3dea7b7c06c5fea821895f958823ff80e3aafa72..e0c9154c703a7cb82c42e9cdd7db76d68a011e05 100644
--- a/devices/util/hardware_device.py
+++ b/devices/util/hardware_device.py
@@ -85,7 +85,7 @@ class hardware_device(Device):
         self.set_state(DevState.INIT)
         self.setup_value_dict()
 
-        self.initialise()
+        self.configure_for_initialise()
 
         self.set_state(DevState.STANDBY)
 
@@ -100,7 +100,7 @@ class hardware_device(Device):
 
         :return:None
         """
-        self.on()
+        self.configure_for_on()
         self.set_state(DevState.ON)
 
     @command()
@@ -119,7 +119,7 @@ class hardware_device(Device):
         # Turn off
         self.set_state(DevState.OFF)
 
-        self.off()
+        self.configure_for_off()
 
         # Turn off again, in case of race conditions through reconnecting
         self.set_state(DevState.OFF)
@@ -138,18 +138,18 @@ class hardware_device(Device):
 
         :return:None
         """
-        self.fault()
+        self.configure_for_fault()
         self.set_state(DevState.FAULT)
 
 
     # functions that can be overloaded
-    def fault(self):
+    def configure_for_fault(self):
         pass
-    def off(self):
+    def configure_for_off(self):
         pass
-    def on(self):
+    def configure_for_on(self):
         pass
-    def initialise(self):
+    def configure_for_initialise(self):
         pass
 
     def always_executed_hook(self):
diff --git a/devices/util/lofar2_config.py b/devices/util/lofar2_config.py
old mode 100755
new mode 100644
diff --git a/devices/util/lofar_git.py b/devices/util/lofar_git.py
new file mode 100644
index 0000000000000000000000000000000000000000..e95f6bdf369e9f21bfb89f0d3359a1328157d0a3
--- /dev/null
+++ b/devices/util/lofar_git.py
@@ -0,0 +1,78 @@
+import git # pip3 install gitpython
+import os
+from functools import lru_cache
+
+def get_repo(starting_directory: str = os.path.dirname(__file__)) -> git.Repo:
+    """ Try finding the repository by traversing up the tree.
+
+        By default, the repository containing this module is returned.
+    """
+
+    directory = starting_directory
+
+    try:
+        return git.Repo(directory)
+    except git.InvalidGitRepositoryError:
+        pass
+
+    # We now have to traverse up the tree
+    while directory != "/" and os.path.exists(directory):
+        # Go to parent
+        directory = os.path.abspath(directory + os.path.sep + "..")
+
+        try:
+            return git.Repo(directory)
+        except git.InvalidGitRepositoryError:
+            pass
+
+    raise git.InvalidGitRepositoryError("Could not find git repository root in {}".format(starting_directory))
+
+
+@lru_cache(maxsize=None)
+def get_version(repo: git.Repo = None) -> str:
+    """ Return a version string for the current commit.
+
+    There is a practical issue: the repository changes over time, e.g. switching branches with 'git checkout'. We want
+    to know the version that is running in memory, not the one that is on disk.
+
+    As a work-around, we cache the version information so that it is at least consistent. It is up to the caller
+    to request the version early enough.
+    
+    The version string is one of:
+       - <tag>
+       - <branch> [<commit>]
+
+    In both cases, a "*" prefix indicates this code is not production ready. Code is considered production ready if
+    it is a tag and there are no local modifications.
+       
+    """
+
+    if repo is None:
+        repo = get_repo()
+
+    branch = repo.active_branch
+    commit = repo.commit()
+    tags = { tag.commit: tag for tag in repo.tags }
+
+    if commit in tags:
+        commit_str = "{}".format(tags[commit])
+        production_ready = True
+    else:
+        commit_str = "{} [{}]".format(branch, commit)
+        production_ready = False
+
+    if repo.is_dirty():
+        production_ready = False
+
+    return "{}{}".format("*" if not production_ready else "", commit_str)
+
+
+# at least cache the current repo version immediately
+try:
+    _ = get_version()
+except:
+    pass
+
+
+if __name__ == "__main__":
+    print(get_version())
diff --git a/devices/util/lofar_logging.py b/devices/util/lofar_logging.py
index aa7d3633138679c63fd1934cf8d5638df7b1cedf..4bedad018047614c16d65b75816ac12dc7dbd7d0 100644
--- a/devices/util/lofar_logging.py
+++ b/devices/util/lofar_logging.py
@@ -1,5 +1,6 @@
 import logging
 from functools import wraps
+import sys
 
 # Always also log the hostname because it makes the origin of the log clear.
 import socket
@@ -14,7 +15,7 @@ def configure_logger(logger: logging.Logger, log_extra=None):
         # log to the tcp_input of logstash in our ELK stack
         handler = AsynchronousLogstashHandler("elk", 5959, database_path='pending_log_messages.db')
 
-        # configure log messages 
+        # configure log messages
         formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"])
         handler.setFormatter(formatter)
 
diff --git a/devices/util/startup.py b/devices/util/startup.py
old mode 100755
new mode 100644
index f98097f994afc340fdb168311bcb524445658f1d..0f4bcbe702b1bd1edb873234763d56455b6009b4
--- a/devices/util/startup.py
+++ b/devices/util/startup.py
@@ -34,4 +34,3 @@ def startup(device: str, force_restart: bool):
     else:
         print("Device {} has successfully reached ON state.".format(device))
     return proxy
-
diff --git a/devices/versioneer.py b/devices/versioneer.py
deleted file mode 100644
index 1040c218924c06a246ea1bd872201a5c57744192..0000000000000000000000000000000000000000
--- a/devices/versioneer.py
+++ /dev/null
@@ -1,1855 +0,0 @@
-
-# Version: 0.19
-
-"""The Versioneer - like a rocketeer, but for versions.
-
-The Versioneer
-==============
-
-* like a rocketeer, but for versions!
-* https://github.com/python-versioneer/python-versioneer
-* Brian Warner
-* License: Public Domain
-* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3
-* [![Latest Version][pypi-image]][pypi-url]
-* [![Build Status][travis-image]][travis-url]
-
-This is a tool for managing a recorded version number in distutils-based
-python projects. The goal is to remove the tedious and error-prone "update
-the embedded version string" step from your release process. Making a new
-release should be as easy as recording a new tag in your version-control
-system, and maybe making new tarballs.
-
-
-## Quick Install
-
-* `pip install versioneer` to somewhere in your $PATH
-* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md))
-* run `versioneer install` in your source tree, commit the results
-* Verify version information with `python setup.py version`
-
-## Version Identifiers
-
-Source trees come from a variety of places:
-
-* a version-control system checkout (mostly used by developers)
-* a nightly tarball, produced by build automation
-* a snapshot tarball, produced by a web-based VCS browser, like github's
-  "tarball from tag" feature
-* a release tarball, produced by "setup.py sdist", distributed through PyPI
-
-Within each source tree, the version identifier (either a string or a number,
-this tool is format-agnostic) can come from a variety of places:
-
-* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
-  about recent "tags" and an absolute revision-id
-* the name of the directory into which the tarball was unpacked
-* an expanded VCS keyword ($Id$, etc)
-* a `_version.py` created by some earlier build step
-
-For released software, the version identifier is closely related to a VCS
-tag. Some projects use tag names that include more than just the version
-string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
-needs to strip the tag prefix to extract the version identifier. For
-unreleased software (between tags), the version identifier should provide
-enough information to help developers recreate the same tree, while also
-giving them an idea of roughly how old the tree is (after version 1.2, before
-version 1.3). Many VCS systems can report a description that captures this,
-for example `git describe --tags --dirty --always` reports things like
-"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
-0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes).
-
-The version identifier is used for multiple purposes:
-
-* to allow the module to self-identify its version: `myproject.__version__`
-* to choose a name and prefix for a 'setup.py sdist' tarball
-
-## Theory of Operation
-
-Versioneer works by adding a special `_version.py` file into your source
-tree, where your `__init__.py` can import it. This `_version.py` knows how to
-dynamically ask the VCS tool for version information at import time.
-
-`_version.py` also contains `$Revision$` markers, and the installation
-process marks `_version.py` to have this marker rewritten with a tag name
-during the `git archive` command. As a result, generated tarballs will
-contain enough information to get the proper version.
-
-To allow `setup.py` to compute a version too, a `versioneer.py` is added to
-the top level of your source tree, next to `setup.py` and the `setup.cfg`
-that configures it. This overrides several distutils/setuptools commands to
-compute the version when invoked, and changes `setup.py build` and `setup.py
-sdist` to replace `_version.py` with a small static file that contains just
-the generated version data.
-
-## Installation
-
-See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
-
-## Version-String Flavors
-
-Code which uses Versioneer can learn about its version string at runtime by
-importing `_version` from your main `__init__.py` file and running the
-`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
-import the top-level `versioneer.py` and run `get_versions()`.
-
-Both functions return a dictionary with different flavors of version
-information:
-
-* `['version']`: A condensed version string, rendered using the selected
-  style. This is the most commonly used value for the project's version
-  string. The default "pep440" style yields strings like `0.11`,
-  `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
-  below for alternative styles.
-
-* `['full-revisionid']`: detailed revision identifier. For Git, this is the
-  full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
-
-* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
-  commit date in ISO 8601 format. This will be None if the date is not
-  available.
-
-* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
-  this is only accurate if run in a VCS checkout, otherwise it is likely to
-  be False or None
-
-* `['error']`: if the version string could not be computed, this will be set
-  to a string describing the problem, otherwise it will be None. It may be
-  useful to throw an exception in setup.py if this is set, to avoid e.g.
-  creating tarballs with a version string of "unknown".
-
-Some variants are more useful than others. Including `full-revisionid` in a
-bug report should allow developers to reconstruct the exact code being tested
-(or indicate the presence of local changes that should be shared with the
-developers). `version` is suitable for display in an "about" box or a CLI
-`--version` output: it can be easily compared against release notes and lists
-of bugs fixed in various releases.
-
-The installer adds the following text to your `__init__.py` to place a basic
-version in `YOURPROJECT.__version__`:
-
-    from ._version import get_versions
-    __version__ = get_versions()['version']
-    del get_versions
-
-## Styles
-
-The setup.cfg `style=` configuration controls how the VCS information is
-rendered into a version string.
-
-The default style, "pep440", produces a PEP440-compliant string, equal to the
-un-prefixed tag name for actual releases, and containing an additional "local
-version" section with more detail for in-between builds. For Git, this is
-TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
-tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
-that this commit is two revisions ("+2") beyond the "0.11" tag. For released
-software (exactly equal to a known tag), the identifier will only contain the
-stripped tag, e.g. "0.11".
-
-Other styles are available. See [details.md](details.md) in the Versioneer
-source tree for descriptions.
-
-## Debugging
-
-Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
-to return a version of "0+unknown". To investigate the problem, run `setup.py
-version`, which will run the version-lookup code in a verbose mode, and will
-display the full contents of `get_versions()` (including the `error` string,
-which may help identify what went wrong).
-
-## Known Limitations
-
-Some situations are known to cause problems for Versioneer. This details the
-most significant ones. More can be found on Github
-[issues page](https://github.com/python-versioneer/python-versioneer/issues).
-
-### Subprojects
-
-Versioneer has limited support for source trees in which `setup.py` is not in
-the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are
-two common reasons why `setup.py` might not be in the root:
-
-* Source trees which contain multiple subprojects, such as
-  [Buildbot](https://github.com/buildbot/buildbot), which contains both
-  "master" and "slave" subprojects, each with their own `setup.py`,
-  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
-  distributions (and upload multiple independently-installable tarballs).
-* Source trees whose main purpose is to contain a C library, but which also
-  provide bindings to Python (and perhaps other languages) in subdirectories.
-
-Versioneer will look for `.git` in parent directories, and most operations
-should get the right version string. However `pip` and `setuptools` have bugs
-and implementation details which frequently cause `pip install .` from a
-subproject directory to fail to find a correct version string (so it usually
-defaults to `0+unknown`).
-
-`pip install --editable .` should work correctly. `setup.py install` might
-work too.
-
-Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
-some later version.
-
-[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
-this issue. The discussion in
-[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
-issue from the Versioneer side in more detail.
-[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
-[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
-pip to let Versioneer work correctly.
-
-Versioneer-0.16 and earlier only looked for a `.git` directory next to the
-`setup.cfg`, so subprojects were completely unsupported with those releases.
-
-### Editable installs with setuptools <= 18.5
-
-`setup.py develop` and `pip install --editable .` allow you to install a
-project into a virtualenv once, then continue editing the source code (and
-test) without re-installing after every change.
-
-"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
-convenient way to specify executable scripts that should be installed along
-with the python package.
-
-These both work as expected when using modern setuptools. When using
-setuptools-18.5 or earlier, however, certain operations will cause
-`pkg_resources.DistributionNotFound` errors when running the entrypoint
-script, which must be resolved by re-installing the package. This happens
-when the install happens with one version, then the egg_info data is
-regenerated while a different version is checked out. Many setup.py commands
-cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
-a different virtualenv), so this can be surprising.
-
-[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
-  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
-  `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to make it easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/ . The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py . In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py . Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-## Similar projects
-
-* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
-  dependency
-* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
-  versioneer
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
-
-[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
-[pypi-url]: https://pypi.python.org/pypi/versioneer/
-[travis-image]:
-https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
-[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
-
-"""
-
-import configparser
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_root():
-    """Get the project root directory.
-
-    We require that all commands are run from the project root, i.e. the
-    directory that contains setup.py, setup.cfg, and versioneer.py .
-    """
-    root = os.path.realpath(os.path.abspath(os.getcwd()))
-    setup_py = os.path.join(root, "setup.py")
-    versioneer_py = os.path.join(root, "versioneer.py")
-    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
-        # allow 'python path/to/setup.py COMMAND'
-        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
-        setup_py = os.path.join(root, "setup.py")
-        versioneer_py = os.path.join(root, "versioneer.py")
-    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
-        err = ("Versioneer was unable to run the project root directory. "
-               "Versioneer requires setup.py to be executed from "
-               "its immediate directory (like 'python setup.py COMMAND'), "
-               "or in a way that lets it use sys.argv[0] to find the root "
-               "(like 'python path/to/setup.py COMMAND').")
-        raise VersioneerBadRootError(err)
-    try:
-        # Certain runtime workflows (setup.py install/develop in a setuptools
-        # tree) execute all dependencies in a single python process, so
-        # "versioneer" may be imported multiple times, and python's shared
-        # module-import table will cache the first one. So we can't use
-        # os.path.dirname(__file__), as that will find whichever
-        # versioneer.py was first imported, even in later projects.
-        me = os.path.realpath(os.path.abspath(__file__))
-        me_dir = os.path.normcase(os.path.splitext(me)[0])
-        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
-        if me_dir != vsr_dir:
-            print("Warning: build in %s is using versioneer.py from %s"
-                  % (os.path.dirname(me), versioneer_py))
-    except NameError:
-        pass
-    return root
-
-
-def get_config_from_root(root):
-    """Read the project setup.cfg file to determine Versioneer config."""
-    # This might raise EnvironmentError (if setup.cfg is missing), or
-    # configparser.NoSectionError (if it lacks a [versioneer] section), or
-    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
-    # the top of versioneer.py for instructions on writing your setup.cfg .
-    setup_cfg = os.path.join(root, "setup.cfg")
-    parser = configparser.ConfigParser()
-    with open(setup_cfg, "r") as f:
-        parser.read_file(f)
-    VCS = parser.get("versioneer", "VCS")  # mandatory
-
-    def get(parser, name):
-        if parser.has_option("versioneer", name):
-            return parser.get("versioneer", name)
-        return None
-    cfg = VersioneerConfig()
-    cfg.VCS = VCS
-    cfg.style = get(parser, "style") or ""
-    cfg.versionfile_source = get(parser, "versionfile_source")
-    cfg.versionfile_build = get(parser, "versionfile_build")
-    cfg.tag_prefix = get(parser, "tag_prefix")
-    if cfg.tag_prefix in ("''", '""'):
-        cfg.tag_prefix = ""
-    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
-    cfg.verbose = get(parser, "verbose")
-    return cfg
-
-
-class NotThisMethod(Exception):
-    """Exception raised if a method is not valid for the current scenario."""
-
-
-# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method):  # decorator
-    """Create decorator to mark a method as the handler of a VCS."""
-    def decorate(f):
-        """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
-        return f
-    return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
-                env=None):
-    """Call the given command(s)."""
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            dispcmd = str([c] + args)
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %s" % dispcmd)
-                print(e)
-            return None, None
-    else:
-        if verbose:
-            print("unable to find command, tried %s" % (commands,))
-        return None, None
-    stdout = p.communicate()[0].strip().decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %s (error)" % dispcmd)
-            print("stdout was %s" % stdout)
-        return None, p.returncode
-    return stdout, p.returncode
-
-
-LONG_VERSION_PY['git'] = r'''
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
-    """Get the keywords needed to look up the version information."""
-    # these strings will be replaced by git during git-archive.
-    # setup.py/versioneer.py will grep for the variable names, so they must
-    # each be defined on a line of their own. _version.py will just call
-    # get_keywords().
-    git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
-    git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
-    git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
-    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
-    return keywords
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_config():
-    """Create, populate and return the VersioneerConfig() object."""
-    # these strings are filled in when 'setup.py versioneer' creates
-    # _version.py
-    cfg = VersioneerConfig()
-    cfg.VCS = "git"
-    cfg.style = "%(STYLE)s"
-    cfg.tag_prefix = "%(TAG_PREFIX)s"
-    cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
-    cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
-    cfg.verbose = False
-    return cfg
-
-
-class NotThisMethod(Exception):
-    """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method):  # decorator
-    """Create decorator to mark a method as the handler of a VCS."""
-    def decorate(f):
-        """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
-        return f
-    return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
-                env=None):
-    """Call the given command(s)."""
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            dispcmd = str([c] + args)
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %%s" %% dispcmd)
-                print(e)
-            return None, None
-    else:
-        if verbose:
-            print("unable to find command, tried %%s" %% (commands,))
-        return None, None
-    stdout = p.communicate()[0].strip().decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %%s (error)" %% dispcmd)
-            print("stdout was %%s" %% stdout)
-        return None, p.returncode
-    return stdout, p.returncode
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
-    """Try to determine the version from the parent directory name.
-
-    Source tarballs conventionally unpack into a directory that includes both
-    the project name and a version string. We will also support searching up
-    two directory levels for an appropriately named parent directory
-    """
-    rootdirs = []
-
-    for i in range(3):
-        dirname = os.path.basename(root)
-        if dirname.startswith(parentdir_prefix):
-            return {"version": dirname[len(parentdir_prefix):],
-                    "full-revisionid": None,
-                    "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
-
-    if verbose:
-        print("Tried directories %%s but none started with prefix %%s" %%
-              (str(rootdirs), parentdir_prefix))
-    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
-    """Extract version information from the given file."""
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
-    """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
-    date = keywords.get("date")
-    if date is not None:
-        # Use only the last line.  Previous lines may contain GPG signature
-        # information.
-        date = date.splitlines()[-1]
-
-        # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
-        # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
-        # -like" string, which we must then edit to make compliant), because
-        # it's been around since git-1.5.3, and it's too difficult to
-        # discover which version we're using, or to work around using an
-        # older one.
-        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %%d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%%s', no digits" %% ",".join(refs - tags))
-    if verbose:
-        print("likely tags: %%s" %% ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %%s" %% r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None,
-                    "date": date}
-    # no suitable tags, so version is "0+unknown", but full hex is still there
-    if verbose:
-        print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
-    """Get version from 'git describe' in the root of the source tree.
-
-    This only gets called if the git-archive 'subst' keywords were *not*
-    expanded, and _version.py hasn't already been rewritten with a short
-    version string, meaning we're inside a checked out source tree.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
-    if rc != 0:
-        if verbose:
-            print("Directory %%s not under git control" %% root)
-        raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
-    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
-    # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%%s*" %% tag_prefix],
-                                   cwd=root)
-    # --long was added in git-1.5.5
-    if describe_out is None:
-        raise NotThisMethod("'git describe' failed")
-    describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if full_out is None:
-        raise NotThisMethod("'git rev-parse' failed")
-    full_out = full_out.strip()
-
-    pieces = {}
-    pieces["long"] = full_out
-    pieces["short"] = full_out[:7]  # maybe improved later
-    pieces["error"] = None
-
-    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
-    # TAG might have hyphens.
-    git_describe = describe_out
-
-    # look for -dirty suffix
-    dirty = git_describe.endswith("-dirty")
-    pieces["dirty"] = dirty
-    if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
-
-    # now we have TAG-NUM-gHEX or HEX
-
-    if "-" in git_describe:
-        # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
-        if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = ("unable to parse git-describe output: '%%s'"
-                               %% describe_out)
-            return pieces
-
-        # tag
-        full_tag = mo.group(1)
-        if not full_tag.startswith(tag_prefix):
-            if verbose:
-                fmt = "tag '%%s' doesn't start with prefix '%%s'"
-                print(fmt %% (full_tag, tag_prefix))
-            pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
-                               %% (full_tag, tag_prefix))
-            return pieces
-        pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
-        # distance: number of commits since tag
-        pieces["distance"] = int(mo.group(2))
-
-        # commit: short hex revision ID
-        pieces["short"] = mo.group(3)
-
-    else:
-        # HEX: no tags
-        pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
-
-    # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
-                       cwd=root)[0].strip()
-    # Use only the last line.  Previous lines may contain GPG signature
-    # information.
-    date = date.splitlines()[-1]
-    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
-    return pieces
-
-
-def plus_or_dot(pieces):
-    """Return a + if we don't already have one, else return a ."""
-    if "+" in pieces.get("closest-tag", ""):
-        return "."
-    return "+"
-
-
-def render_pep440(pieces):
-    """Build up version string, with post-release "local version identifier".
-
-    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
-    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
-    Exceptions:
-    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += plus_or_dot(pieces)
-            rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
-            if pieces["dirty"]:
-                rendered += ".dirty"
-    else:
-        # exception #1
-        rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
-                                          pieces["short"])
-        if pieces["dirty"]:
-            rendered += ".dirty"
-    return rendered
-
-
-def render_pep440_pre(pieces):
-    """TAG[.post0.devDISTANCE] -- No -dirty.
-
-    Exceptions:
-    1: no tags. 0.post0.devDISTANCE
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += ".post0.dev%%d" %% pieces["distance"]
-    else:
-        # exception #1
-        rendered = "0.post0.dev%%d" %% pieces["distance"]
-    return rendered
-
-
-def render_pep440_post(pieces):
-    """TAG[.postDISTANCE[.dev0]+gHEX] .
-
-    The ".dev0" means dirty. Note that .dev0 sorts backwards
-    (a dirty tree will appear "older" than the corresponding clean one),
-    but you shouldn't be releasing software with -dirty anyways.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%%d" %% pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-            rendered += plus_or_dot(pieces)
-            rendered += "g%%s" %% pieces["short"]
-    else:
-        # exception #1
-        rendered = "0.post%%d" %% pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-        rendered += "+g%%s" %% pieces["short"]
-    return rendered
-
-
-def render_pep440_old(pieces):
-    """TAG[.postDISTANCE[.dev0]] .
-
-    The ".dev0" means dirty.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%%d" %% pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-    else:
-        # exception #1
-        rendered = "0.post%%d" %% pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-    return rendered
-
-
-def render_git_describe(pieces):
-    """TAG[-DISTANCE-gHEX][-dirty].
-
-    Like 'git describe --tags --dirty --always'.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render_git_describe_long(pieces):
-    """TAG-DISTANCE-gHEX[-dirty].
-
-    Like 'git describe --tags --dirty --always -long'.
-    The distance/hash is unconditional.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render(pieces, style):
-    """Render the given version pieces into the requested style."""
-    if pieces["error"]:
-        return {"version": "unknown",
-                "full-revisionid": pieces.get("long"),
-                "dirty": None,
-                "error": pieces["error"],
-                "date": None}
-
-    if not style or style == "default":
-        style = "pep440"  # the default
-
-    if style == "pep440":
-        rendered = render_pep440(pieces)
-    elif style == "pep440-pre":
-        rendered = render_pep440_pre(pieces)
-    elif style == "pep440-post":
-        rendered = render_pep440_post(pieces)
-    elif style == "pep440-old":
-        rendered = render_pep440_old(pieces)
-    elif style == "git-describe":
-        rendered = render_git_describe(pieces)
-    elif style == "git-describe-long":
-        rendered = render_git_describe_long(pieces)
-    else:
-        raise ValueError("unknown style '%%s'" %% style)
-
-    return {"version": rendered, "full-revisionid": pieces["long"],
-            "dirty": pieces["dirty"], "error": None,
-            "date": pieces.get("date")}
-
-
-def get_versions():
-    """Get version information or return default if unable to do so."""
-    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
-    # __file__, we can work backwards from there to the root. Some
-    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
-    # case we can only use expanded keywords.
-
-    cfg = get_config()
-    verbose = cfg.verbose
-
-    try:
-        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
-                                          verbose)
-    except NotThisMethod:
-        pass
-
-    try:
-        root = os.path.realpath(__file__)
-        # versionfile_source is the relative path from the top of the source
-        # tree (where the .git directory might live) to this file. Invert
-        # this to find the root from __file__.
-        for i in cfg.versionfile_source.split('/'):
-            root = os.path.dirname(root)
-    except NameError:
-        return {"version": "0+unknown", "full-revisionid": None,
-                "dirty": None,
-                "error": "unable to find root of source tree",
-                "date": None}
-
-    try:
-        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
-        return render(pieces, cfg.style)
-    except NotThisMethod:
-        pass
-
-    try:
-        if cfg.parentdir_prefix:
-            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-    except NotThisMethod:
-        pass
-
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None,
-            "error": "unable to compute version", "date": None}
-'''
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
-    """Extract version information from the given file."""
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
-    """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
-    date = keywords.get("date")
-    if date is not None:
-        # Use only the last line.  Previous lines may contain GPG signature
-        # information.
-        date = date.splitlines()[-1]
-
-        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
-        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
-        # -like" string, which we must then edit to make compliant), because
-        # it's been around since git-1.5.3, and it's too difficult to
-        # discover which version we're using, or to work around using an
-        # older one.
-        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%s', no digits" % ",".join(refs - tags))
-    if verbose:
-        print("likely tags: %s" % ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %s" % r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None,
-                    "date": date}
-    # no suitable tags, so version is "0+unknown", but full hex is still there
-    if verbose:
-        print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
-    """Get version from 'git describe' in the root of the source tree.
-
-    This only gets called if the git-archive 'subst' keywords were *not*
-    expanded, and _version.py hasn't already been rewritten with a short
-    version string, meaning we're inside a checked out source tree.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
-    if rc != 0:
-        if verbose:
-            print("Directory %s not under git control" % root)
-        raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
-    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
-    # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%s*" % tag_prefix],
-                                   cwd=root)
-    # --long was added in git-1.5.5
-    if describe_out is None:
-        raise NotThisMethod("'git describe' failed")
-    describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if full_out is None:
-        raise NotThisMethod("'git rev-parse' failed")
-    full_out = full_out.strip()
-
-    pieces = {}
-    pieces["long"] = full_out
-    pieces["short"] = full_out[:7]  # maybe improved later
-    pieces["error"] = None
-
-    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
-    # TAG might have hyphens.
-    git_describe = describe_out
-
-    # look for -dirty suffix
-    dirty = git_describe.endswith("-dirty")
-    pieces["dirty"] = dirty
-    if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
-
-    # now we have TAG-NUM-gHEX or HEX
-
-    if "-" in git_describe:
-        # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
-        if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = ("unable to parse git-describe output: '%s'"
-                               % describe_out)
-            return pieces
-
-        # tag
-        full_tag = mo.group(1)
-        if not full_tag.startswith(tag_prefix):
-            if verbose:
-                fmt = "tag '%s' doesn't start with prefix '%s'"
-                print(fmt % (full_tag, tag_prefix))
-            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
-                               % (full_tag, tag_prefix))
-            return pieces
-        pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
-        # distance: number of commits since tag
-        pieces["distance"] = int(mo.group(2))
-
-        # commit: short hex revision ID
-        pieces["short"] = mo.group(3)
-
-    else:
-        # HEX: no tags
-        pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
-
-    # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
-                       cwd=root)[0].strip()
-    # Use only the last line.  Previous lines may contain GPG signature
-    # information.
-    date = date.splitlines()[-1]
-    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
-    return pieces
-
-
-def do_vcs_install(manifest_in, versionfile_source, ipy):
-    """Git-specific installation logic for Versioneer.
-
-    For Git, this means creating/changing .gitattributes to mark _version.py
-    for export-subst keyword substitution.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-    files = [manifest_in, versionfile_source]
-    if ipy:
-        files.append(ipy)
-    try:
-        me = __file__
-        if me.endswith(".pyc") or me.endswith(".pyo"):
-            me = os.path.splitext(me)[0] + ".py"
-        versioneer_file = os.path.relpath(me)
-    except NameError:
-        versioneer_file = "versioneer.py"
-    files.append(versioneer_file)
-    present = False
-    try:
-        f = open(".gitattributes", "r")
-        for line in f.readlines():
-            if line.strip().startswith(versionfile_source):
-                if "export-subst" in line.strip().split()[1:]:
-                    present = True
-        f.close()
-    except EnvironmentError:
-        pass
-    if not present:
-        f = open(".gitattributes", "a+")
-        f.write("%s export-subst\n" % versionfile_source)
-        f.close()
-        files.append(".gitattributes")
-    run_command(GITS, ["add", "--"] + files)
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
-    """Try to determine the version from the parent directory name.
-
-    Source tarballs conventionally unpack into a directory that includes both
-    the project name and a version string. We will also support searching up
-    two directory levels for an appropriately named parent directory
-    """
-    rootdirs = []
-
-    for i in range(3):
-        dirname = os.path.basename(root)
-        if dirname.startswith(parentdir_prefix):
-            return {"version": dirname[len(parentdir_prefix):],
-                    "full-revisionid": None,
-                    "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
-
-    if verbose:
-        print("Tried directories %s but none started with prefix %s" %
-              (str(rootdirs), parentdir_prefix))
-    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.19) from
-# revision-control system data, or from the parent directory name of an
-# unpacked source archive. Distribution tarballs contain a pre-generated copy
-# of this file.
-
-import json
-
-version_json = '''
-%s
-'''  # END VERSION_JSON
-
-
-def get_versions():
-    return json.loads(version_json)
-"""
-
-
-def versions_from_file(filename):
-    """Try to determine the version from _version.py if present."""
-    try:
-        with open(filename) as f:
-            contents = f.read()
-    except EnvironmentError:
-        raise NotThisMethod("unable to read _version.py")
-    mo = re.search(r"version_json = '''\n(.*)'''  # END VERSION_JSON",
-                   contents, re.M | re.S)
-    if not mo:
-        mo = re.search(r"version_json = '''\r\n(.*)'''  # END VERSION_JSON",
-                       contents, re.M | re.S)
-    if not mo:
-        raise NotThisMethod("no version_json in _version.py")
-    return json.loads(mo.group(1))
-
-
-def write_to_version_file(filename, versions):
-    """Write the given version number to the given _version.py file."""
-    os.unlink(filename)
-    contents = json.dumps(versions, sort_keys=True,
-                          indent=1, separators=(",", ": "))
-    with open(filename, "w") as f:
-        f.write(SHORT_VERSION_PY % contents)
-
-    print("set %s to '%s'" % (filename, versions["version"]))
-
-
-def plus_or_dot(pieces):
-    """Return a + if we don't already have one, else return a ."""
-    if "+" in pieces.get("closest-tag", ""):
-        return "."
-    return "+"
-
-
-def render_pep440(pieces):
-    """Build up version string, with post-release "local version identifier".
-
-    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
-    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
-    Exceptions:
-    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += plus_or_dot(pieces)
-            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
-            if pieces["dirty"]:
-                rendered += ".dirty"
-    else:
-        # exception #1
-        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
-                                          pieces["short"])
-        if pieces["dirty"]:
-            rendered += ".dirty"
-    return rendered
-
-
-def render_pep440_pre(pieces):
-    """TAG[.post0.devDISTANCE] -- No -dirty.
-
-    Exceptions:
-    1: no tags. 0.post0.devDISTANCE
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += ".post0.dev%d" % pieces["distance"]
-    else:
-        # exception #1
-        rendered = "0.post0.dev%d" % pieces["distance"]
-    return rendered
-
-
-def render_pep440_post(pieces):
-    """TAG[.postDISTANCE[.dev0]+gHEX] .
-
-    The ".dev0" means dirty. Note that .dev0 sorts backwards
-    (a dirty tree will appear "older" than the corresponding clean one),
-    but you shouldn't be releasing software with -dirty anyways.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-            rendered += plus_or_dot(pieces)
-            rendered += "g%s" % pieces["short"]
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-        rendered += "+g%s" % pieces["short"]
-    return rendered
-
-
-def render_pep440_old(pieces):
-    """TAG[.postDISTANCE[.dev0]] .
-
-    The ".dev0" means dirty.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-    return rendered
-
-
-def render_git_describe(pieces):
-    """TAG[-DISTANCE-gHEX][-dirty].
-
-    Like 'git describe --tags --dirty --always'.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render_git_describe_long(pieces):
-    """TAG-DISTANCE-gHEX[-dirty].
-
-    Like 'git describe --tags --dirty --always -long'.
-    The distance/hash is unconditional.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render(pieces, style):
-    """Render the given version pieces into the requested style."""
-    if pieces["error"]:
-        return {"version": "unknown",
-                "full-revisionid": pieces.get("long"),
-                "dirty": None,
-                "error": pieces["error"],
-                "date": None}
-
-    if not style or style == "default":
-        style = "pep440"  # the default
-
-    if style == "pep440":
-        rendered = render_pep440(pieces)
-    elif style == "pep440-pre":
-        rendered = render_pep440_pre(pieces)
-    elif style == "pep440-post":
-        rendered = render_pep440_post(pieces)
-    elif style == "pep440-old":
-        rendered = render_pep440_old(pieces)
-    elif style == "git-describe":
-        rendered = render_git_describe(pieces)
-    elif style == "git-describe-long":
-        rendered = render_git_describe_long(pieces)
-    else:
-        raise ValueError("unknown style '%s'" % style)
-
-    return {"version": rendered, "full-revisionid": pieces["long"],
-            "dirty": pieces["dirty"], "error": None,
-            "date": pieces.get("date")}
-
-
-class VersioneerBadRootError(Exception):
-    """The project root directory is unknown or missing key files."""
-
-
-def get_versions(verbose=False):
-    """Get the project version from whatever source is available.
-
-    Returns dict with two keys: 'version' and 'full'.
-    """
-    if "versioneer" in sys.modules:
-        # see the discussion in cmdclass.py:get_cmdclass()
-        del sys.modules["versioneer"]
-
-    root = get_root()
-    cfg = get_config_from_root(root)
-
-    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
-    handlers = HANDLERS.get(cfg.VCS)
-    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
-    verbose = verbose or cfg.verbose
-    assert cfg.versionfile_source is not None, \
-        "please set versioneer.versionfile_source"
-    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
-
-    versionfile_abs = os.path.join(root, cfg.versionfile_source)
-
-    # extract version from first of: _version.py, VCS command (e.g. 'git
-    # describe'), parentdir. This is meant to work for developers using a
-    # source checkout, for users of a tarball created by 'setup.py sdist',
-    # and for users of a tarball/zipball created by 'git archive' or github's
-    # download-from-tag feature or the equivalent in other VCSes.
-
-    get_keywords_f = handlers.get("get_keywords")
-    from_keywords_f = handlers.get("keywords")
-    if get_keywords_f and from_keywords_f:
-        try:
-            keywords = get_keywords_f(versionfile_abs)
-            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
-            if verbose:
-                print("got version from expanded keyword %s" % ver)
-            return ver
-        except NotThisMethod:
-            pass
-
-    try:
-        ver = versions_from_file(versionfile_abs)
-        if verbose:
-            print("got version from file %s %s" % (versionfile_abs, ver))
-        return ver
-    except NotThisMethod:
-        pass
-
-    from_vcs_f = handlers.get("pieces_from_vcs")
-    if from_vcs_f:
-        try:
-            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
-            ver = render(pieces, cfg.style)
-            if verbose:
-                print("got version from VCS %s" % ver)
-            return ver
-        except NotThisMethod:
-            pass
-
-    try:
-        if cfg.parentdir_prefix:
-            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-            if verbose:
-                print("got version from parentdir %s" % ver)
-            return ver
-    except NotThisMethod:
-        pass
-
-    if verbose:
-        print("unable to compute version")
-
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None, "error": "unable to compute version",
-            "date": None}
-
-
-def get_version():
-    """Get the short version string for this project."""
-    return get_versions()["version"]
-
-
-def get_cmdclass(cmdclass=None):
-    """Get the custom setuptools/distutils subclasses used by Versioneer.
-
-    If the package uses a different cmdclass (e.g. one from numpy), it
-    should be provide as an argument.
-    """
-    if "versioneer" in sys.modules:
-        del sys.modules["versioneer"]
-        # this fixes the "python setup.py develop" case (also 'install' and
-        # 'easy_install .'), in which subdependencies of the main project are
-        # built (using setup.py bdist_egg) in the same python process. Assume
-        # a main project A and a dependency B, which use different versions
-        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
-        # sys.modules by the time B's setup.py is executed, causing B to run
-        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
-        # sandbox that restores sys.modules to it's pre-build state, so the
-        # parent is protected against the child's "import versioneer". By
-        # removing ourselves from sys.modules here, before the child build
-        # happens, we protect the child from the parent's versioneer too.
-        # Also see https://github.com/python-versioneer/python-versioneer/issues/52
-
-    cmds = {} if cmdclass is None else cmdclass.copy()
-
-    # we add "version" to both distutils and setuptools
-    from distutils.core import Command
-
-    class cmd_version(Command):
-        description = "report generated version string"
-        user_options = []
-        boolean_options = []
-
-        def initialize_options(self):
-            pass
-
-        def finalize_options(self):
-            pass
-
-        def run(self):
-            vers = get_versions(verbose=True)
-            print("Version: %s" % vers["version"])
-            print(" full-revisionid: %s" % vers.get("full-revisionid"))
-            print(" dirty: %s" % vers.get("dirty"))
-            print(" date: %s" % vers.get("date"))
-            if vers["error"]:
-                print(" error: %s" % vers["error"])
-    cmds["version"] = cmd_version
-
-    # we override "build_py" in both distutils and setuptools
-    #
-    # most invocation pathways end up running build_py:
-    #  distutils/build -> build_py
-    #  distutils/install -> distutils/build ->..
-    #  setuptools/bdist_wheel -> distutils/install ->..
-    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
-    #  setuptools/install -> bdist_egg ->..
-    #  setuptools/develop -> ?
-    #  pip install:
-    #   copies source tree to a tempdir before running egg_info/etc
-    #   if .git isn't copied too, 'git describe' will fail
-    #   then does setup.py bdist_wheel, or sometimes setup.py install
-    #  setup.py egg_info -> ?
-
-    # we override different "build_py" commands for both environments
-    if 'build_py' in cmds:
-        _build_py = cmds['build_py']
-    elif "setuptools" in sys.modules:
-        from setuptools.command.build_py import build_py as _build_py
-    else:
-        from distutils.command.build_py import build_py as _build_py
-
-    class cmd_build_py(_build_py):
-        def run(self):
-            root = get_root()
-            cfg = get_config_from_root(root)
-            versions = get_versions()
-            _build_py.run(self)
-            # now locate _version.py in the new build/ directory and replace
-            # it with an updated value
-            if cfg.versionfile_build:
-                target_versionfile = os.path.join(self.build_lib,
-                                                  cfg.versionfile_build)
-                print("UPDATING %s" % target_versionfile)
-                write_to_version_file(target_versionfile, versions)
-    cmds["build_py"] = cmd_build_py
-
-    if "setuptools" in sys.modules:
-        from setuptools.command.build_ext import build_ext as _build_ext
-    else:
-        from distutils.command.build_ext import build_ext as _build_ext
-
-    class cmd_build_ext(_build_ext):
-        def run(self):
-            root = get_root()
-            cfg = get_config_from_root(root)
-            versions = get_versions()
-            _build_ext.run(self)
-            if self.inplace:
-                # build_ext --inplace will only build extensions in
-                # build/lib<..> dir with no _version.py to write to.
-                # As in place builds will already have a _version.py
-                # in the module dir, we do not need to write one.
-                return
-            # now locate _version.py in the new build/ directory and replace
-            # it with an updated value
-            target_versionfile = os.path.join(self.build_lib,
-                                              cfg.versionfile_source)
-            print("UPDATING %s" % target_versionfile)
-            write_to_version_file(target_versionfile, versions)
-    cmds["build_ext"] = cmd_build_ext
-
-    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
-        from cx_Freeze.dist import build_exe as _build_exe
-        # nczeczulin reports that py2exe won't like the pep440-style string
-        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
-        # setup(console=[{
-        #   "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
-        #   "product_version": versioneer.get_version(),
-        #   ...
-
-        class cmd_build_exe(_build_exe):
-            def run(self):
-                root = get_root()
-                cfg = get_config_from_root(root)
-                versions = get_versions()
-                target_versionfile = cfg.versionfile_source
-                print("UPDATING %s" % target_versionfile)
-                write_to_version_file(target_versionfile, versions)
-
-                _build_exe.run(self)
-                os.unlink(target_versionfile)
-                with open(cfg.versionfile_source, "w") as f:
-                    LONG = LONG_VERSION_PY[cfg.VCS]
-                    f.write(LONG %
-                            {"DOLLAR": "$",
-                             "STYLE": cfg.style,
-                             "TAG_PREFIX": cfg.tag_prefix,
-                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
-                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
-                             })
-        cmds["build_exe"] = cmd_build_exe
-        del cmds["build_py"]
-
-    if 'py2exe' in sys.modules:  # py2exe enabled?
-        from py2exe.distutils_buildexe import py2exe as _py2exe
-
-        class cmd_py2exe(_py2exe):
-            def run(self):
-                root = get_root()
-                cfg = get_config_from_root(root)
-                versions = get_versions()
-                target_versionfile = cfg.versionfile_source
-                print("UPDATING %s" % target_versionfile)
-                write_to_version_file(target_versionfile, versions)
-
-                _py2exe.run(self)
-                os.unlink(target_versionfile)
-                with open(cfg.versionfile_source, "w") as f:
-                    LONG = LONG_VERSION_PY[cfg.VCS]
-                    f.write(LONG %
-                            {"DOLLAR": "$",
-                             "STYLE": cfg.style,
-                             "TAG_PREFIX": cfg.tag_prefix,
-                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
-                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
-                             })
-        cmds["py2exe"] = cmd_py2exe
-
-    # we override different "sdist" commands for both environments
-    if 'sdist' in cmds:
-        _sdist = cmds['sdist']
-    elif "setuptools" in sys.modules:
-        from setuptools.command.sdist import sdist as _sdist
-    else:
-        from distutils.command.sdist import sdist as _sdist
-
-    class cmd_sdist(_sdist):
-        def run(self):
-            versions = get_versions()
-            self._versioneer_generated_versions = versions
-            # unless we update this, the command will keep using the old
-            # version
-            self.distribution.metadata.version = versions["version"]
-            return _sdist.run(self)
-
-        def make_release_tree(self, base_dir, files):
-            root = get_root()
-            cfg = get_config_from_root(root)
-            _sdist.make_release_tree(self, base_dir, files)
-            # now locate _version.py in the new base_dir directory
-            # (remembering that it may be a hardlink) and replace it with an
-            # updated value
-            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
-            print("UPDATING %s" % target_versionfile)
-            write_to_version_file(target_versionfile,
-                                  self._versioneer_generated_versions)
-    cmds["sdist"] = cmd_sdist
-
-    return cmds
-
-
-CONFIG_ERROR = """
-setup.cfg is missing the necessary Versioneer configuration. You need
-a section like:
-
- [versioneer]
- VCS = git
- style = pep440
- versionfile_source = src/myproject/_version.py
- versionfile_build = myproject/_version.py
- tag_prefix =
- parentdir_prefix = myproject-
-
-You will also need to edit your setup.py to use the results:
-
- import versioneer
- setup(version=versioneer.get_version(),
-       cmdclass=versioneer.get_cmdclass(), ...)
-
-Please read the docstring in ./versioneer.py for configuration instructions,
-edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
-"""
-
-SAMPLE_CONFIG = """
-# See the docstring in versioneer.py for instructions. Note that you must
-# re-run 'versioneer.py setup' after changing this section, and commit the
-# resulting files.
-
-[versioneer]
-#VCS = git
-#style = pep440
-#versionfile_source =
-#versionfile_build =
-#tag_prefix =
-#parentdir_prefix =
-
-"""
-
-INIT_PY_SNIPPET = """
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
-"""
-
-
-def do_setup():
-    """Do main VCS-independent setup function for installing Versioneer."""
-    root = get_root()
-    try:
-        cfg = get_config_from_root(root)
-    except (EnvironmentError, configparser.NoSectionError,
-            configparser.NoOptionError) as e:
-        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
-            print("Adding sample versioneer config to setup.cfg",
-                  file=sys.stderr)
-            with open(os.path.join(root, "setup.cfg"), "a") as f:
-                f.write(SAMPLE_CONFIG)
-        print(CONFIG_ERROR, file=sys.stderr)
-        return 1
-
-    print(" creating %s" % cfg.versionfile_source)
-    with open(cfg.versionfile_source, "w") as f:
-        LONG = LONG_VERSION_PY[cfg.VCS]
-        f.write(LONG % {"DOLLAR": "$",
-                        "STYLE": cfg.style,
-                        "TAG_PREFIX": cfg.tag_prefix,
-                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
-                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
-                        })
-
-    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
-                       "__init__.py")
-    if os.path.exists(ipy):
-        try:
-            with open(ipy, "r") as f:
-                old = f.read()
-        except EnvironmentError:
-            old = ""
-        if INIT_PY_SNIPPET not in old:
-            print(" appending to %s" % ipy)
-            with open(ipy, "a") as f:
-                f.write(INIT_PY_SNIPPET)
-        else:
-            print(" %s unmodified" % ipy)
-    else:
-        print(" %s doesn't exist, ok" % ipy)
-        ipy = None
-
-    # Make sure both the top-level "versioneer.py" and versionfile_source
-    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
-    # they'll be copied into source distributions. Pip won't be able to
-    # install the package without this.
-    manifest_in = os.path.join(root, "MANIFEST.in")
-    simple_includes = set()
-    try:
-        with open(manifest_in, "r") as f:
-            for line in f:
-                if line.startswith("include "):
-                    for include in line.split()[1:]:
-                        simple_includes.add(include)
-    except EnvironmentError:
-        pass
-    # That doesn't cover everything MANIFEST.in can do
-    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
-    # it might give some false negatives. Appending redundant 'include'
-    # lines is safe, though.
-    if "versioneer.py" not in simple_includes:
-        print(" appending 'versioneer.py' to MANIFEST.in")
-        with open(manifest_in, "a") as f:
-            f.write("include versioneer.py\n")
-    else:
-        print(" 'versioneer.py' already in MANIFEST.in")
-    if cfg.versionfile_source not in simple_includes:
-        print(" appending versionfile_source ('%s') to MANIFEST.in" %
-              cfg.versionfile_source)
-        with open(manifest_in, "a") as f:
-            f.write("include %s\n" % cfg.versionfile_source)
-    else:
-        print(" versionfile_source already in MANIFEST.in")
-
-    # Make VCS-specific changes. For git, this means creating/changing
-    # .gitattributes to mark _version.py for export-subst keyword
-    # substitution.
-    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
-    return 0
-
-
-def scan_setup_py():
-    """Validate the contents of setup.py against Versioneer's expectations."""
-    found = set()
-    setters = False
-    errors = 0
-    with open("setup.py", "r") as f:
-        for line in f.readlines():
-            if "import versioneer" in line:
-                found.add("import")
-            if "versioneer.get_cmdclass()" in line:
-                found.add("cmdclass")
-            if "versioneer.get_version()" in line:
-                found.add("get_version")
-            if "versioneer.VCS" in line:
-                setters = True
-            if "versioneer.versionfile_source" in line:
-                setters = True
-    if len(found) != 3:
-        print("")
-        print("Your setup.py appears to be missing some important items")
-        print("(but I might be wrong). Please make sure it has something")
-        print("roughly like the following:")
-        print("")
-        print(" import versioneer")
-        print(" setup( version=versioneer.get_version(),")
-        print("        cmdclass=versioneer.get_cmdclass(),  ...)")
-        print("")
-        errors += 1
-    if setters:
-        print("You should remove lines like 'versioneer.VCS = ' and")
-        print("'versioneer.versionfile_source = ' . This configuration")
-        print("now lives in setup.cfg, and should be removed from setup.py")
-        print("")
-        errors += 1
-    return errors
-
-
-if __name__ == "__main__":
-    cmd = sys.argv[1]
-    if cmd == "setup":
-        errors = do_setup()
-        errors += scan_setup_py()
-        if errors:
-            sys.exit(1)
diff --git a/docker-compose/Makefile b/docker-compose/Makefile
index 693bc26cbe78a45ead288c49ed2929ca7944c920..1b7983ed19bd5ada525d91d30f45fb1d45fea012 100644
--- a/docker-compose/Makefile
+++ b/docker-compose/Makefile
@@ -2,30 +2,33 @@
 MAKEPATH := $(abspath $(lastword $(MAKEFILE_LIST)))
 BASEDIR := $(notdir $(patsubst %/,%,$(dir $(MAKEPATH))))
 
+DOCKER_COMPOSE_ENV_FILE := $(abspath .env)
 COMPOSE_FILES := $(wildcard *.yml)
-COMPOSE_FILE_ARGS := $(foreach yml,$(COMPOSE_FILES),-f $(yml))
+COMPOSE_FILE_ARGS := --env-file $(DOCKER_COMPOSE_ENV_FILE) $(foreach yml,$(COMPOSE_FILES),-f $(yml))
 
 ATTACH_COMPOSE_FILE_ARGS := $(foreach yml,$(filter-out tango.yml,$(COMPOSE_FILES)),-f $(yml))
 
 # If the first make argument is "start" or "stop"...
 ifeq (start,$(firstword $(MAKECMDGOALS)))
-  SERVICE_TARGET = true
+    SERVICE_TARGET = true
 else ifeq (stop,$(firstword $(MAKECMDGOALS)))
-  SERVICE_TARGET = true
+    SERVICE_TARGET = true
 else ifeq (attach,$(firstword $(MAKECMDGOALS)))
-  SERVICE_TARGET = true
-ifndef NETWORK_MODE
-$(error NETWORK_MODE must specify the network to attach to, e.g., make NETWORK_MODE=tangonet-powersupply ...)
-endif
-ifndef TANGO_HOST
-$(error TANGO_HOST must specify the Tango database device, e.g., make TANGO_HOST=powersupply-databaseds:10000 ...)
-endif
+    SERVICE_TARGET = true
+    ifndef NETWORK_MODE
+        $(error NETWORK_MODE must specify the network to attach to, e.g., make NETWORK_MODE=tangonet-powersupply ...)
+    endif
+
+    ifndef TANGO_HOST
+        $(error TANGO_HOST must specify the Tango database device, e.g., make TANGO_HOST=powersupply-databaseds:10000 ...)
+    endif
 endif
+
 ifdef SERVICE_TARGET
-  # .. then use the rest as arguments for the make target
-  SERVICE := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
-  # ...and turn them into do-nothing targets
-  $(eval $(SERVICE):;@:)
+    # .. then use the rest as arguments for the make target
+    SERVICE := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
+    # ...and turn them into do-nothing targets
+    $(eval $(SERVICE):;@:)
 endif
 
 #
@@ -35,37 +38,41 @@ endif
 # time.
 #
 ifneq ($(CI_JOB_ID),)
-NETWORK_MODE := tangonet-$(CI_JOB_ID)
-CONTAINER_NAME_PREFIX := $(CI_JOB_ID)-
+    NETWORK_MODE := tangonet-$(CI_JOB_ID)
+    CONTAINER_NAME_PREFIX := $(CI_JOB_ID)-
 else
-CONTAINER_NAME_PREFIX :=
-$(info Network mode cannot be host for the archiver! It won't work unless you set the env var CI_JOB_ID=local)
+    CONTAINER_NAME_PREFIX :=
+    $(info Network mode cannot be host for the archiver! It won't work unless you set the env var CI_JOB_ID=local)
 endif
 
 ifeq ($(OS),Windows_NT)
     $(error Sorry, Windows is not supported yet)
 else
-	UNAME_S := $(shell uname -s)
-	ifeq ($(UNAME_S),Linux)
-		DISPLAY ?= :0.0
-		NETWORK_MODE ?= host
-		XAUTHORITY_MOUNT := /tmp/.X11-unix:/tmp/.X11-unix
-		XAUTHORITY ?= /hosthome/.Xauthority
-		# /bin/sh (=dash) does not evaluate 'docker network' conditionals correctly
-		SHELL := /bin/bash
-	endif
-	ifeq ($(UNAME_S),Darwin)
-		IF_INTERFACE := $(shell scutil --nwi | grep 'Network interfaces:' | cut -d' ' -f3)
-		IP_ADDRESS := $(shell scutil --nwi | grep 'address' | cut -d':' -f2 | tr -d ' ' | head -n1)
-		DISPLAY := $(IP_ADDRESS):0
-		# Make sure that Darwin, especially from macOS Catalina on,
-		# allows X access from our Docker containers.
-		ADD_TO_XHOST := $(shell xhost +$(IP_ADDRESS))
-		# network_mode = host doesn't work on MacOS, so fix to the internal network
-		NETWORK_MODE ?= tangonet
-		XAUTHORITY_MOUNT := $(HOME)/.Xauthority:/hosthome/.Xauthority:ro
-		XAUTHORITY := /hosthome/.Xauthority
-	endif
+    UNAME_S := $(shell uname -s)
+
+    ifeq ($(UNAME_S),Linux)
+        DISPLAY ?= :0.0
+        NETWORK_MODE ?= host
+        XAUTHORITY_MOUNT := /tmp/.X11-unix:/tmp/.X11-unix
+        XAUTHORITY ?= /hosthome/.Xauthority
+        # /bin/sh (=dash) does not evaluate 'docker network' conditionals correctly
+        SHELL := /bin/bash
+    else ifeq ($(UNAME_S),Darwin)
+        IF_INTERFACE := $(shell scutil --nwi | grep 'Network interfaces:' | cut -d' ' -f3)
+        IP_ADDRESS := $(shell scutil --nwi | grep 'address' | cut -d':' -f2 | tr -d ' ' | head -n1)
+        DISPLAY := $(IP_ADDRESS):0
+        # Make sure that Darwin, especially from macOS Catalina on,
+        # allows X access from our Docker containers.
+        ADD_TO_XHOST := $(shell xhost +$(IP_ADDRESS))
+        # network_mode = host doesn't work on MacOS, so fix to the internal network
+        ifeq ($(NETWORK_MODE),)
+            NETWORK_MODE := tangonet
+        else
+            NETWORK_MODE := $(NETWORK_MODE)
+        endif
+        XAUTHORITY_MOUNT := $(HOME)/.Xauthority:/hosthome/.Xauthority:ro
+        XAUTHORITY := /hosthome/.Xauthority
+    endif
 endif
 
 #
@@ -73,16 +80,32 @@ endif
 # machine rather than at the container.
 #
 ifeq ($(NETWORK_MODE),host)
-	TANGO_HOST := $(shell hostname):10000
-	MYSQL_HOST := $(shell hostname):3306
+    TANGO_HOST := $(shell hostname):10000
+    MYSQL_HOST := $(shell hostname):3306
 else
-	TANGO_HOST := $(CONTAINER_NAME_PREFIX)databaseds:10000
-	MYSQL_HOST := $(CONTAINER_NAME_PREFIX)tangodb:3306
+    ifeq ($(TANGO_HOST),)
+        TANGO_HOST := $(CONTAINER_NAME_PREFIX)databaseds:10000
+    else
+        TANGO_HOST := $(TANGO_HOST)
+    endif
+
+    ifeq ($(MYSQL_HOST),)
+        MYSQL_HOST := $(CONTAINER_NAME_PREFIX)tangodb:3306
+    else
+        MYSQL_HOST := $(MYSQL_HOST)
+    endif
 endif
 
-DOCKER_COMPOSE_ARGS := DISPLAY=$(DISPLAY) XAUTHORITY=$(XAUTHORITY) TANGO_HOST=$(TANGO_HOST) \
-		NETWORK_MODE=$(NETWORK_MODE) XAUTHORITY_MOUNT=$(XAUTHORITY_MOUNT) TANGO_SKA_CONTAINER_MOUNT=$(TANGO_SKA_CONTAINER_MOUNT) TANGO_LOFAR_CONTAINER_MOUNT=$(TANGO_LOFAR_CONTAINER_MOUNT) TANGO_LOFAR_CONTAINER_DIR=${TANGO_LOFAR_CONTAINER_DIR} MYSQL_HOST=$(MYSQL_HOST) \
-		CONTAINER_NAME_PREFIX=$(CONTAINER_NAME_PREFIX) COMPOSE_IGNORE_ORPHANS=true CONTAINER_EXECUTION_UID=$(shell id -u)
+DOCKER_COMPOSE_ARGS := DISPLAY=$(DISPLAY) \
+    XAUTHORITY=$(XAUTHORITY) \
+    TANGO_HOST=$(TANGO_HOST) \
+    NETWORK_MODE=$(NETWORK_MODE) \
+    XAUTHORITY_MOUNT=$(XAUTHORITY_MOUNT) \
+    TANGO_LOFAR_CONTAINER_MOUNT=$(TANGO_LOFAR_CONTAINER_MOUNT) \
+    TANGO_LOFAR_CONTAINER_DIR=${TANGO_LOFAR_CONTAINER_DIR} MYSQL_HOST=$(MYSQL_HOST) \
+    CONTAINER_NAME_PREFIX=$(CONTAINER_NAME_PREFIX) \
+    COMPOSE_IGNORE_ORPHANS=true \
+    CONTAINER_EXECUTION_UID=$(shell id -u)
 
 
 .PHONY: up down minimal start stop status clean pull help
@@ -132,4 +155,3 @@ clean: down  ## clear all TANGO database entries
 
 help:   ## show this help.
 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
-
diff --git a/docker-compose/archiver.yml b/docker-compose/archiver.yml
index f471de6285a23e9d4969b4d840e9b83accaac22e..98200f610c887a6a40cdc27c8e87dfbdac8b22a7 100644
--- a/docker-compose/archiver.yml
+++ b/docker-compose/archiver.yml
@@ -63,7 +63,6 @@ services:
              json2tango -w -a -u /tango-archiver/data/archiver-devices.json &&
              sleep infinity"
     volumes:
-      - ${TANGO_SKA_CONTAINER_MOUNT}
       - ${TANGO_LOFAR_CONTAINER_MOUNT}
       - ${HOME}:/hosthome
       - ../docker/tango/tango-archiver:/tango-archiver
diff --git a/docker-compose/device-pcc.yml b/docker-compose/device-pcc.yml
index 73feab180ba03a8a6ae0abc11b89a460341fcf9a..026ceff8ded94fb2d6d8951e6f8b33b758dbb467 100644
--- a/docker-compose/device-pcc.yml
+++ b/docker-compose/device-pcc.yml
@@ -14,7 +14,11 @@ version: '2'
 
 services:
   device-pcc:
-    image: lofar-device-base
+    image: device-pcc
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: lofar-device-base
     container_name: ${CONTAINER_NAME_PREFIX}device-pcc
     network_mode: ${NETWORK_MODE}
     volumes:
diff --git a/docker-compose/device-sdp.yml b/docker-compose/device-sdp.yml
index fd98bfede32634c0ab380a0d2f4fe6fec0096267..30e069a5eb0d38c9ccb1e9dbe1ffaf678dd0627c 100644
--- a/docker-compose/device-sdp.yml
+++ b/docker-compose/device-sdp.yml
@@ -14,7 +14,11 @@ version: '2'
 
 services:
   device-sdp:
-    image: lofar-device-base
+    image: device-sdp
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: lofar-device-base
     container_name: ${CONTAINER_NAME_PREFIX}device-sdp
     network_mode: ${NETWORK_MODE}
     volumes:
diff --git a/docker-compose/itango.yml b/docker-compose/itango.yml
index d131d405371bc6ff7e59892587b2bd2aba9d1fd6..4c12fe3a00fe8ee5fda3a26668bc24adcbf25a72 100644
--- a/docker-compose/itango.yml
+++ b/docker-compose/itango.yml
@@ -21,7 +21,6 @@ services:
     container_name: ${CONTAINER_NAME_PREFIX}itango
     network_mode: ${NETWORK_MODE}
     volumes:
-        - ${TANGO_SKA_CONTAINER_MOUNT}
         - ${TANGO_LOFAR_CONTAINER_MOUNT}
         - ${HOME}:/hosthome
     environment:
diff --git a/docker-compose/jive.yml b/docker-compose/jive.yml
index 1e8561d408b6aecf5d489c542bf123bf89b6121e..d3b8b06f9778e3e5f53953a5f2eb26725d657e60 100644
--- a/docker-compose/jive.yml
+++ b/docker-compose/jive.yml
@@ -2,6 +2,12 @@
 # Docker compose file that launches Jive, sending output to a remote X11
 # display.
 #
+# This container will always run on the same network as the host,
+# to make sure the DISPLAY variable can be used verbatim. For the
+# same reason, TANGO_HOST is hardcoded to be at localhost:10000:
+# the docker network offering our tangodb also exposes it on port 10000
+# on the host.
+#
 # Defines:
 #   - jive: container running Jive
 #
@@ -14,19 +20,18 @@ services:
   jive:
     image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}/tango-java:latest
     container_name: ${CONTAINER_NAME_PREFIX}jive
-    network_mode: ${NETWORK_MODE}
+    network_mode: host
     volumes:
       - ${XAUTHORITY_MOUNT}
-      - ${TANGO_SKA_CONTAINER_MOUNT}
       - ${TANGO_LOFAR_CONTAINER_MOUNT}
       - ${HOME}:/hosthome
     environment:
       - XAUTHORITY=${XAUTHORITY}
       - DISPLAY=${DISPLAY}
-      - TANGO_HOST=${TANGO_HOST}
+      - TANGO_HOST=localhost:10000
     entrypoint:
       - /usr/local/bin/wait-for-it.sh
-      - ${TANGO_HOST}
+      - localhost:10000
       - --timeout=30
       - --strict
       - --
diff --git a/docker-compose/jupyter.yml b/docker-compose/jupyter.yml
index 71daf48e08afed3ae2ba8c873b216561b9bde213..0ac641b22f78a6a1d332e572dc3d9b6db470a5bf 100644
--- a/docker-compose/jupyter.yml
+++ b/docker-compose/jupyter.yml
@@ -13,10 +13,11 @@ services:
   jupyter:
     build:
         context: jupyter
+        args:
+            CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
     container_name: ${CONTAINER_NAME_PREFIX}jupyter
     network_mode: ${NETWORK_MODE}
     volumes:
-        - ${TANGO_SKA_CONTAINER_MOUNT}
         - ${TANGO_LOFAR_CONTAINER_MOUNT}
         - ${TANGO_LOFAR_LOCAL_DIR}/jupyter-notebooks:/jupyter-notebooks:rw
         - ${HOME}:/hosthome
diff --git a/docker-compose/jupyter/Dockerfile b/docker-compose/jupyter/Dockerfile
index 97ef7ca63daa60331ae0e8dee8f5d70fa143be44..62fb7395184b9bc9f4540c8ad68acabc5dc26713 100644
--- a/docker-compose/jupyter/Dockerfile
+++ b/docker-compose/jupyter/Dockerfile
@@ -1,6 +1,10 @@
 ARG VERSION=latest
 FROM nexus.engageska-portugal.pt/ska-docker/tango-itango:${VERSION}
 
+# UID if the user that this container will run under. This is needed to give directories
+# that are needed for temporary storage the proper owner and access rights.
+ARG CONTAINER_EXECUTION_UID=1000
+
 RUN sudo pip3 install jupyter
 RUN sudo pip3 install ipykernel
 RUN sudo pip3 install jupyter_bokeh
@@ -13,7 +17,6 @@ RUN sudo jupyter nbextension enable jupyter_bokeh --py --sys-prefix
 
 # Install profiles for ipython & jupyter
 COPY ipython-profiles /opt/ipython-profiles/
-RUN sudo chown tango.tango -R /opt/ipython-profiles
 COPY jupyter-kernels /usr/local/share/jupyter/kernels/
 
 # Install patched jupyter executable
@@ -27,5 +30,7 @@ ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/
 RUN sudo chmod +x /usr/bin/tini
 
 # Make sure Jupyter can write to the home directory
-ENV HOME=/home/tango
-RUN chmod a+rwx /home/tango
+ENV HOME=/home/user
+RUN sudo mkdir -p ${HOME}
+RUN sudo chown ${CONTAINER_EXECUTION_UID} -R ${HOME}
+RUN sudo chown ${CONTAINER_EXECUTION_UID} -R /opt/ipython-profiles
diff --git a/docker-compose/lofar-device-base/Dockerfile b/docker-compose/lofar-device-base/Dockerfile
index 85326698052c8e9e48b1270377a513e3dbcda5d3..e0b6efd3d2e44f7f6f92283ede8916c70d7b8aaf 100644
--- a/docker-compose/lofar-device-base/Dockerfile
+++ b/docker-compose/lofar-device-base/Dockerfile
@@ -1,5 +1,7 @@
 FROM nexus.engageska-portugal.pt/ska-docker/tango-itango:latest
 
+RUN sudo apt-get update && sudo apt-get install -y git && sudo apt-get clean
+
 COPY lofar-requirements.txt /lofar-requirements.txt
 RUN sudo pip3 install -r /lofar-requirements.txt
 
diff --git a/docker-compose/lofar-device-base/lofar-requirements.txt b/docker-compose/lofar-device-base/lofar-requirements.txt
index 7ed18f76527891ca48953150977b2f23703c9baa..69d52984a264c3a53bbcfece15be810ccaa32e7b 100644
--- a/docker-compose/lofar-device-base/lofar-requirements.txt
+++ b/docker-compose/lofar-device-base/lofar-requirements.txt
@@ -1,3 +1,4 @@
 opcua >= 0.98.9
 astropy
 python-logstash-async
+gitpython
diff --git a/docker-compose/pogo.yml b/docker-compose/pogo.yml
index c9770ca7b355ea57a96e06b7d76a2d995c3e41a6..9e2d377c1c40f3276f979af621d986a04a04a6b7 100644
--- a/docker-compose/pogo.yml
+++ b/docker-compose/pogo.yml
@@ -26,7 +26,6 @@ services:
     volumes:
       - pogo:/home/tango
       - ${XAUTHORITY_MOUNT}
-      - ${TANGO_SKA_CONTAINER_MOUNT}
       - ${TANGO_LOFAR_CONTAINER_MOUNT}
       - ${HOME}:/hosthome:rw
     environment:
diff --git a/jupyter-notebooks/ini_device.ipynb b/jupyter-notebooks/ini_device.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..ba365f263ca35e627b0430f26a02d53af059333a
--- /dev/null
+++ b/jupyter-notebooks/ini_device.ipynb
@@ -0,0 +1,238 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 128,
+   "id": "waiting-chance",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import time\n",
+    "import numpy"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 146,
+   "id": "moving-alexandria",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "d=DeviceProxy(\"LTS/ini_device/1\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 198,
+   "id": "ranking-aluminum",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Device is now in on state\n"
+     ]
+    }
+   ],
+   "source": [
+    "state = str(d.state())\n",
+    "\n",
+    "if state == \"OFF\":\n",
+    "    d.initialise()\n",
+    "    time.sleep(1)\n",
+    "state = str(d.state())\n",
+    "if state == \"STANDBY\":\n",
+    "    d.on()\n",
+    "state = str(d.state())\n",
+    "if state == \"ON\":\n",
+    "    print(\"Device is now in on state\")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 199,
+   "id": "beneficial-evidence",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "double_scalar_RW [0.]\n",
+      "double_scalar_R [1.2]\n",
+      "bool_scalar_RW [False]\n",
+      "bool_scalar_R [ True]\n",
+      "int_scalar_RW [0]\n",
+      "int_scalar_R [5]\n",
+      "str_scalar_RW ('',)\n",
+      "str_scalar_R ('this is',)\n",
+      "double_spectrum_RW [0. 0. 0. 0.]\n",
+      "double_spectrum_R [1.2 2.3 3.4 4.5]\n",
+      "bool_spectrum_RW [False False False False]\n",
+      "bool_spectrum_R [ True  True False False]\n",
+      "int_spectrum_RW [0 0 0 0]\n",
+      "int_spectrum_R [1 2 3 4]\n",
+      "str_spectrum_RW ('', '', '', '')\n",
+      "str_spectrum_R ('\"a\"', ' \"b\"', ' \"c\"', ' \"d\"')\n",
+      "double_image_RW [[0. 0. 0.]\n",
+      " [0. 0. 0.]]\n",
+      "double_image_R [[1.2 2.3 3.4]\n",
+      " [4.5 5.6 6.7]]\n",
+      "bool_image_RW [[False False False]\n",
+      " [False False False]]\n",
+      "bool_image_R [[ True  True False]\n",
+      " [False  True False]]\n",
+      "int_image_RW [[0 0 0]\n",
+      " [0 0 0]]\n",
+      "int_image_R [[1 2 3]\n",
+      " [4 5 6]]\n",
+      "str_image_RW (('', '', ''), ('', '', ''))\n",
+      "str_image_R (('\"a\"', ' \"b\"', ' \"c\"'), (' \"d\"', ' \"e\"', ' \"f\"'))\n",
+      "State <function __get_command_func.<locals>.f at 0x7f3efee95c80>\n",
+      "Status <function __get_command_func.<locals>.f at 0x7f3efee95c80>\n"
+     ]
+    }
+   ],
+   "source": [
+    "attr_names = d.get_attribute_list()\n",
+    "\n",
+    "for i in attr_names:\n",
+    "    try:\n",
+    "        exec(\"print(i, d.{})\".format(i))\n",
+    "    except:\n",
+    "        pass\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 93,
+   "id": "sharing-mechanics",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "array([0])"
+      ]
+     },
+     "execution_count": 93,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "d.int_scalar_RW"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 203,
+   "id": "2f03759a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "d.str_image_RW = [[\"1\", \"2\", \"3\"],[\"4\", \"5\", \"6\"]]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 204,
+   "id": "3187f3bb",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(('1', '2', '3'), ('4', '5', '6'))"
+      ]
+     },
+     "execution_count": 204,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "d.str_image_RW"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 192,
+   "id": "eb406dce",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "\"['a', 'b', 'c', 'd', 'e', 'f']\""
+      ]
+     },
+     "execution_count": 192,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "numpy.str_([\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 197,
+   "id": "7b270085",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "6"
+      ]
+     },
+     "execution_count": 197,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "array = []\n",
+    "string = '\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"'\n",
+    "\n",
+    "for i in string.split(\",\"):\n",
+    "    value = numpy.str_(i)\n",
+    "    array.append(value)\n",
+    "\n",
+    "len(array)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "69ecc437",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "StationControl",
+   "language": "python",
+   "name": "stationcontrol"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}