diff --git a/.cmake-format.py b/.cmake-format.py
new file mode 100644
index 0000000000000000000000000000000000000000..94457f5b233fd324995fef4e7a2a39e6bef5ec42
--- /dev/null
+++ b/.cmake-format.py
@@ -0,0 +1,240 @@
+# ----------------------------------
+# Options affecting listfile parsing
+# ----------------------------------
+with section("parse"):
+
+  # Specify structure for custom cmake functions
+  additional_commands = { 'foo': { 'flags': ['BAR', 'BAZ'],
+             'kwargs': {'DEPENDS': '*', 'HEADERS': '*', 'SOURCES': '*'}}}
+
+  # Override configurations per-command where available
+  override_spec = {}
+
+  # Specify variable tags.
+  vartags = []
+
+  # Specify property tags.
+  proptags = []
+
+# -----------------------------
+# Options affecting formatting.
+# -----------------------------
+with section("format"):
+
+  # Disable formatting entirely, making cmake-format a no-op
+  disable = False
+
+  # How wide to allow formatted cmake files
+  line_width = 80
+
+  # How many spaces to tab for indent
+  tab_size = 2
+
+  # If true, lines are indented using tab characters (utf-8 0x09) instead of
+  # <tab_size> space characters (utf-8 0x20). In cases where the layout would
+  # require a fractional tab character, the behavior of the  fractional
+  # indentation is governed by <fractional_tab_policy>
+  use_tabchars = False
+
+  # If <use_tabchars> is True, then the value of this variable indicates how
+  # fractional indentions are handled during whitespace replacement. If set to
+  # 'use-space', fractional indentation is left as spaces (utf-8 0x20). If set
+  # to `round-up` fractional indentation is replaced with a single tab character
+  # (utf-8 0x09) effectively shifting the column to the next tabstop
+  fractional_tab_policy = 'use-space'
+
+  # If an argument group contains more than this many sub-groups (parg or kwarg
+  # groups) then force it to a vertical layout.
+  max_subgroups_hwrap = 2
+
+  # If a positional argument group contains more than this many arguments, then
+  # force it to a vertical layout.
+  max_pargs_hwrap = 6
+
+  # If a cmdline positional group consumes more than this many lines without
+  # nesting, then invalidate the layout (and nest)
+  max_rows_cmdline = 2
+
+  # If true, separate flow control names from their parentheses with a space
+  separate_ctrl_name_with_space = False
+
+  # If true, separate function names from parentheses with a space
+  separate_fn_name_with_space = False
+
+  # If a statement is wrapped to more than one line, than dangle the closing
+  # parenthesis on its own line.
+  dangle_parens = False
+
+  # If the trailing parenthesis must be 'dangled' on its on line, then align it
+  # to this reference: `prefix`: the start of the statement,  `prefix-indent`:
+  # the start of the statement, plus one indentation  level, `child`: align to
+  # the column of the arguments
+  dangle_align = 'prefix'
+
+  # If the statement spelling length (including space and parenthesis) is
+  # smaller than this amount, then force reject nested layouts.
+  min_prefix_chars = 4
+
+  # If the statement spelling length (including space and parenthesis) is larger
+  # than the tab width by more than this amount, then force reject un-nested
+  # layouts.
+  max_prefix_chars = 10
+
+  # If a candidate layout is wrapped horizontally but it exceeds this many
+  # lines, then reject the layout.
+  max_lines_hwrap = 2
+
+  # What style line endings to use in the output.
+  line_ending = 'unix'
+
+  # Format command names consistently as 'lower' or 'upper' case
+  command_case = 'canonical'
+
+  # Format keywords consistently as 'lower' or 'upper' case
+  keyword_case = 'unchanged'
+
+  # A list of command names which should always be wrapped
+  always_wrap = []
+
+  # If true, the argument lists which are known to be sortable will be sorted
+  # lexicographically
+  enable_sort = True
+
+  # If true, the parsers may infer whether or not an argument list is sortable
+  # (without annotation).
+  autosort = False
+
+  # By default, if cmake-format cannot successfully fit everything into the
+  # desired linewidth it will apply the last, most aggressive attempt that it
+  # made. If this flag is True, however, cmake-format will print error, exit
+  # with non-zero status code, and write-out nothing
+  require_valid_layout = False
+
+  # A dictionary mapping layout nodes to a list of wrap decisions. See the
+  # documentation for more information.
+  layout_passes = {}
+
+# ------------------------------------------------
+# Options affecting comment reflow and formatting.
+# ------------------------------------------------
+with section("markup"):
+
+  # What character to use for bulleted lists
+  bullet_char = '*'
+
+  # What character to use as punctuation after numerals in an enumerated list
+  enum_char = '.'
+
+  # If comment markup is enabled, don't reflow the first comment block in each
+  # listfile. Use this to preserve formatting of your copyright/license
+  # statements.
+  first_comment_is_literal = False
+
+  # If comment markup is enabled, don't reflow any comment block which matches
+  # this (regex) pattern. Default is `None` (disabled).
+  literal_comment_pattern = None
+
+  # Regular expression to match preformat fences in comments default=
+  # ``r'^\s*([`~]{3}[`~]*)(.*)$'``
+  fence_pattern = '^\\s*([`~]{3}[`~]*)(.*)$'
+
+  # Regular expression to match rulers in comments default=
+  # ``r'^\s*[^\w\s]{3}.*[^\w\s]{3}$'``
+  ruler_pattern = '^\\s*[^\\w\\s]{3}.*[^\\w\\s]{3}$'
+
+  # If a comment line starts with this pattern then it is explicitly a
+  # trailing comment for the preceding argument. Default is '#<'
+  explicit_trailing_pattern = '#<'
+
+  # If a comment line starts with at least this many consecutive hash
+  # characters, then don't lstrip() them off. This allows for lazy hash rulers
+  # where the first hash char is not separated by space
+  hashruler_min_length = 10
+
+  # If true, then insert a space between the first hash char and remaining hash
+  # chars in a hash ruler, and normalize its length to fill the column
+  canonicalize_hashrulers = True
+
+  # enable comment markup parsing and reflow
+  enable_markup = False
+
+# ----------------------------
+# Options affecting the linter
+# ----------------------------
+with section("lint"):
+
+  # a list of lint codes to disable
+  disabled_codes = []
+
+  # regular expression pattern describing valid function names
+  function_pattern = '[0-9a-z_]+'
+
+  # regular expression pattern describing valid macro names
+  macro_pattern = '[0-9A-Z_]+'
+
+  # regular expression pattern describing valid names for variables with global
+  # (cache) scope
+  global_var_pattern = '[A-Z][0-9A-Z_]+'
+
+  # regular expression pattern describing valid names for variables with global
+  # scope (but internal semantic)
+  internal_var_pattern = '_[A-Z][0-9A-Z_]+'
+
+  # regular expression pattern describing valid names for variables with local
+  # scope
+  local_var_pattern = '[a-z][a-z0-9_]+'
+
+  # regular expression pattern describing valid names for private directory
+  # variables
+  private_var_pattern = '_[0-9a-z_]+'
+
+  # regular expression pattern describing valid names for public directory
+  # variables
+  public_var_pattern = '[A-Z][0-9A-Z_]+'
+
+  # regular expression pattern describing valid names for function/macro
+  # arguments and loop variables.
+  argument_var_pattern = '[a-z][a-z0-9_]+'
+
+  # regular expression pattern describing valid names for keywords used in
+  # functions or macros
+  keyword_pattern = '[A-Z][0-9A-Z_]+'
+
+  # In the heuristic for C0201, how many conditionals to match within a loop in
+  # before considering the loop a parser.
+  max_conditionals_custom_parser = 2
+
+  # Require at least this many newlines between statements
+  min_statement_spacing = 1
+
+  # Require no more than this many newlines between statements
+  max_statement_spacing = 2
+  max_returns = 6
+  max_branches = 12
+  max_arguments = 5
+  max_localvars = 15
+  max_statements = 50
+
+# -------------------------------
+# Options affecting file encoding
+# -------------------------------
+with section("encode"):
+
+  # If true, emit the unicode byte-order mark (BOM) at the start of the file
+  emit_byteorder_mark = False
+
+  # Specify the encoding of the input file. Defaults to utf-8
+  input_encoding = 'utf-8'
+
+  # Specify the encoding of the output file. Defaults to utf-8. Note that cmake
+  # only claims to support utf-8 so be careful when using anything else
+  output_encoding = 'utf-8'
+
+# -------------------------------------
+# Miscellaneous configurations options.
+# -------------------------------------
+with section("misc"):
+
+  # A dictionary containing any per-command configuration overrides. Currently
+  # only `command_case` is supported.
+  per_command = {}
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0565e481ccf9ec38dae2e555bb8a15c7d4e52aeb..12b29f1aac04ed5556a2c1b878219d3308295a1f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -160,9 +160,10 @@ build-package:
 linting:
   stage: linting
   extends: .failable
+  needs: ["build-base"]
   image: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
   script:
-    - ./scripts/run-clang-format.sh
+    - ./scripts/run-format.sh
 
 unit-test:
   stage: test
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 2aab7b28724e693c0544ed50877dcc79814ffb26..e72bd52b14c955d945e62dad3085e654f12021cd 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,12 +1,11 @@
 # Copyright (C) 2020 ASTRON (Netherlands Institute for Radio Astronomy)
 # SPDX-License-Identifier: GPL-3.0-or-later
 
-# FindHDF5 uses NATIVE_COMMAND in separate_arguments, which requires
-# CMake 3.9.
+# FindHDF5 uses NATIVE_COMMAND in separate_arguments, which requires CMake 3.9.
 cmake_minimum_required(VERSION 3.9)
 
 # CMake >= 3.19.1 gives a warning when this policy is not 'NEW'.
-if (${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.19.1")
+if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.19.1")
   cmake_policy(SET CMP0074 NEW)
 endif()
 
@@ -59,7 +58,6 @@ if(BUILD_PACKAGES)
   add_subdirectory(CPack)
 endif()
 
-
 # === Load external packages ===
 
 # DDECal dependencies
@@ -68,27 +66,36 @@ if(${ARMADILLO_FOUND})
   add_definitions(-DHAVE_ARMADILLO)
   include_directories(${ARMADILLO_INCLUDE_DIRS})
   set(DDE_ARMADILLO_FILES
-    ddecal/constraints/KLFitter.cc
-    ddecal/constraints/PiercePoint.cc
-    ddecal/constraints/ScreenConstraint.cc)
+      ddecal/constraints/KLFitter.cc ddecal/constraints/PiercePoint.cc
+      ddecal/constraints/ScreenConstraint.cc)
 else()
-  message(WARNING "Armadillo was not found, not including screenfitter inside DDECal")
+  message(
+    WARNING "Armadillo was not found, not including screenfitter inside DDECal")
   set(DDE_ARMADILLO_FILES)
 endif()
 
 # Casacore depends on HDF5 -> First load HDF5.
-find_package(HDF5 COMPONENTS C CXX REQUIRED)
+find_package(
+  HDF5
+  COMPONENTS C CXX
+  REQUIRED)
 add_definitions(${HDF5_DEFINITIONS})
 include_directories(${HDF5_INCLUDE_DIRS})
 
-find_package(Casacore COMPONENTS casa ms tables fits REQUIRED)
+find_package(
+  Casacore
+  COMPONENTS casa ms tables fits
+  REQUIRED)
 include_directories(${CASACORE_INCLUDE_DIR})
 
 find_package(CFITSIO REQUIRED)
 include_directories(${CFITSIO_INCLUDE_DIRS})
 
-if (${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.12")
-  find_package(Python3 COMPONENTS Interpreter Development REQUIRED)
+if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.12")
+  find_package(
+    Python3
+    COMPONENTS Interpreter Development
+    REQUIRED)
   find_package(Python3 COMPONENTS NumPy)
 else() # Use old, deprecated means of detecting python.
   find_package(PythonInterp 3 REQUIRED)
@@ -109,44 +116,56 @@ message(STATUS "Using python version ${Python3_VERSION}")
 include_directories(${Python3_INCLUDE_DIRS})
 
 if(BUILD_PACKAGES)
-  set(PYTHON_INSTALL_DIR lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}/dist-packages)
+  set(PYTHON_INSTALL_DIR
+      lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}/dist-packages)
 else()
-  set(PYTHON_INSTALL_DIR lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}/site-packages)
+  set(PYTHON_INSTALL_DIR
+      lib/python${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}/site-packages)
 endif()
 
-#Prevent accidentally finding old BoostConfig.cmake file from casapy
+# Prevent accidentally finding old BoostConfig.cmake file from casapy
 set(Boost_NO_BOOST_CMAKE ON)
-find_package(Boost COMPONENTS filesystem program_options system unit_test_framework REQUIRED)
+find_package(
+  Boost
+  COMPONENTS filesystem program_options system unit_test_framework
+  REQUIRED)
 include_directories(${Boost_INCLUDE_DIR})
 
 find_package(Threads REQUIRED)
 
-
 # === Load astron packages ===
 
 find_package(AOFlagger 3.0.1 REQUIRED)
 include_directories(${AOFLAGGER_INCLUDE_DIR})
 
-# We could use find_package(EveryBeam 0.1.1 REQUIRED),
-# however conditions below make it somewhat more explicit
+# We could use find_package(EveryBeam 0.1.1 REQUIRED), however conditions below
+# make it somewhat more explicit
 find_package(EveryBeam NO_MODULE)
 if(${EVERYBEAM_FOUND})
-  if(${EVERYBEAM_VERSION} VERSION_LESS "0.2.0" OR ${EVERYBEAM_VERSION} VERSION_GREATER_EQUAL "0.3.0")
-    message(FATAL_ERROR "DP3 needs EveryBeam version 0.2.x - with x >= 0 - but found version ${EveryBeam_VERSION}")
+  if(${EVERYBEAM_VERSION} VERSION_LESS "0.2.0" OR ${EVERYBEAM_VERSION}
+                                                  VERSION_GREATER_EQUAL "0.3.0")
+    message(
+      FATAL_ERROR
+        "DP3 needs EveryBeam version 0.2.x - with x >= 0 - but found version ${EveryBeam_VERSION}"
+    )
   endif()
-include_directories(${EVERYBEAM_INCLUDE_DIR})
+  include_directories(${EVERYBEAM_INCLUDE_DIR})
 else(${EVERYBEAM_FOUND})
-  message(FATAL_ERROR "DP3 requires EveryBeam, but EveryBeam was not found. "
-    "Please install https://git.astron.nl/RD/EveryBeam and make sure that "
-    "EveryBeam is added to the CMAKE_PREFIX_PATH")
+  message(
+    FATAL_ERROR
+      "DP3 requires EveryBeam, but EveryBeam was not found. "
+      "Please install https://git.astron.nl/RD/EveryBeam and make sure that "
+      "EveryBeam is added to the CMAKE_PREFIX_PATH")
 endif(${EVERYBEAM_FOUND})
 
 find_package(IDGAPI NO_MODULE QUIET)
 if(IDGAPI_FOUND)
   # Throw error if IDG version < 0.8 or version not provided at all
   if((IDGAPI_VERSION VERSION_LESS "0.8") OR (NOT DEFINED IDGAPI_VERSION))
-    message(FATAL_ERROR "IDGAPI was found, but DP3 requires IDGAPI to have version >= 0.8. "
-    "Please compile IDG repository at a version >= 0.8")
+    message(
+      FATAL_ERROR
+        "IDGAPI was found, but DP3 requires IDGAPI to have version >= 0.8. "
+        "Please compile IDG repository at a version >= 0.8")
   endif()
 endif()
 if(IDGAPI_LIBRARIES AND IDGAPI_INCLUDE_DIRS)
@@ -156,10 +175,12 @@ if(IDGAPI_LIBRARIES AND IDGAPI_INCLUDE_DIRS)
   message(STATUS "Image domain gridder API libraries found.")
 else(IDGAPI_LIBRARIES AND IDGAPI_INCLUDE_DIRS)
   set(IDGAPI_LIBRARIES "")
-  message(WARNING "Image domain gridder API libraries NOT found. IDG prediction will not be available.")
+  message(
+    WARNING
+      "Image domain gridder API libraries NOT found. IDG prediction will not be available."
+  )
 endif(IDGAPI_LIBRARIES AND IDGAPI_INCLUDE_DIRS)
 
-
 # === Load internal submodule packages. ===
 
 # Update submodules as needed
@@ -169,19 +190,27 @@ if(GIT_FOUND AND EXISTS "${PROJECT_SOURCE_DIR}/.git")
   if(GIT_SUBMODULE)
     message(STATUS "Syncing submodules")
     # Account for potential changes in git repo URL's
-    execute_process(COMMAND ${GIT_EXECUTABLE} submodule sync --recursive
-                    WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
-                    RESULT_VARIABLE GIT_SUBMOD_RESULT)
+    execute_process(
+      COMMAND ${GIT_EXECUTABLE} submodule sync --recursive
+      WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
+      RESULT_VARIABLE GIT_SUBMOD_RESULT)
     if(NOT GIT_SUBMOD_RESULT EQUAL "0")
-      message(FATAL_ERROR "Syncing submodules did not succeed"
-              "command '${GIT_EXECUTABLE} submodule sync --recursive' failed with exit code ${GIT_SUBMOD_RESULT}")
+      message(
+        FATAL_ERROR
+          "Syncing submodules did not succeed"
+          "command '${GIT_EXECUTABLE} submodule sync --recursive' failed with exit code ${GIT_SUBMOD_RESULT}"
+      )
     endif()
     message(STATUS "Updating submodules")
-    execute_process(COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive --checkout
-                    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
-                    RESULT_VARIABLE GIT_SUBMOD_RESULT)
+    execute_process(
+      COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive --checkout
+      WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+      RESULT_VARIABLE GIT_SUBMOD_RESULT)
     if(NOT GIT_SUBMOD_RESULT EQUAL "0")
-      message(FATAL_ERROR "git submodule update --init failed with ${GIT_SUBMOD_RESULT}, please checkout submodules")
+      message(
+        FATAL_ERROR
+          "git submodule update --init failed with ${GIT_SUBMOD_RESULT}, please checkout submodules"
+      )
     endif()
   endif()
 endif()
@@ -202,9 +231,10 @@ include_directories("${CMAKE_SOURCE_DIR}/external/schaapcommon/include")
 configure_file(base/Version.h.in base/Version.h)
 include_directories(${CMAKE_CURRENT_BINARY_DIR}/base)
 
-if (USE_LSMR)
+if(USE_LSMR)
   add_definitions(-DUSE_LSMR)
-  add_library(LSMR OBJECT
+  add_library(
+    LSMR OBJECT
     external/clsmr/lsmrblas.f90
     external/clsmr/lsmrblasInterface.f90
     external/clsmr/lsmrCheckModule.f90
@@ -216,21 +246,21 @@ if (USE_LSMR)
     external/clsmr/clsmrblas.f90
     external/clsmr/clsmrblasInterface.f90
     external/clsmr/clsmrDataModule.f90
-    external/clsmr/clsmrModule.f90
-  )
+    external/clsmr/clsmrModule.f90)
 endif()
 
-add_library(Blob OBJECT
+add_library(
+  Blob OBJECT
   blob/BlobAipsIO.cc
   blob/BlobArray.cc
   blob/BlobHeader.cc
   blob/BlobIBufStream.cc
   blob/BlobIStream.cc
   blob/BlobOBufStream.cc
-  blob/BlobOStream.cc
-)
+  blob/BlobOStream.cc)
 
-add_library(Common OBJECT
+add_library(
+  Common OBJECT
   common/BaselineSelect.cc
   common/ClusterDesc.cc
   common/DataConvert.cc
@@ -248,10 +278,10 @@ add_library(Common OBJECT
   common/TypeNames.cc
   common/VdsDesc.cc
   common/VdsMaker.cc
-  common/VdsPartDesc.cc
-)
+  common/VdsPartDesc.cc)
 
-add_library(DDECal OBJECT
+add_library(
+  DDECal OBJECT
   steps/DDECal.cc
   ddecal/Settings.cc
   ddecal/constraints/Constraint.cc
@@ -278,7 +308,8 @@ add_library(DDECal OBJECT
   ddecal/linear_solvers/LLSSolver.cc
   ${DDE_ARMADILLO_FILES})
 
-add_library(DP3_OBJ OBJECT
+add_library(
+  DP3_OBJ OBJECT
   base/Apply.cc
   base/BaselineSelection.cc
   base/BDABuffer.cc
@@ -345,10 +376,10 @@ add_library(DP3_OBJ OBJECT
   steps/Upsample.cc
   steps/UVWFlagger.cc
   steps/ApplyBeam.cc
-  steps/DemixerNew.cc
-)
+  steps/DemixerNew.cc)
 
-add_library(ParmDB OBJECT
+add_library(
+  ParmDB OBJECT
   parmdb/Axis.cc
   parmdb/AxisMapping.cc
   parmdb/Box.cc
@@ -370,46 +401,40 @@ add_library(ParmDB OBJECT
   parmdb/SourceDB.cc
   parmdb/SourceDBBlob.cc
   parmdb/SourceDBCasa.cc
-  parmdb/SourceInfo.cc
-)
+  parmdb/SourceInfo.cc)
 
-add_library(PythonDP3 OBJECT
-  pythondp3/PyStep.cc
-)
+add_library(PythonDP3 OBJECT pythondp3/PyStep.cc)
 
 # dp3_testdyndp3 requires using position independent code.
 set_property(TARGET Blob Common DDECal DP3_OBJ ParmDB PythonDP3
              PROPERTY POSITION_INDEPENDENT_CODE ON)
 if(USE_LSMR)
-set_property(TARGET LSMR
-             PROPERTY POSITION_INDEPENDENT_CODE ON)
+  set_property(TARGET LSMR PROPERTY POSITION_INDEPENDENT_CODE ON)
 endif()
 
 set(DP3_OBJECTS
-  $<TARGET_OBJECTS:Blob>
-  $<TARGET_OBJECTS:Common>
-  $<TARGET_OBJECTS:DDECal>
-  $<TARGET_OBJECTS:DP3_OBJ>
-  $<TARGET_OBJECTS:ParmDB>
-  $<TARGET_OBJECTS:PythonDP3>)
+    $<TARGET_OBJECTS:Blob> $<TARGET_OBJECTS:Common> $<TARGET_OBJECTS:DDECal>
+    $<TARGET_OBJECTS:DP3_OBJ> $<TARGET_OBJECTS:ParmDB>
+    $<TARGET_OBJECTS:PythonDP3>)
 
 if(USE_LSMR)
   list(APPEND DP3_OBJECTS $<TARGET_OBJECTS:LSMR>)
 endif()
 
 set(DP3_LIBRARIES
-  ${AOFLAGGER_LIB}
-  ${ARMADILLO_LIBRARIES}
-  ${Boost_LIBRARIES}
-  ${CASACORE_LIBRARIES}
-  ${CFITSIO_LIBRARY}
-  ${EVERYBEAM_LIB}
-  ${HDF5_LIBRARIES} ${HDF5_CXX_LIBRARIES}
-  ${IDGAPI_LIBRARIES}
-  ${Python3_LIBRARIES}
-  schaapcommon
-  Threads::Threads
-  pybind11::embed)
+    ${AOFLAGGER_LIB}
+    ${ARMADILLO_LIBRARIES}
+    ${Boost_LIBRARIES}
+    ${CASACORE_LIBRARIES}
+    ${CFITSIO_LIBRARY}
+    ${EVERYBEAM_LIB}
+    ${HDF5_LIBRARIES}
+    ${HDF5_CXX_LIBRARIES}
+    ${IDGAPI_LIBRARIES}
+    ${Python3_LIBRARIES}
+    schaapcommon
+    Threads::Threads
+    pybind11::embed)
 
 add_subdirectory(base)
 
@@ -417,14 +442,10 @@ if(NOT CMAKE_SYSTEM_NAME STREQUAL "Darwin")
   add_subdirectory(pythondp3)
 endif()
 
-set(SOURCEDB_OBJECTS
-  $<TARGET_OBJECTS:Blob>
-  $<TARGET_OBJECTS:Common>
-  $<TARGET_OBJECTS:ParmDB>)
+set(SOURCEDB_OBJECTS $<TARGET_OBJECTS:Blob> $<TARGET_OBJECTS:Common>
+                     $<TARGET_OBJECTS:ParmDB>)
 
-set(SOURCEDB_LIBRARIES
-  ${CASACORE_LIBRARIES}
-  ${Boost_SYSTEM_LIBRARY})
+set(SOURCEDB_LIBRARIES ${CASACORE_LIBRARIES} ${Boost_SYSTEM_LIBRARY})
 
 add_executable(makesourcedb parmdb/makesourcedb.cc ${SOURCEDB_OBJECTS})
 target_link_libraries(makesourcedb ${SOURCEDB_LIBRARIES})
@@ -435,74 +456,79 @@ target_link_libraries(showsourcedb ${SOURCEDB_LIBRARIES})
 install(TARGETS makesourcedb showsourcedb DESTINATION bin)
 
 # Install a script that warns users that DP3 is the new name of the executable
-install(PROGRAMS scripts/DPPP-deprecation.sh DESTINATION bin RENAME DPPP)
+install(
+  PROGRAMS scripts/DPPP-deprecation.sh
+  DESTINATION bin
+  RENAME DPPP)
 
 include(docs/CMakeLists.txt)
 
-if (BUILD_TESTING)
+if(BUILD_TESTING)
   include(CTest)
 
   if(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
     set(OS_SPECIFIC_TESTS) # No specific tests for Apple
   else()
     # These run only on Linux
-    set(OS_SPECIFIC_TESTS
-    # steps/dynamic_test_step/test/tDynamicTestStep.cc # This test still fails
-    )
-    add_library(dp3_testdyndp3 SHARED
-      steps/dynamic_test_step/DynamicTestStep.cc ${DP3_OBJECTS}
+    set(OS_SPECIFIC_TESTS # steps/dynamic_test_step/test/tDynamicTestStep.cc #
+                          # This test still fails
     )
+    add_library(dp3_testdyndp3 SHARED steps/dynamic_test_step/DynamicTestStep.cc
+                                      ${DP3_OBJECTS})
     target_link_libraries(dp3_testdyndp3 ${DP3_LIBRARIES})
     add_dependencies(dp3_testdyndp3 schaapcommon)
   endif()
 
   set(TEST_FILENAMES
-    common/test/unit/tProximityClustering.cc
-    base/test/runtests.cc
-    base/test/unit/tBaselineSelection.cc
-    base/test/unit/tBDABuffer.cc
-    base/test/unit/tDPBuffer.cc
-    # base/test/unit/tDemixer.cc # Parset is no longer valid in this test
-    base/test/unit/tDP3.cc
-    base/test/unit/tMirror.cc
-    base/test/unit/tSolutionInterval.cc
-    base/test/unit/tSourceDBUtil.cc
-    ddecal/test/unit/SolverTester.cc
-    ddecal/test/unit/tBDASolverBuffer.cc
-    ddecal/test/unit/tLLSSolver.cc
-    ddecal/test/unit/tRotationConstraint.cc
-    ddecal/test/unit/tSmoothnessConstraint.cc
-    ddecal/test/unit/tSolvers.cc
-    ddecal/test/unit/tBdaSolvers.cc
-    steps/test/unit/mock/MockInput.cc
-    steps/test/unit/mock/MockStep.cc
-    steps/test/unit/fixtures/fDirectory.cc
-    steps/test/unit/tAOFlaggerStep.cc
-    steps/test/unit/tApplyCal.cc
-    steps/test/unit/tApplyCalH5.cc
-    steps/test/unit/tAverager.cc
-    steps/test/unit/tBDAResultStep.cc
-    steps/test/unit/tFilter.cc
-    steps/test/unit/tInterpolate.cc
-    steps/test/unit/tMedFlagger.cc
-    steps/test/unit/tMSReader.cc
-    steps/test/unit/tPreFlagger.cc
-    steps/test/unit/tPhaseShift.cc
-    steps/test/unit/tUpsample.cc
-    steps/test/unit/tUVWFlagger.cc
-    steps/test/unit/tPSet.cc
-    steps/test/unit/tScaleData.cc
-    steps/test/unit/tStationAdder.cc
-  )
+      common/test/unit/tProximityClustering.cc
+      base/test/runtests.cc
+      base/test/unit/tBaselineSelection.cc
+      base/test/unit/tBDABuffer.cc
+      base/test/unit/tDPBuffer.cc
+      # base/test/unit/tDemixer.cc # Parset is no longer valid in this test
+      base/test/unit/tDP3.cc
+      base/test/unit/tMirror.cc
+      base/test/unit/tSolutionInterval.cc
+      base/test/unit/tSourceDBUtil.cc
+      ddecal/test/unit/SolverTester.cc
+      ddecal/test/unit/tBDASolverBuffer.cc
+      ddecal/test/unit/tLLSSolver.cc
+      ddecal/test/unit/tRotationConstraint.cc
+      ddecal/test/unit/tSmoothnessConstraint.cc
+      ddecal/test/unit/tSolvers.cc
+      ddecal/test/unit/tBdaSolvers.cc
+      steps/test/unit/mock/MockInput.cc
+      steps/test/unit/mock/MockStep.cc
+      steps/test/unit/fixtures/fDirectory.cc
+      steps/test/unit/tAOFlaggerStep.cc
+      steps/test/unit/tApplyCal.cc
+      steps/test/unit/tApplyCalH5.cc
+      steps/test/unit/tAverager.cc
+      steps/test/unit/tBDAResultStep.cc
+      steps/test/unit/tFilter.cc
+      steps/test/unit/tInterpolate.cc
+      steps/test/unit/tMedFlagger.cc
+      steps/test/unit/tMSReader.cc
+      steps/test/unit/tPreFlagger.cc
+      steps/test/unit/tPhaseShift.cc
+      steps/test/unit/tUpsample.cc
+      steps/test/unit/tUVWFlagger.cc
+      steps/test/unit/tPSet.cc
+      steps/test/unit/tScaleData.cc
+      steps/test/unit/tStationAdder.cc)
   if(${Python3_NumPy_FOUND})
-     list(APPEND TEST_FILENAMES "steps/test/unit/tPyStep.cc")
+    list(APPEND TEST_FILENAMES "steps/test/unit/tPyStep.cc")
   else(${Python3_NumPy_FOUND})
-     message(WARNING "NumPy not found on machine, tPyStep test is excluded from tests.")
+    message(
+      WARNING "NumPy not found on machine, tPyStep test is excluded from tests."
+    )
   endif()
 
   # Boost 1.59 introduced BOOST_TEST. The tests below use this feature.
   if(Boost_VERSION_STRING VERSION_GREATER_EQUAL "1.59")
-    list(APPEND TEST_FILENAMES
+    list(
+      APPEND
+      TEST_FILENAMES
       common/test/unit/tMemory.cc
       common/test/unit/tStringTools.cc
       base/test/unit/tDPInfo.cc
@@ -522,93 +548,82 @@ if (BUILD_TESTING)
 
   # Add boost dynamic link flag for all test files.
   # https://www.boost.org/doc/libs/1_66_0/libs/test/doc/html/boost_test/usage_variants.html
-  # Without this flag, linking is incorrect and boost performs duplicate delete()
-  # calls after running all tests, in the cleanup phase.
+  # Without this flag, linking is incorrect and boost performs duplicate
+  # delete() calls after running all tests, in the cleanup phase.
   set_source_files_properties(
-    ${TEST_FILENAMES} PROPERTIES COMPILE_DEFINITIONS "BOOST_TEST_DYN_LINK"
-  )
+    ${TEST_FILENAMES} PROPERTIES COMPILE_DEFINITIONS "BOOST_TEST_DYN_LINK")
 
   set(DP3_RESOURCE_DIR ${CMAKE_SOURCE_DIR}/resources)
   set(EXTRACT_CMD ${CMAKE_COMMAND} -E tar xzf)
 
-  add_custom_target(extract_test_resources
+  add_custom_target(
+    extract_test_resources
     COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tApplyCal_tmp.parmdb.tgz
     COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tIDGPredict.sources.tgz
     COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tNDPPP.in_MS.tgz
     COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tNDPPP_bda.in_MS.tgz
     COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tNDPPP-generic.MS.tgz
-    COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tOSKAR.in_MS.tgz
-  )
+    COMMAND ${EXTRACT_CMD} ${DP3_RESOURCE_DIR}/tOSKAR.in_MS.tgz)
 
-  add_test(
-    NAME extract_resources
-    COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target extract_test_resources
-  )
-  set_tests_properties(extract_resources PROPERTIES FIXTURES_SETUP extract_resources)
+  add_test(NAME extract_resources
+           COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target
+                   extract_test_resources)
+  set_tests_properties(extract_resources PROPERTIES FIXTURES_SETUP
+                                                    extract_resources)
 
-  add_executable(
-    unittests
-    ${TEST_FILENAMES} ${OS_SPECIFIC_TESTS} ${DP3_OBJECTS}
-  )
+  add_executable(unittests ${TEST_FILENAMES} ${OS_SPECIFIC_TESTS}
+                           ${DP3_OBJECTS})
   set_target_properties(unittests PROPERTIES ENABLE_EXPORTS ON)
   target_link_libraries(unittests ${DP3_LIBRARIES})
   add_dependencies(unittests schaapcommon)
 
   # Automatically (re)build the unit tests on every ctest run.
-  add_test(buildunittests ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target unittests)
+  add_test(buildunittests ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target
+           unittests)
   set_tests_properties(buildunittests PROPERTIES FIXTURES_SETUP unittests)
 
   # unittests.sh adjusts the PYTHONPATH to make tPyStep working.
   configure_file(scripts/unittests.sh.in unittests.sh)
-  add_test(
-    NAME unittests
-    COMMAND unittests.sh  -t !@slow -f JUNIT -k unittests.xml --catch_system_error=yes
-  )
-  set_tests_properties(
-    unittests PROPERTIES LABELS unit
-    FIXTURES_REQUIRED "unittests;extract_resources"
-  )
-
-  # Long running tests are labeled 'slow' and use a separate add_test call,
-  # so ctest can run them in parallel.
-  # Unfortunately there is no easy means of retreiving a list of all slow tests.
+  add_test(NAME unittests COMMAND unittests.sh -t !@slow -f JUNIT -k
+                                  unittests.xml --catch_system_error=yes)
+  set_tests_properties(unittests PROPERTIES LABELS unit FIXTURES_REQUIRED
+                                            "unittests;extract_resources")
+
+  # Long running tests are labeled 'slow' and use a separate add_test call, so
+  # ctest can run them in parallel. Unfortunately there is no easy means of
+  # retrieving a list of all slow tests.
   set(SLOW_TESTS
-    bda_solvers/diagonal
-    bda_solvers/hybrid
-    bda_solvers/iterative_diagonal
-    bda_solvers/iterative_scalar
-    bda_solvers/scalar
-    idgpredict/process
-    idgpredict/process_beam
-    msbdawriter/process_simple
-    msbdawriter/create_default_subtables
-    msbdawriter/different_bda_intervals
-    solvers/scalar_solver
-    solvers/iterative_scalar_solver
-    solvers/scalar_solver_normaleq
-    solvers/diagonal_solver
-    solvers/iterative_diagonal_solver
-    solvers/hybrid_solver
-    solvers/full_jones_solver
-    solvers/min_iterations
-  )
+      bda_solvers/diagonal
+      bda_solvers/hybrid
+      bda_solvers/iterative_diagonal
+      bda_solvers/iterative_scalar
+      bda_solvers/scalar
+      idgpredict/process
+      idgpredict/process_beam
+      msbdawriter/process_simple
+      msbdawriter/create_default_subtables
+      msbdawriter/different_bda_intervals
+      solvers/scalar_solver
+      solvers/iterative_scalar_solver
+      solvers/scalar_solver_normaleq
+      solvers/diagonal_solver
+      solvers/iterative_diagonal_solver
+      solvers/hybrid_solver
+      solvers/full_jones_solver
+      solvers/min_iterations)
   if(USE_LSMR)
-    list(APPEND SLOW_TESTS
-      solvers/scalar_solver_lsmr
-      solvers/diagonal_solver_lsmr
-    )
+    list(APPEND SLOW_TESTS solvers/scalar_solver_lsmr
+         solvers/diagonal_solver_lsmr)
   endif()
 
   foreach(TEST ${SLOW_TESTS})
     string(REPLACE "/" "_" XMLNAME ${TEST})
     set(XMLNAME "unittest_${XMLNAME}.xml")
-    add_test(
-      NAME ${TEST}
-      COMMAND unittests -t ${TEST} -f JUNIT -k ${XMLNAME} --catch_system_error=yes
-    )
+    add_test(NAME ${TEST} COMMAND unittests -t ${TEST} -f JUNIT -k ${XMLNAME}
+                                  --catch_system_error=yes)
     set_tests_properties(
-      ${TEST} PROPERTIES LABELS "unit;slow" FIXTURES_REQUIRED "unittests;extract_resources"
-    )
+      ${TEST} PROPERTIES LABELS "unit;slow" FIXTURES_REQUIRED
+                         "unittests;extract_resources")
   endforeach()
 
   add_subdirectory(steps/test/integration)
diff --git a/CPack/CMakeLists.txt b/CPack/CMakeLists.txt
index 25f8a928a000fd2905a265165ef97d4f75c5f008..0101855adff78cce3fd0c8e69c19d44d1dc7356d 100644
--- a/CPack/CMakeLists.txt
+++ b/CPack/CMakeLists.txt
@@ -43,16 +43,16 @@ set(CPACK_SOURCE_GENERATOR "TGZ")
 
 set(CPACK_DEBIAN_FILE_NAME DEB-DEFAULT)
 set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "amd64")
-set(CPACK_DEBIAN_PACKAGE_DEPENDS 
-"aoflagger (>= 3.0.1),\
+set(CPACK_DEBIAN_PACKAGE_DEPENDS
+    "aoflagger (>= 3.0.1),\
  everybeam (>= 0.2), everybeam (<< 0.3),\
  idg-api (>= 0.8)")
 set(CPACK_DEBIAN_PACKAGE_MAINTAINER "deb-packages@astron.nl")
 set(CPACK_DEBIAN_PACKAGE_SECTION "science")
 set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
 
-# Determine list of conflicting package names.
-# Packages not built for current target CPU always conflict.
+# Determine list of conflicting package names. Packages not built for current
+# target CPU always conflict.
 if(DEFINED IDENTIFIED_TARGET_CPU)
   set(_conflicts dp3)
 else()
@@ -73,4 +73,3 @@ include(CPack)
 
 message(STATUS "Package name: ${CPACK_PACKAGE_NAME}")
 message(STATUS "Package version: ${CPACK_PACKAGE_VERSION}")
-
diff --git a/CPack/DetermineTargetCPU.cmake b/CPack/DetermineTargetCPU.cmake
index fbe8ee314912f788a57077d9b7206f0c5ebcb9fb..72121d72eeb6530307bfb35e8d09e79f911de22c 100644
--- a/CPack/DetermineTargetCPU.cmake
+++ b/CPack/DetermineTargetCPU.cmake
@@ -24,25 +24,58 @@ This modules sets the following ``INTERNAL`` variables:
 
 #]=======================================================================]
 
-# List of target CPUs known by both GCC and Clang 
-# This list was produced as follows (note: requires llc to be installed):
-#   comm -12 \
-#     <(g++ -march=foo -E - < /dev/null |& grep '^cc1: note' | \
-#       sed -nE 's,^.*: *([^;]*).*$,\1,p' | tr ' ' '\n' | sort -u) \
-#     <(llc -mattr=help |& grep processor. | awk '{print $1}' | sort -u) 
+# List of target CPUs known by both GCC and Clang This list was produced as
+# follows (note: requires llc to be installed): comm -12 \ <(g++ -march=foo -E -
+# < /dev/null |& grep '^cc1: note' | \ sed -nE 's,^.*: *([^;]*).*$,\1,p' | tr '
+# ' '\n' | sort -u) \ <(llc -mattr=help |& grep processor. | awk '{print $1}' |
+# sort -u)
 set(KNOWN_TARGET_CPUS
-  amdfam10 athlon64 athlon64-sse3 athlon-fx atom barcelona bdver1 bdver2
-  bdver3 bdver4 bonnell broadwell btver1 btver2 core2 core-avx2 core-avx-i
-  corei7 corei7-avx haswell ivybridge k8 k8-sse3 knl nehalem nocona opteron
-  opteron-sse3 sandybridge silvermont skylake skylake-avx512 slm westmere
-  x86-64 znver1 CACHE INTERNAL "Known target CPUs")
+    amdfam10
+    athlon64
+    athlon64-sse3
+    athlon-fx
+    atom
+    barcelona
+    bdver1
+    bdver2
+    bdver3
+    bdver4
+    bonnell
+    broadwell
+    btver1
+    btver2
+    core2
+    core-avx2
+    core-avx-i
+    corei7
+    corei7-avx
+    haswell
+    ivybridge
+    k8
+    k8-sse3
+    knl
+    nehalem
+    nocona
+    opteron
+    opteron-sse3
+    sandybridge
+    silvermont
+    skylake
+    skylake-avx512
+    slm
+    westmere
+    x86-64
+    znver1
+    CACHE INTERNAL "Known target CPUs")
 
 if(NOT PORTABLE)
   get_directory_property(_compile_options COMPILE_OPTIONS)
   string(REPLACE ";" " " _compile_options "${_compile_options}")
-  execute_process(COMMAND bash -c
-    # Executed command is printed on stderr; we can discard stdout
-    "echo | ${CMAKE_CXX_COMPILER} ${_compile_options} -E -v - >/dev/null"
+  execute_process(
+    COMMAND
+      bash -c
+      # Executed command is printed on stderr; we can discard stdout
+      "echo | ${CMAKE_CXX_COMPILER} ${_compile_options} -E -v - >/dev/null"
     ERROR_VARIABLE _command
     RESULT_VARIABLE _result)
   if(NOT _result EQUAL 0)
@@ -50,13 +83,17 @@ if(NOT PORTABLE)
                     "target CPU '${TARGET_CPU}'")
   else()
     if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
-      execute_process(COMMAND bash -c
-        "echo '${_command}' | sed -nE '/cc1/s/^.*-march=([^ ]+).*$/\\1/p'"
+      execute_process(
+        COMMAND
+          bash -c
+          "echo '${_command}' | sed -nE '/cc1/s/^.*-march=([^ ]+).*$/\\1/p'"
         OUTPUT_VARIABLE _target_cpu
         OUTPUT_STRIP_TRAILING_WHITESPACE)
     elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
-      execute_process(COMMAND bash -c
-        "echo '${_command}' | sed -nE '/cc1/s/^.*-target-cpu ([^ ]+).*$/\\1/p'"
+      execute_process(
+        COMMAND
+          bash -c
+          "echo '${_command}' | sed -nE '/cc1/s/^.*-target-cpu ([^ ]+).*$/\\1/p'"
         OUTPUT_VARIABLE _target_cpu
         OUTPUT_STRIP_TRAILING_WHITESPACE)
     else()
@@ -67,15 +104,17 @@ if(NOT PORTABLE)
 endif()
 
 if(DEFINED _target_cpu)
-  set(IDENTIFIED_TARGET_CPU ${_target_cpu} CACHE INTERNAL "")
+  set(IDENTIFIED_TARGET_CPU
+      ${_target_cpu}
+      CACHE INTERNAL "")
 else()
   unset(IDENTIFIED_TARGET_CPU CACHE)
 endif()
 
-if(DEFINED IDENTIFIED_TARGET_CPU AND
-   NOT IDENTIFIED_TARGET_CPU IN_LIST KNOWN_TARGET_CPUS)
-  message(AUTHOR_WARNING
-    "'${IDENTIFIED_TARGET_CPU}' is not in the list KNOWN_TARGET_CPUS. "
-    "Please check if this list is still up-to-date")
+if(DEFINED IDENTIFIED_TARGET_CPU AND NOT IDENTIFIED_TARGET_CPU IN_LIST
+                                     KNOWN_TARGET_CPUS)
+  message(
+    AUTHOR_WARNING
+      "'${IDENTIFIED_TARGET_CPU}' is not in the list KNOWN_TARGET_CPUS. "
+      "Please check if this list is still up-to-date")
 endif()
-
diff --git a/base/CMakeLists.txt b/base/CMakeLists.txt
index 94ebdb259bf53c042125dac16a5a42d1f85e4c22..51d4ccdd301905d9c37334d9411d8290d62d718f 100644
--- a/base/CMakeLists.txt
+++ b/base/CMakeLists.txt
@@ -6,11 +6,11 @@ target_link_libraries(DP3 ${DP3_LIBRARIES})
 add_dependencies(DP3 schaapcommon)
 
 set_target_properties(DP3 PROPERTIES ENABLE_EXPORTS ON)
-set_target_properties(DP3 PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
+set_target_properties(DP3 PROPERTIES RUNTIME_OUTPUT_DIRECTORY
+                                     ${CMAKE_BINARY_DIR})
 
 install(TARGETS DP3 DESTINATION bin)
 
 # Data files
-install(
-  FILES share/LOFAR-LBA-default.rfis share/LOFAR-HBA-default.rfis
-  DESTINATION share/rfistrategies)
+install(FILES share/LOFAR-LBA-default.rfis share/LOFAR-HBA-default.rfis
+        DESTINATION share/rfistrategies)
diff --git a/base/etc/CMakeLists.txt b/base/etc/CMakeLists.txt
index ce203d7616d9d31169e9f64db88f31ad08b43c38..569deaf21576fbf5800b82b569a50144732b08de 100644
--- a/base/etc/CMakeLists.txt
+++ b/base/etc/CMakeLists.txt
@@ -2,6 +2,4 @@
 # SPDX-License-Identifier: GPL-3.0-or-later
 
 # Logger configuration
-install(FILES
-  DPPP.log_prop
-  DESTINATION etc)
+install(FILES DPPP.log_prop DESTINATION etc)
diff --git a/ddecal/test/integration/CMakeLists.txt b/ddecal/test/integration/CMakeLists.txt
index 7eee00920deee349435fa4f360f12cb2fcdfdaa7..4057a569e60ad2834523861b979458f0f3f0f432 100644
--- a/ddecal/test/integration/CMakeLists.txt
+++ b/ddecal/test/integration/CMakeLists.txt
@@ -5,17 +5,16 @@ configure_file(testInit.sh.in testInit.sh)
 
 # The 'source' symbolic link simplifies running the tests manually inside
 # ${CMAKE_CURRENT_BINARY_DIR}: It allows using 'source/tApplyBeam.sh' instead of
-# '../../../../DP3/steps/test/integration/tApplyBeam.sh.
-# (Using 'RESULT', fatal errors won't occur on systems without symlink support.)
+# '../../../../DP3/steps/test/integration/tApplyBeam.sh. (Using 'RESULT', fatal
+# errors won't occur on systems without symlink support.)
 if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.14")
-  file(CREATE_LINK ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/source
-       RESULT DUMMY_RESULT SYMBOLIC)
+  file(
+    CREATE_LINK ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/source
+    RESULT DUMMY_RESULT
+    SYMBOLIC)
 endif()
 
-set(INTEGRATION_TESTS
-  tDDECal
-  tIDGPredict
-)
+set(INTEGRATION_TESTS tDDECal tIDGPredict)
 
 foreach(TEST ${INTEGRATION_TESTS})
   # Use ${CMAKE_CURRENT_SOURCE_DIR} instead of 'source' since not all systems
diff --git a/docker/ubuntu_20_04_base b/docker/ubuntu_20_04_base
index bf116824c7f877dd6f0a2e60ae3c8dbad4719883..471d56cfe5c2713c2341ae4b6071f5eccfc8fe33 100644
--- a/docker/ubuntu_20_04_base
+++ b/docker/ubuntu_20_04_base
@@ -50,6 +50,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && apt-get update && \
     && tar xfz /WSRT_Measures.ztar \
     && rm /WSRT_Measures.ztar \
 # Install pip dependencies
-    && pip3 install gcovr clang-format==9.0.0 h5py \
+    && pip3 install gcovr clang-format==9.0.0 cmake-format h5py \
     # build-doc dependencies
     autosemver==0.5.5 jsonschema2rst==0.1.0 sphinx sphinx-rtd-theme
diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt
index 838f6ffc07006cb89a6b077543efab1de7c21ccc..cc5497431f02c0868da9e9ec0f10a721df7657ab 100644
--- a/docs/CMakeLists.txt
+++ b/docs/CMakeLists.txt
@@ -10,43 +10,50 @@ find_package(Doxygen)
 
 if(NOT DP3_VERSION)
   # Building docs standalone, get version from top level CMakelists
-  FILE(READ "../CMakeLists.txt" TOPLEVEL_CMAKELISTS)
-  set(DP3_VERSION_REGEX ".*set\\(DP3_VERSION ([0-9]+)\\.([0-9]+)\\.([0-9]+).*\\)")
-  if ("${TOPLEVEL_CMAKELISTS}" MATCHES ${DP3_VERSION_REGEX})
+  file(READ "../CMakeLists.txt" TOPLEVEL_CMAKELISTS)
+  set(DP3_VERSION_REGEX
+      ".*set\\(DP3_VERSION ([0-9]+)\\.([0-9]+)\\.([0-9]+).*\\)")
+  if("${TOPLEVEL_CMAKELISTS}" MATCHES ${DP3_VERSION_REGEX})
     set(DP3_VERSION_MAJOR "${CMAKE_MATCH_1}")
     set(DP3_VERSION_MINOR "${CMAKE_MATCH_2}")
     set(DP3_VERSION_PATCH "${CMAKE_MATCH_3}")
   else()
-    message(FATAL_ERROR "Failed to parse DP3_VERSION from top level CMakeLists.txt")
+    message(
+      FATAL_ERROR "Failed to parse DP3_VERSION from top level CMakeLists.txt")
   endif()
 endif()
-  
 
 if(DOXYGEN_FOUND)
-  configure_file(${CMAKE_CURRENT_LIST_DIR}/doxygen/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
-  add_custom_target(doc
+  configure_file(${CMAKE_CURRENT_LIST_DIR}/doxygen/Doxyfile.in
+                 ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
+  add_custom_target(
+    doc
     ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
     WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
-    COMMENT "Generating API documentation with Doxygen" VERBATIM)
+    COMMENT "Generating API documentation with Doxygen"
+    VERBATIM)
 endif(DOXYGEN_FOUND)
 
 find_package(Sphinx)
-find_program(JSONSCHEMA2RST_EXECUTABLE
-             NAMES jsonschema2rst
-             DOC "Path to jsonschema2rst executable")
+find_program(
+  JSONSCHEMA2RST_EXECUTABLE
+  NAMES jsonschema2rst
+  DOC "Path to jsonschema2rst executable")
 
 if(SPHINX_FOUND AND JSONSCHEMA2RST_EXECUTABLE)
   set(SPHINX_SOURCE ${CMAKE_CURRENT_BINARY_DIR})
   set(SPHINX_BUILD ${CMAKE_CURRENT_BINARY_DIR}/docs/)
-  configure_file(${CMAKE_CURRENT_LIST_DIR}/index.rst ${CMAKE_CURRENT_BINARY_DIR} COPYONLY)
-  configure_file(${CMAKE_CURRENT_LIST_DIR}/conf.py ${CMAKE_CURRENT_BINARY_DIR} COPYONLY)
-  add_custom_target(userdoc
-                    COMMAND
-                    ${SPHINX_EXECUTABLE} -b html
-                    ${SPHINX_SOURCE} ${SPHINX_BUILD}
-                    WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
-                    COMMENT "Generating user documentation with Sphinx"
-                    )
+  configure_file(${CMAKE_CURRENT_LIST_DIR}/index.rst
+                 ${CMAKE_CURRENT_BINARY_DIR} COPYONLY)
+  configure_file(${CMAKE_CURRENT_LIST_DIR}/conf.py ${CMAKE_CURRENT_BINARY_DIR}
+                 COPYONLY)
+  add_custom_target(
+    userdoc
+    COMMAND ${SPHINX_EXECUTABLE} -b html ${SPHINX_SOURCE} ${SPHINX_BUILD}
+    WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+    COMMENT "Generating user documentation with Sphinx")
 else()
-  message("Sphinx (sphinx-build) and jsonschema2rst are necessary for building user docs")
+  message(
+    "Sphinx (sphinx-build) and jsonschema2rst are necessary for building user docs"
+  )
 endif(SPHINX_FOUND AND JSONSCHEMA2RST_EXECUTABLE)
diff --git a/scripts/format.sh b/scripts/format.sh
new file mode 100644
index 0000000000000000000000000000000000000000..5c0d8261a9dc56936931f97f225ebea8fca48a85
--- /dev/null
+++ b/scripts/format.sh
@@ -0,0 +1,78 @@
+# Copyright (C) 2021 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# format.sh: Formats source code in a repository in accordance with
+# .clang-format and .cmake-format.py files.
+#
+# This script uses the following variables:
+# - SOURCE_DIR: The directory that contains the source files.
+# - EXCLUDE_DIRS: (Optional) directories that must be excluded from formatting.
+#                 These paths are relative to SOURCE_DIR.
+# - CXX_SOURCES: Patterns of the C++ files, which clang-format should format.
+# - CMAKE_SOURCES: Patterns of the CMake files, which cmake-format should format.
+#
+# A repository that uses format.sh should define its own run-format.sh script
+# that defines these variables and then sources this script.
+# If you want to automatically check formatting in each commit, include the line
+# "./scripts/run-format.sh" to .git/hooks/pre-commit
+# and make sure pre-commit is an executable shell script.
+
+# Disable globbing
+set -e -f
+
+# Check arguments
+if [ -z "$SOURCE_DIR" -o -z "$CXX_SOURCES" -o -z "$CMAKE_SOURCES" ]; then
+  echo "Please define SOURCE_DIR, CXX_SOURCES and CMAKE_SOURCES when using $BASH_SOURCE"
+  exit 1
+fi
+
+# Detect run environment.
+if [ -n "$CI" ]; then
+  DRYRUN=" (dry run on CI)"
+elif [ -n "$GIT_AUTHOR_DATE" ]; then
+  DRYRUN=" (dry run in git hook)"
+fi
+
+# print in bold-face
+echo -e "\e[1mRunning formatters$DRYRUN...\e[0m"
+
+# Convert SOURCES into "-name ext1 -o -name ext2 -o name ext3 ..."
+CXX_FIND_NAMES="-name ${CXX_SOURCES[0]}"
+for i in `seq 1 $((${#CXX_SOURCES[*]} - 1))`; do
+  CXX_FIND_NAMES+=" -o -name ${CXX_SOURCES[$i]}"
+done
+
+CMAKE_FIND_NAMES="-name ${CMAKE_SOURCES[0]}"
+for i in `seq 1 $((${#CMAKE_SOURCES[*]} - 1))`; do
+  CMAKE_FIND_NAMES+=" -o -name ${CMAKE_SOURCES[$i]}"
+done
+
+# Convert EXCLUDE_DIRS into "-path ./dir1 -prune -o -path ./dir2 -prune -o ..."
+FIND_EXCLUDES=
+for e in ${EXCLUDE_DIRS[*]}; do
+  FIND_EXCLUDES+="-path ./$e -prune -o "
+done
+
+cd $SOURCE_DIR
+CXX_FILES=$(find . $FIND_EXCLUDES -type f \( $CXX_FIND_NAMES \) -print)
+CMAKE_FILES=$(find . $FIND_EXCLUDES -type f \( $CMAKE_FIND_NAMES \) -print)
+
+if [ -n "$DRYRUN" ]; then
+  # If the clang-format xml has no replacement entries, all files are formatted.
+  if !(clang-format -style=file --output-replacements-xml $CXX_FILES |
+       grep -q "<replacement ") && cmake-format --check $CMAKE_FILES; then
+    # print in bold-face green
+    echo -e "\e[1m\e[32mGreat job, all files are properly formatted!\e[0m"
+    exit 0;
+  else
+    # Print in bold-face red
+    echo -e "\e[1m\e[31mAt least one file is not properly formatted!\e[0m"
+    echo -e "\e[1m\e[31mRun $0 for formatting all files!\e[0m"
+    exit 1;
+  fi
+else
+  clang-format -i -style=file $CXX_FILES
+  cmake-format -i $CMAKE_FILES
+  # print in bold-face
+  echo -e "\e[1mSuccessfully formatted all files.\e[0m"
+fi
diff --git a/scripts/run-clang-format.sh b/scripts/run-clang-format.sh
deleted file mode 100755
index 7a939c468a88a08ad5a92bf8eadc31cb80349a2a..0000000000000000000000000000000000000000
--- a/scripts/run-clang-format.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/bash
-
-# Copyright (C) 2020 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-# run-clang-format.sh: Formats source code in this repo in accordance with .clang-format file.
-#
-# To hook this script to pre-commit include the line
-# "./scripts/run-clang-format.sh" to .git/hooks/pre-commit
-# and make sure pre-commit is an executable shell script.
-
-# Disable globbing
-set -e -f
-
-#Script configuration for this repo. Adjust it when copying to a different repo.
-
-#The directory that contains the source files, which clang-format should format.
-SOURCE_DIR=$(dirname "$0")/..
-
-#Directories that must be excluded from formatting. These paths are
-#relative to SOURCE_DIR.
-EXCLUDE_DIRS=(external build)
-
-#The extensions of the source files, which clang-format should format.
-SOURCE_EXT=(*.cc *.h)
-
-#End script configuration.
-
-# Detect run environment.
-if [ -n "$CI" ]; then
-  DRYRUN=" (dry run on CI)"
-elif [ -n "$GIT_AUTHOR_DATE" ]; then
-  DRYRUN=" (dry run in git hook)"
-fi
-
-# print in bold-face
-echo -e "\e[1mRunning clang-format$DRYRUN...\e[0m"
-
-# Convert SOURCE_EXT into "-name ext1 -o -name ext2 -o name ext3 ..."
-FIND_NAMES="-name ${SOURCE_EXT[0]}"
-for i in `seq 1 $((${#SOURCE_EXT[*]} - 1))`; do
-  FIND_NAMES+=" -o -name ${SOURCE_EXT[$i]}"
-done
-
-# Convert EXCLUDE_DIRS into "-path ./dir1 -prune -o -path ./dir2 -prune -o ..."
-FIND_EXCLUDES=
-for e in ${EXCLUDE_DIRS[*]}; do
-  FIND_EXCLUDES+="-path ./$e -prune -o "
-done
-
-cd $SOURCE_DIR
-FILES=$(find . $FIND_EXCLUDES -type f \( $FIND_NAMES \) -print)
-
-if [ -n "$DRYRUN" ]; then
-  # If the xml has no replacement entries, all files are formatted.
-  if clang-format -style=file --output-replacements-xml $FILES |
-     grep -q "<replacement "; then
-    # Print in bold-face red
-    echo -e "\e[1m\e[31mAt least one file is not properly formatted!\e[0m"
-    echo -e "\e[1m\e[31mRun $0 for formatting all files!\e[0m"
-    exit 1;
-  else
-    # print in bold-face green
-    echo -e "\e[1m\e[32mGreat job, all files are properly formatted!\e[0m"
-    exit 0;
-  fi
-else
-  clang-format -i -style=file $FILES
-  # print in bold-face
-  echo -e "\e[1mSuccessfully formatted all files.\e[0m"
-fi
diff --git a/scripts/run-format.sh b/scripts/run-format.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f851bdf4b0172be062ae5b1d23f98c394ef690a0
--- /dev/null
+++ b/scripts/run-format.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# Copyright (C) 2021 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+#Script configuration for this repo. Adjust it when copying to a different repo.
+
+#The directory that contains the source files.
+SOURCE_DIR=$(dirname "$0")/..
+
+#Directories that must be excluded from formatting. These paths are
+#relative to SOURCE_DIR.
+EXCLUDE_DIRS=(external build CMake)
+
+#The patterns of the C++ source files, which clang-format should format.
+CXX_SOURCES=(*.cc *.h)
+
+#The patterns of the CMake source files, which cmake-format should format.
+CMAKE_SOURCES=(CMakeLists.txt *.cmake)
+
+#End script configuration.
+
+#The common formatting script has further documentation.
+source $(dirname "$0")/format.sh
diff --git a/steps/test/integration/CMakeLists.txt b/steps/test/integration/CMakeLists.txt
index 397277dbd0d1d87ee3fc3ffbded2c72cbaa51d3f..6aff263f202ca4779f404efd8daef02a94449c9a 100644
--- a/steps/test/integration/CMakeLists.txt
+++ b/steps/test/integration/CMakeLists.txt
@@ -5,25 +5,26 @@ configure_file(testInit.sh.in testInit.sh)
 
 # The 'source' symbolic link simplifies running the tests manually inside
 # ${CMAKE_CURRENT_BINARY_DIR}: It allows using 'source/tApplyBeam.sh' instead of
-# '../../../../DP3/steps/test/integration/tApplyBeam.sh.
-# (Using 'RESULT', fatal errors won't occur on systems without symlink support.)
+# '../../../../DP3/steps/test/integration/tApplyBeam.sh. (Using 'RESULT', fatal
+# errors won't occur on systems without symlink support.)
 if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.14")
-  file(CREATE_LINK ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/source
-       RESULT DUMMY_RESULT SYMBOLIC)
+  file(
+    CREATE_LINK ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/source
+    RESULT DUMMY_RESULT
+    SYMBOLIC)
 endif()
 
 set(INTEGRATION_TESTS
-  tApplyBeam
-  tApplyCal2
-  tBdaPredict
-  tDemix
-  tGainCal
-  tGainCalH5Parm
-  tMultiApplyCal
-  tPredict
-  tReadOnly
-  tSplit
-)
+    tApplyBeam
+    tApplyCal2
+    tBdaPredict
+    tDemix
+    tGainCal
+    tGainCalH5Parm
+    tMultiApplyCal
+    tPredict
+    tReadOnly
+    tSplit)
 
 foreach(TEST ${INTEGRATION_TESTS})
   # Use ${CMAKE_CURRENT_SOURCE_DIR} instead of 'source' since not all systems