diff --git a/.deploy.gitlab-ci.yml b/.deploy.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..e8416def00fb570543e6833aadadba8985f84ccf
--- /dev/null
+++ b/.deploy.gitlab-ci.yml
@@ -0,0 +1,55 @@
+variables:
+  STATION: ""
+  COMPONENTS: ""
+
+stages:
+  - deploy
+
+deploy_nomad:
+  stage: deploy
+  needs:
+    - pipeline: $PARENT_PIPELINE_ID
+      job: render_levant
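+    # the parent pipeline's render_levant artifacts provide the jobs/<station>/<component>.nomad files deployed below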
+  image:
+    name: hashicorp/nomad
+    entrypoint: [ "" ]
+  environment:
+    name: $STATION
+  script:
+    - |
+      if [ "${STATION}" == "dts-lab" ]; then
+          # dts-lab test station
+          HOSTNAME="dts-lab.lofar.net"
+      else
+          # core/remote station
+          HOSTNAME="${STATION}c.control.lofar"
+      fi
+
+      for COMPONENT in ${COMPONENTS}; do
+          echo "Running station ${STATION} component ${COMPONENT}"
+          nomad job run -address="http://${HOSTNAME}:4646" jobs/${STATION}/${COMPONENT}.nomad
+      done
+
+deploy_CDB:
+  stage: deploy
+  image:
+    name: hashicorp/nomad
+    entrypoint: [ "" ]
+  script:
+    - |
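+      # the "s3::" prefix makes Nomad's artifact fetcher (go-getter) treat this URL as an S3 object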
+      CDB_JSON_URL="s3::https://s3.lofar.net:9000/central-cdb/${CI_COMMIT_REF_SLUG}/${STATION}.json"
+
+      echo "Artifact URL: ${CDB_JSON_URL}"
+
+      if [ "${STATION}" == "dts-lab" ]; then
+          # dts-lab test station
+          HOSTNAME="dts-lab.lofar.net"
+      else
+          # core/remote station
+          HOSTNAME="${STATION}c.control.lofar"
+      fi
+
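+      # dispatch the parameterized "dsconfig" job; it fetches the CDB JSON itself via NOMAD_META_cdb_json_url in its artifact stanza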
+      nomad job dispatch -address="http://${HOSTNAME}:4646" \
+        -id-prefix-template gitlab-deploy-cdb \
+        -meta cdb_json_url="${CDB_JSON_URL}" \
+        dsconfig
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c4e5e18a5148df8ba77ee8df6843a739cfef2c00..e2176cfed59fae5f751239ae89910d4ec04c5c0f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -24,6 +24,9 @@ variables:
   GIT_SUBMODULE_STRATEGY: recursive
   PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
   PIP_REQUIREMENTS_FILE: "requirements.txt"
+  # Station to deploy to. Set by an upstream multi-project
+  # pipeline to trigger & fast-track deployment for that station.
+  STATION: ""
 
 workflow:
   rules:
@@ -36,22 +39,43 @@ include:
   - template: Security/Dependency-Scanning.gitlab-ci.yml
   - template: Security/Secret-Detection.gitlab-ci.yml
 
+# Local jobs run for commits to this repository, not for
+# pipelines triggered externally (e.g. multi-project pipelines).
+.local_job:
+  needs:
+    - trigger_prepare
+  rules:
+    # These rules only reject jobs, they never accept one.
+    # Jobs extending .local_job must therefore add their own
+    # rules, either to filter further or to accept
+    # unconditionally (e.g. "- if: $CI").
+    - if: $CI_PIPELINE_SOURCE == "pipeline"
+      when: never
+
 # Prepare image to run ci on
 trigger_prepare:
   stage: prepare
+  rules:
+    - if: $CI
   trigger:
     strategy: depend
     include: .prepare.gitlab-ci.yml
 
 run_lint:
   stage: lint
+  extends: .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   script:
     - tox -e lint
 
 run_shellcheck:
   stage: lint
-  needs:
-    - trigger_prepare
+  extends: .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   script:
     - shellcheck --version
     - shellcheck **/*.sh
@@ -67,10 +91,13 @@ run_shellcheck:
 # Run manually to bootstrap CDB/stations/$station.json for stations
 # converted from LOFAR1 to LOFAR2.
 render_CDB_stations_from_lofar1:
-  extends: .components
   stage: render
-  needs:
-    - trigger_prepare
+  extends:
+    - .components
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   when: manual
   script:
     - |
@@ -91,10 +118,13 @@ render_CDB_stations_from_lofar1:
 
 # Generate the ILT configurations for each station.
 render_CDB_environment_ilt:
-  extends: .components
   stage: render
-  needs:
-    - trigger_prepare
+  extends:
+    - .components
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   when: manual
   before_script:
     - sudo apt install -y jq
@@ -156,7 +186,7 @@ render_CDB:
     expose_as: 'deployment cdb configs'
     name: deploy-cdb
     paths:
-      - generated/CDB/deploy/
+      - generated/
 
 # Collect services from this repo, to deploy as Nomad jobs on a station
 render_levant:
@@ -263,10 +293,13 @@ secret_detection:
 
 # Run all unit tests for Python versions except the base image
 run_unit_tests:
-  extends: .run_unit_test_version_base
-  needs:
-   - trigger_prepare
   stage: test
+  extends:
+    - .run_unit_test_version_base
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   allow_failure: true
   image: python:3.${PY_VERSION}
   script:
@@ -276,10 +309,13 @@ run_unit_tests:
       - PY_VERSION: [10, 11]
 
 run_unit_tests_coverage:
-  extends: .run_unit_test_version_base
-  needs:
-   - trigger_prepare
   stage: test
+  extends:
+    - .run_unit_test_version_base
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   script:
     - tox -e coverage
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
@@ -296,8 +332,10 @@ run_unit_tests_coverage:
 
 package_files:
   stage: package
-  needs:
-  - trigger_prepare
+  extends: .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
 
   artifacts:
     expire_in: 1w
@@ -309,6 +347,10 @@ package_files:
 
 sphinx_documentation:
   stage: package
+  extends: .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   artifacts:
     expire_in: 1w
     paths:
@@ -356,14 +398,18 @@ sphinx_documentation:
 # Download all remote images and store them on our image registry for tagged
 # master builds
 docker_store_images_master_tag:
-  extends: .base_docker_store_images
+  extends:
+    - .local_job
+    - .base_docker_store_images
   rules:
+    - !reference [.local_job, rules]
     - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
 
 # Download all remote images and store them on our image registry if .env changes
 # on a merge request
 docker_store_images_changes:
-  extends: .base_docker_store_images
+  extends:
+    - .base_docker_store_images
   # This will spawn as detached pipeline but atleast ensures the changes rule
   # works. See the following references
   #  https://stackoverflow.com/questions/68955071/how-to-disable-detached-pipelines-in-gitlab
@@ -376,7 +422,9 @@ docker_store_images_changes:
 
 # Build and push custom images on merge request if relevant files changed
 docker_build:
-  extends: .base_docker_images
+  extends:
+    - .local_job
+    - .base_docker_images
   parallel:
     matrix:
       - IMAGE:
@@ -387,6 +435,7 @@ docker_build:
           - snmp-exporter
           - landing-page
   rules:
+    - !reference [.local_job, rules]
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
       changes:
         - docker/$IMAGE/**/*
@@ -400,24 +449,29 @@ docker_build:
       - docker/snmp-exporter/snmp.yml
 
 docker_build_device_base:
-  extends: .base_docker_images
+  extends:
+    - .local_job
+    - .base_docker_images
   rules:
+    - !reference [.local_job, rules]
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
   script:
     #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base $tag
 
-
-
 .run_integration_tests:
   stage: integration
-  image: docker:latest
+  extends: .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   needs:
     - run_unit_tests
     - docker_build_device_base
   tags:
     - integration_tests
+  image: docker:latest
   variables:
     JUMPPAD_HOME: $CI_PROJECT_DIR
   before_script:
@@ -454,19 +508,34 @@ docker_build_device_base:
       - .jumppad/logs/
 
 run_integration_test_core:
-  extends: .run_integration_tests
+  extends:
+    - .run_integration_tests
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   script:
     #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash -e $CI_PROJECT_DIR/sbin/run_integration_test.sh --no-build --save-logs --module="tango" --station=cs
 
 run_integration_test_remote:
-  extends: .run_integration_tests
+  extends:
+    - .run_integration_tests
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   script:
     #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash -e $CI_PROJECT_DIR/sbin/run_integration_test.sh --no-build --save-logs --module="tango" --station=rs
 
 run_service_test_docker:
-  extends: .run_integration_tests
+  extends:
+    - .run_integration_tests
+    - .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   script:
     # Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash -e $CI_PROJECT_DIR/sbin/run_integration_test.sh --no-build --skip-tests --module="services" --station=cs
@@ -474,6 +543,10 @@ run_service_test_docker:
 run_multi_project_integration_test:
   allow_failure: true
   stage: integration
+  extends: .local_job
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
   needs:
     - docker_build_device_base
   variables:
@@ -491,12 +564,14 @@ run_multi_project_integration_test:
 publish_on_gitlab:
   stage: publish
   environment: gitlab
-  needs:
-    - package_files
+  extends: .local_job
   rules:
+    - !reference [.local_job, rules]
     - if: '$CI_COMMIT_TAG && $CI_COMMIT_REF_PROTECTED == "true"'
     - if: $CI_COMMIT_TAG
       when: manual
+  needs:
+    - package_files
   variables:
     TWINE_PASSWORD: ${CI_JOB_TOKEN}
     TWINE_USERNAME: gitlab-ci-token
@@ -507,12 +582,39 @@ publish_on_gitlab:
     - pip install twine --upgrade
   script:
     - echo "run twine for gitlab"
     - python3 -m twine upload --repository-url ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi dist/*
+
+upload_CDB_minio:
+  stage: publish
+  extends: .local_job
+  image:
+    name: minio/mc
+    entrypoint: [ "" ]
+  rules:
+    - !reference [.local_job, rules]
+    - if: $CI
+    #- if: $CI_COMMIT_TAG
+  needs:
+    - render_CDB
+    - test_CDB_correctness
+  variables:
+    MINIO_USER: minioadmin
+    MINIO_PASSWORD: minioadmin
+  # override default before_script, job won't have Python available
+  before_script:
+    - mc --version
+  script:
+    - |
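+      # upload the rendered CDB files to the branch-specific prefix that deploy_CDB later downloads from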
+      mc alias set object-storage https://s3.lofar.net:9000 ${MINIO_USER} ${MINIO_PASSWORD}
+      mc mb -p object-storage/central-cdb/${CI_COMMIT_REF_SLUG}/
+      mc cp --recursive generated/CDB/deploy/* object-storage/central-cdb/${CI_COMMIT_REF_SLUG}/
 
 release_job:
   stage: publish
   image: registry.gitlab.com/gitlab-org/release-cli:latest
+  extends: .local_job
   rules:
+    - !reference [.local_job, rules]
     - if: '$CI_COMMIT_TAG && $CI_COMMIT_REF_PROTECTED == "true"'
   before_script:
     - echo "running release_job before_script"
@@ -522,18 +624,26 @@ release_job:
     tag_name: '$CI_COMMIT_TAG'
     description: '$CI_COMMIT_TAG - $CI_COMMIT_TAG_MESSAGE'
 
-deploy_nomad:
-  extends: .components
+deploy_manual:
   stage: deploy
+  extends:
+    - .components
+    - .local_job
   rules:
+    - !reference [.local_job, rules]
     - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
   when: manual
   needs:
     - docker_build
     - docker_build_device_base
-    - render_levant
-  dependencies:
-    - render_levant
+    - upload_CDB_minio
+  variables:
+    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
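+    # lets the child pipeline fetch this pipeline's render_levant artifacts via needs:pipeline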
+  trigger:
+    strategy: depend
+    include: .deploy.gitlab-ci.yml
+    forward:
+      pipeline_variables: true
   parallel:
     matrix:
       - STATION:
@@ -541,25 +651,17 @@ deploy_nomad:
           - cs032
           - rs307
           - dts-lab
-  image:
-    name: hashicorp/nomad
-    entrypoint: [ "" ]
-  environment:
-    name: $STATION
-  # override default before_script, job won't have Python available
-  before_script:
-    - uname
-  script:
-    - |
-      if [ "${STATION}" == "dts-lab" ]; then
-          # dts-lab test station
-          HOSTNAME="dts-lab.lofar.net"
-      else
-          # core/remote station
-          HOSTNAME="monitor.control.lofar"
-      fi
 
-      for COMPONENT in ${COMPONENTS}; do
-          echo "Running station ${STATION} component ${COMPONENT}"
-          nomad job run -address="http://${HOSTNAME}:4646" jobs/${STATION}/${COMPONENT}.nomad
-      done
+# Deploy on $STATION as set by a multi-project pipeline
+deploy_auto:
+  stage: deploy
+  extends: .components
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "pipeline"
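+  # forward:pipeline_variables passes the STATION supplied by the upstream trigger on to the child deploy pipeline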
+  variables:
+    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
+  trigger:
+    strategy: depend
+    include: .deploy.gitlab-ci.yml
+    forward:
+      pipeline_variables: true
diff --git a/README.md b/README.md
index dde3748e151fbb81eb2c572fac57482276a10916..db0315e908d37d4a07e73d713c2470077f17e7fb 100644
--- a/README.md
+++ b/README.md
@@ -150,6 +150,7 @@ Next change the version in the following places:
    through [https://git.astron.nl/lofar2.0/tango/-/tags](Deploy Tags)
 
 # Release Notes
+* 0.51.2 Allow automated deployment from a multi-project pipeline in Gitlab CI/CD
 * 0.51.1 Generate caltables from LOFAR1 data or by generating dummies
 * 0.51.0 Add Calibration service to gRPC server
 * 0.50.1 Generate and combine CDB files for all stations in Gitlab CI/CD
diff --git a/infra/jobs/station/dsconfig.levant.nomad b/infra/jobs/station/dsconfig.levant.nomad
index 238c94be72cf6b272d2a3ec2f3f50ec5a23eb53f..b29038163a9176f64141027a9b0c298f1837ac61 100644
--- a/infra/jobs/station/dsconfig.levant.nomad
+++ b/infra/jobs/station/dsconfig.levant.nomad
@@ -15,6 +15,20 @@ job "dsconfig" {
       mode = "bridge"
     }
 
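+    # Prestart task: block until the Tango database answers on tango.service.consul:10000, so dsconfig does not start too early.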
+    task "wait-for-db" {
+      lifecycle {
+        hook    = "prestart"
+        sidecar = false
+      }
+      driver = "docker"
+
+      config {
+        image   = "[[ $.registry.astron.url ]]/busybox:latest"
+        command = "sh"
+        args    = ["-c", "while ! nc -z tango.service.consul 10000; do sleep 1; done"]
+      }
+    }
+
     task "dsconfig" {
       driver = "docker"
 
@@ -22,6 +36,11 @@ job "dsconfig" {
         source      = "${NOMAD_META_cdb_json_url}"
         destination = "local/dsconfig-update-settings.json"
         mode        = "file"
+
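+        # S3 credentials for go-getter so the CDB JSON can be fetched from the object store; values are templated by Levant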
+        options {
+          aws_access_key_id     = "[[.object_storage.user.name]]"
+          aws_access_key_secret = "[[.object_storage.user.pass]]"
+        }
       }
 
       config {
diff --git a/sbin/prepare_dev_env.sh b/sbin/prepare_dev_env.sh
index 5c9faf617dea9c8b7ddc5857f07b510017b8cd6f..7e1d51cbf8eb6ffe6807f0a8a5b157efcf22f8aa 100755
--- a/sbin/prepare_dev_env.sh
+++ b/sbin/prepare_dev_env.sh
@@ -81,5 +81,6 @@ docker run --rm -i -v "$docker_volume":/mnt bash bash  <<- EOM
   mkdir -p /mnt/volumes/object-storage-data
   mkdir -p /mnt/volumes/jupyter-notebooks
   mkdir -p /mnt/volumes/IERS-data
+  mkdir -p /mnt/volumes/caltables
   chmod 0777 -R /mnt/volumes
 EOM
diff --git a/tangostationcontrol/VERSION b/tangostationcontrol/VERSION
index 967995cd2b53e1f28f76c50020426fd12d428050..969eb252497ceec1593a4b037de294d89fa0996e 100644
--- a/tangostationcontrol/VERSION
+++ b/tangostationcontrol/VERSION
@@ -1 +1 @@
-0.50.2
+0.51.2