diff --git a/.gitattributes b/.gitattributes index ae8a01b027fe6bf4213a7a682fae03b5e2f2a89b..e2359c94fd9401685285fa0a4e2a57bba60f6f64 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1614,8 +1614,8 @@ CEP/Pipeline/recipes/sip/nodes/selfcal_finalize.py eol=lf CEP/Pipeline/recipes/sip/nodes/setupparmdb.py eol=lf CEP/Pipeline/recipes/sip/nodes/setupsourcedb.py eol=lf CEP/Pipeline/recipes/sip/nodes/vdsmaker.py eol=lf +CEP/Pipeline/recipes/sip/pipeline.cfg.CEP4.docker-template -text CEP/Pipeline/recipes/sip/pipeline.cfg.in eol=lf -CEP/Pipeline/recipes/sip/pipeline.cfg.thead01.cep4 -text CEP/Pipeline/recipes/sip/plugins/PipelineStep_addMapfile.py -text CEP/Pipeline/recipes/sip/plugins/PipelineStep_changeMapfile.py -text CEP/Pipeline/recipes/sip/plugins/PipelineStep_combineParsets.py -text diff --git a/CEP/Pipeline/recipes/sip/CMakeLists.txt b/CEP/Pipeline/recipes/sip/CMakeLists.txt index f3654d95a5bbb549086695a73255a390a4b1bb75..dedd7ed49221b20be5ebed706a32eb521908ca4e 100644 --- a/CEP/Pipeline/recipes/sip/CMakeLists.txt +++ b/CEP/Pipeline/recipes/sip/CMakeLists.txt @@ -113,6 +113,7 @@ install(FILES install(FILES ${CMAKE_CURRENT_BINARY_DIR}/pipeline.cfg + ${CMAKE_CURRENT_BINARY_DIR}/pipeline.cfg.CEP4 ${CMAKE_CURRENT_BINARY_DIR}/tasks.cfg DESTINATION share/pipeline) @@ -127,6 +128,29 @@ configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/pipeline.cfg.in ${CMAKE_CURRENT_BINARY_DIR}/pipeline.cfg) +# Convert configuration files through docker-template +foreach(_file pipeline.cfg.CEP4) + # _src -> _dst + set(_src ${CMAKE_CURRENT_SOURCE_DIR}/${_file}.docker-template) + set(_dst ${CMAKE_CURRENT_BINARY_DIR}/${_file}) + + # add generating command, and (any) target to force the generation + # when "all" is built. 
+ add_custom_command( + OUTPUT ${_dst} + COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/docker-template < ${_src} > ${_dst} + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/docker-template ${_src} + ) + add_custom_target(${_file}_target ALL DEPENDS ${_dst}) + + # install resulting file + install(FILES + ${_dst} + DESTINATION share/pipeline + RENAME pipeline.cfg.CEP4 + ) +endforeach() + configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/tasks.cfg.in ${CMAKE_CURRENT_BINARY_DIR}/tasks.cfg) diff --git a/CEP/Pipeline/recipes/sip/bin/runPipeline.sh b/CEP/Pipeline/recipes/sip/bin/runPipeline.sh index 5c5de91a59eb429552ceacac4823380a0afda465..2fdfe4fabdccfcb19e3a54090e5829f488f70e09 100755 --- a/CEP/Pipeline/recipes/sip/bin/runPipeline.sh +++ b/CEP/Pipeline/recipes/sip/bin/runPipeline.sh @@ -15,9 +15,10 @@ # runPipeline.sh <obsid> || pipelineAborted.sh <obsid> OBSID=$1 +shift if [ -z "$OBSID" ]; then - echo "Usage: $0 <obsid>" + echo "Usage: $0 <obsid> <pipeline parameters>" exit 1 fi @@ -35,10 +36,7 @@ getParset.py -o $OBSID >$PARSET PROGRAM_NAME=$(getparsetvalue $PARSET "ObsSW.Observation.ObservationControl.PythonControl.programName") # Run pipeline -OPTIONS=" \ - -d \ - -c ${LOFARROOT}/share/pipeline/pipeline.cfg \ - -t ${LOFARROOT}/share/pipeline/tasks.cfg" +OPTIONS=" -d $@" # Set up the environment (information to propagate to the node scripts for monitoring and logging) export LOFAR_OBSID="$OBSID" diff --git a/CEP/Pipeline/recipes/sip/pipeline.cfg.thead01.cep4 b/CEP/Pipeline/recipes/sip/pipeline.cfg.CEP4.docker-template similarity index 85% rename from CEP/Pipeline/recipes/sip/pipeline.cfg.thead01.cep4 rename to CEP/Pipeline/recipes/sip/pipeline.cfg.CEP4.docker-template index 62ad298207d88a02f6ed3c1c416a505127967774..552bfb33abf8fb37baeaa9dac6f26ca8ed2391f9 100644 --- a/CEP/Pipeline/recipes/sip/pipeline.cfg.thead01.cep4 +++ b/CEP/Pipeline/recipes/sip/pipeline.cfg.CEP4.docker-template @@ -22,10 +22,10 @@ hdf5root = wcsroot = /opt/wcslib pythonpath = 
/opt/lofar/lib/python2.7/site-packages # runtime dir is a global FS (nfs, lustre) to exchange small files (parsets, vds, map files, etc) -runtime_directory = /shared/mol/regression_test/rundir +runtime_directory = /data/share/pipeline recipe_directories = [%(pythonpath)s/lofarpipe/recipes] # working dir is the local dir in which input/output dataproducts reside -working_directory = /globalhome/mol/regression_test/working_dir +working_directory = /data/scratch task_files = [%(lofarroot)s/share/pipeline/tasks.cfg] [layout] @@ -50,11 +50,9 @@ xml_stat_file = %(runtime_directory)s/%(job_name)s/logs/%(start_time)s/statistic method = none [docker] -image = lofar-patched +image = lofar-pipeline:${LOFAR_TAG} [remote] -#method = slurm_srun_cep3 -#method = ssh_docker method = custom_cmdline max_per_node = 1 @@ -85,5 +83,4 @@ max_per_node = 1 # /bin/bash -c # # Required because the pipeline framework needs some bash functionality in the commands it starts. -#cmdline = ssh -n -tt -x localhost srun -w {host} -N 1 -n 1 --jobid={slurm_job_id} docker run -t --rm -e LUSER={uid} -w g -v /home/mol/.ssh:/home/lofar/.ssh:ro -v /globalhome/mol/regression_test:/globalhome/mol/regression_test -v /shared:/shared --net=host {docker_image} /bin/bash -c -cmdline = ssh -n -tt -x localhost srun -w {host} -N 1 -n 1 --jobid={slurm_job_id} docker run -t --rm -e LUSER={uid} -v %(runtime_directory)s:%(runtime_directory)s -v %(working_directory)s:%(working_directory)s --net=host {docker_image} /bin/bash -c "\"{command}\"" +cmdline = ssh -n -tt -x localhost srun -w {host} -N 1 -n 1 --jobid={slurm_job_id} docker run --rm -e LUSER={uid} -v %(runtime_directory)s:%(runtime_directory)s -v %(working_directory)s:%(working_directory)s -v /data:/data --net=host {docker_image} /bin/bash -c "\"{command}\"" diff --git a/MAC/Services/src/PipelineControl.py b/MAC/Services/src/PipelineControl.py index f40ca7d0613370a0eb43785b696b14eaf652bb4d..b8afe302d2ac73f1e34af06f130d218a76087237 100755 --- 
a/MAC/Services/src/PipelineControl.py +++ b/MAC/Services/src/PipelineControl.py @@ -351,10 +351,11 @@ class PipelineControl(OTDBBusListener): " -e LUSER=$UID" " -v $HOME/.ssh:/home/lofar/.ssh:ro" " -e SLURM_JOB_ID=$SLURM_JOB_ID" - " runPipeline.sh {obsid}" + " runPipeline.sh {obsid} --config /opt/lofar/share/pipeline/pipeline.cfg.{cluster}" .format( obsid = treeId, tag = parset.dockerTag(), + cluster = parset.processingCluster() ), sbatch_params=sbatch_params