diff --git a/.gitattributes b/.gitattributes
index c5e65a8f5c8d20d41478dbb416697af85fc126a9..94f5cc69b4dc2fd2a2bee285365a5f572418e8be 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -184,6 +184,7 @@ CEP/BB/MWImager/test/tmwimager.in_ms1.vds -text
 CEP/BB/MWImager/test/tmwimager.in_ms2.vds -text
 CEP/BB/MWImager/test/tmwimager.in_vd -text
 CEP/BB/ParmDB/test/tmakesourcedb.in_2 -text
+CEP/BB/Pipeline/library/makevds.bash -text
 CEP/BB/SourceDB/bootstrap -text
 /CMakeLists.txt -text
 Firmware/tools/bootstrap -text
diff --git a/CEP/BB/Pipeline/library/DPPP.py b/CEP/BB/Pipeline/library/DPPP.py
index ff64191162030c920dcbffdf38d7fef76d295a6d..d31e2957a535bd0f302174ebfc1ef2bfe5f0bb6d 100755
--- a/CEP/BB/Pipeline/library/DPPP.py
+++ b/CEP/BB/Pipeline/library/DPPP.py
@@ -37,12 +37,13 @@ class DPPP(WSRTrecipe):
     compress UV-data in frequency and time."""
     def __init__(self):
         WSRTrecipe.__init__(self)
-        self.inputs['parset-file'] = 'dppp.parset'
-        self.inputs['cluster-name'] = 'lioff'
-        self.inputs['observation'] = ''
-        self.inputs['output-dir'] = None
-        self.inputs['vds-dir'] = None
-        self.inputs['dryrun'] = False
+        self.inputs['parset-file'] = 'dppp.parset'
+        self.inputs['cluster-name'] = 'lioff'
+        self.inputs['observation'] = ''
+        self.inputs['output-dir'] = None
+        self.inputs['vds-dir'] = None
+        self.inputs['logfile'] = 'dppp.log'
+        self.inputs['dryrun'] = False
         self.helptext = """
         This function runs the distributed DPPP
         Usage: DPPP [OPTION...]
@@ -57,6 +58,8 @@ class DPPP(WSRTrecipe):
                            (default: '/data/${USER}/<observation>')
         --vds-dir          directory where the VDS-files reside;
                            (default: '/users/${USER}/data/<observation>')
+        --logfile          root name of logfile of each subprocess
+                           (default 'dppp.log')
         --dryrun           do a dry run
                            (default: no)
         """
@@ -88,7 +91,7 @@ class DPPP(WSRTrecipe):
         opts += ['-nomasterhost']
         opts += ['-dsn', dataset]
         opts += ['-cdn', clusterdesc]
-        opts += ['-logfile', 'dppp.log']
+        opts += ['-logfile', self.inputs['logfile']]
         opts += ['-dry' if self.inputs['dryrun'] else '-nodry']
         # program started by 'startdistproc'
         opts += [os.path.join(sysconfig.lofar_root(),
@@ -104,7 +107,7 @@ class DPPP(WSRTrecipe):
             self.print_error('startdistproc failed!')
             return 1
 
-        # Combine the VDS-files generated by dppp_node.py into one GDS file
+        # Combine the VDS-files generated by dppp_node.py into one GDS-file.
         self.print_message('Generating gds file from vds files')
         opts = [dataset]
         opts += glob.glob(os.path.join(vds_dir, '*.vds'))
diff --git a/CEP/BB/Pipeline/library/MWImager.py b/CEP/BB/Pipeline/library/MWImager.py
index 3d4dfc93c1f694569d68d80b90824e12ae63e81c..932031f078c3dd24644dfc4db6a5e87c983bf3a4 100755
--- a/CEP/BB/Pipeline/library/MWImager.py
+++ b/CEP/BB/Pipeline/library/MWImager.py
@@ -54,10 +54,10 @@ class MWImager(WSRTrecipe):
                            (no default)
         --output-dir       directory where images will be stored
                            (default: same directory as MS)
-        --logfile          root name of logfile of each subprocess
-                           (default 'mwimager.log')
         --vds-dir          directory where the VDS-files reside;
                            (default: '/users/${USER}/data/<observation>')
+        --logfile          root name of logfile of each subprocess
+                           (default 'mwimager.log')
         --dryrun           do a dry run
                            (default: no)
         """
diff --git a/CEP/BB/Pipeline/library/MakeVDS.py b/CEP/BB/Pipeline/library/MakeVDS.py
index 37fe2f4ffa2c1f3cfd366743b1c639c93dbb906e..85d803ef493e0ad8b4a3a636dbee38aa7f6530cb 100755
--- a/CEP/BB/Pipeline/library/MakeVDS.py
+++ b/CEP/BB/Pipeline/library/MakeVDS.py
@@ -39,22 +39,25 @@ class MakeVDSError(Exception):
 
 
 class MakeVDS(WSRTrecipe):
-    """Generate a VDS files for all MS-files that belong to the specified
-    observation.
-    The argument `cluster-name' is used to locate a cluster description file,
-    which (among other information) contains a list of mount points to use
-    when searching for MS-files. Use the argument `directory' if the MS-files
-    are located in a directory below the mount points.
-    After the VDS files have been generated a GDS file (which is like a
-    concatenation of all VDS files) is generated and stored in the current
-    directory."""
+    """
+    Generate VDS files for all MS-files that belong to the specified
+    observation. The argument `cluster-name' is used to locate a cluster
+    description file, which (among other information) contains a list of mount
+    points to use when searching for MS-files. Use the argument `input-dir' if
+    the MS-files are located in a directory below the mount points. The
+    location where the VDS files will be stored can be specified using the
+    argument `vds-dir'; `/users/${USER}/data/<observation>' by default. When
+    the VDS files have been generated, a GDS file (which is like a
+    concatenation of all VDS files) is generated in the same directory.
+    """
     def __init__(self):
         WSRTrecipe.__init__(self)
-        self.inputs['cluster-name'] = 'lioff'
-        self.inputs['observation'] = ''
-        self.inputs['directory'] = None
-        self.inputs['vds-dir'] = None
+        self.inputs['cluster-name'] = 'lioff'
+        self.inputs['observation'] = ''
+        self.inputs['input-dir'] = None
+        self.inputs['vds-dir'] = None
+        self.inputs['dryrun'] = False
         self.helptext = """
         This function generates vds files for the MS files that comprise
         the given observation.
@@ -64,12 +67,14 @@
                            (default: 'lioff')
         --observation      name of the observation (e.g. L2007_03463)
                            (no default)
-        --directory        directory, relative to the mount point, where
+        --input-dir        directory, relative to the mount point, where
                            the input data is stored (e.g.
                            /lifs001/pipeline); if None, use directory
                            <mount-point>/<observation>
         --vds-dir          directory for the output VDS-files; if None, use
                            directory '/users/${USER}/data/<observation>'
+        --dryrun           do a dry run
+                           (default: no)
         """
 
     ## Code to generate results ----------------------------------------
@@ -80,7 +85,7 @@ class MakeVDS(WSRTrecipe):
         clusterdesc = sysconfig.clusterdesc_file(self.inputs['cluster-name'])
         ms_files = Observation(self.inputs['cluster-name'],
                                self.inputs['observation'],
-                               self.inputs['directory']).ms_files()
+                               self.inputs['input-dir']).ms_files()
         vds_dir = self.inputs['vds-dir'] \
                   if self.inputs['vds-dir'] \
                   else os.path.join('/users', os.environ['USER'], 'data',
@@ -98,22 +103,26 @@
         if not os.path.exists(vds_dir):
             os.makedirs(vds_dir)
             self.print_debug('Created directory ' + vds_dir)
-        
+
+        # Generate the VDS-files.
         fail = 0
         for (ms, vds) in zip(ms_files, vds_files):
             self.print_message('Processing file ' + ms)
-            fail += self.cook_system('makevds', [clusterdesc, ms, vds])
+            if not self.inputs['dryrun']:
+                fail += self.cook_system('makevds', [clusterdesc, ms, vds])
         if fail:
             self.print_error(str(fail) + ' makevds process(es) failed!')
             return 1
 
+        # Combine the VDS-files into one GDS-file.
         self.print_message('Generating gds file from vds files')
         opts = [os.path.join(vds_dir, self.inputs['observation'] + '.gds')]
         opts.extend(vds_files)
-        if self.cook_system('combinevds', opts):
-            self.print_error('combinevds failed!')
-            return 1
+        if not self.inputs['dryrun']:
+            if self.cook_system('combinevds', opts):
+                self.print_error('combinevds failed!')
+                return 1
 
         return 0
diff --git a/CEP/BB/Pipeline/library/Makefile.am b/CEP/BB/Pipeline/library/Makefile.am
new file mode 100644
index 0000000000000000000000000000000000000000..a6a4d48d12ccd35a23c27164a29db749e1ba0709
--- /dev/null
+++ b/CEP/BB/Pipeline/library/Makefile.am
@@ -0,0 +1,14 @@
+# This will install .py files into $(bindir) with their execute bit set.
+bin_SCRIPTS = dppp_node.py \
+              DPPP.py \
+              MakeVDS.py \
+              MWImager.py \
+              StandardImagingPipeline.py
+
+# This will install .py, .pyc, and .pyo files into the site-packages directory.
+pypkgdir = $(pythondir)/lofar/pipeline
+pypkg_PYTHON = __init__.py \
+               Observation.py \
+               sysconfig.py
+
+include $(top_srcdir)/Makefile.common
diff --git a/CEP/BB/Pipeline/library/StandardImagingPipeline.py b/CEP/BB/Pipeline/library/StandardImagingPipeline.py
index c804dcaaed1425fae5810cb781fc4857881dc626..ac446d7b369a20e6c72fa168f47da050a967ef97 100755
--- a/CEP/BB/Pipeline/library/StandardImagingPipeline.py
+++ b/CEP/BB/Pipeline/library/StandardImagingPipeline.py
@@ -24,12 +24,10 @@
 
 """Script to run the Standard Imaging Pipeline"""
 
-from WSRTrecipe import WSRTrecipe
-from ingredient import WSRTingredient
-#from dppp import DPPP
-#from mwimager import MWImager
-from parset import Parset
-import sysconfig
+from lofar.pipeline.WSRTrecipe import WSRTrecipe
+from lofar.pipeline.ingredient import WSRTingredient
+from lofar.pipeline.parset import Parset
+from lofar.pipeline import sysconfig
 
 import os
 import sys
@@ -45,7 +43,7 @@ class StandardImagingPipeline(WSRTrecipe):
         self.inputs['make-vds-files'] = True
         self.inputs['input-dir'] = None
         self.inputs['output-dir'] = None
-        self.inputs['vds-output-dir'] = None
+        self.inputs['vds-dir'] = None
         self.inputs['dryrun'] = False
         self.helptext = """
         This is the recipe for the LOFAR standard imagaging pipeline.
@@ -60,14 +58,14 @@ class StandardImagingPipeline(WSRTrecipe):
         --observation     name of the observation to be processed
                           (no default)
         --make-vds-files  create VDS files
-                          (default: no)
+                          (default: yes)
         --input-dir       directory for the input MS-files; only needed
                           when VDS files are missing
                           (optional; no default)
         --output-dir      directory for the output MS-files; only needed
                           when VDS files are missing
                           (optional; default: '/data/${USER}/<obs>')
-        --vds-output-dir  directory for the output VDS-files;
+        --vds-dir         directory for the output VDS-files;
                           only needed until IDPPP creates these files
                           (optional; default: '/users/${USER}/data/<obs>)
         --dryrun          do a dry run
@@ -85,17 +83,18 @@
             self.inputs['output-dir'] = '/data/' + os.environ['USER'] + \
                                         '/' + obs
 
-        if self.inputs['vds-output-dir'] is None:
-            self.inputs['vds-output-dir'] = '/users/' + os.environ['USER'] + \
-                                            '/data/' + obs
+        if self.inputs['vds-dir'] is None:
+            self.inputs['vds-dir'] = '/users/' + os.environ['USER'] + \
+                                     '/data/' + obs
 
         # Create VDS files for the MS-files in the observation, if requested.
         if self.inputs['make-vds-files']:
             inputs = WSRTingredient()
             outputs = WSRTingredient()
             inputs['cluster-name'] = self.inputs['cluster-name']
-            inputs['observation'] = self.inputs['observation']
-            inputs['directory'] = self.inputs['input-dir']
+            inputs['observation'] = self.inputs['observation']
+            inputs['input-dir'] = self.inputs['input-dir']
+            inputs['dryrun'] = self.inputs['dryrun']
             sts = self.cook_recipe('MakeVDS', inputs, outputs)
             if sts:
                 print "MakeVDS returned with status", sts
@@ -106,7 +105,7 @@
             inputs['cluster-name'] = self.inputs['cluster-name']
             inputs['observation'] = self.inputs['observation']
             inputs['output-dir'] = self.inputs['output-dir']
-            inputs['vds-output-dir'] = self.inputs['vds-output-dir']
+            inputs['vds-dir'] = self.inputs['vds-dir']
             inputs['dryrun'] = self.inputs['dryrun']
             outputs = WSRTingredient()
             sts = self.cook_recipe('DPPP', inputs, outputs)
@@ -122,6 +121,9 @@
             inputs['cluster-name'] = self.inputs['cluster-name']
             inputs['observation'] = self.inputs['observation']
             inputs['output-dir'] = self.inputs['output-dir']
+            inputs['vds-dir'] = self.inputs['vds-dir']
+            inputs['dryrun'] = self.inputs['dryrun']
+
             outputs = WSRTingredient()
             sts = self.cook_recipe('MWImager', inputs, outputs)
             if sts:
diff --git a/CEP/BB/Pipeline/library/__init__.py b/CEP/BB/Pipeline/library/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e779b78f84595e28f8bc784fddffff1b8c9b513d
--- /dev/null
+++ b/CEP/BB/Pipeline/library/__init__.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+#
+# __init__.py: module initialization file
+#
+# Copyright (C) 2002-2008
+# ASTRON (Netherlands Foundation for Research in Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, seg@astron.nl
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# $Id$
+
+"""Module initialization file"""
diff --git a/CEP/BB/Pipeline/library/dppp_node.py b/CEP/BB/Pipeline/library/dppp_node.py
index 27252d5aa404b321eafb2053feab539f03a8aead..c28ff239e4ce4fae4243ab9042ab61b2fceb66f7 100755
--- a/CEP/BB/Pipeline/library/dppp_node.py
+++ b/CEP/BB/Pipeline/library/dppp_node.py
@@ -1,4 +1,28 @@
 #!/usr/bin/env python
+#
+# dppp_node.py: script to run DPPP on a single node
+#
+# Copyright (C) 2002-2008
+# ASTRON (Netherlands Foundation for Research in Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, seg@astron.nl
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# $Id$
+
+"""Script to run DPPP on a single node"""
 
 import os, sys, logging, socket
 
@@ -64,10 +88,14 @@ if msin == msout:
 vds = vdsdir + '/' + os.path.basename(msout) + '.vds'
 logging.info('Output VDS: %s', vds)
 
-# Create working dir if it doesn't exist and change to it.
+# Create working dir if it doesn't exist and change to it. Working directory
+# `wd' may already have been created by another instance of dppp_node.py. So
+# wrap os.makedirs() in a try-except block.
 if wd:
-    if not os.path.exists(wd):
+    try:
         os.makedirs(wd)
+    except OSError, e:
+        logging.info(e)
     os.chdir(wd)
     wd = os.getcwd()
     logging.info('Current working directory: %s', wd)
diff --git a/CEP/BB/Pipeline/library/export_image.g b/CEP/BB/Pipeline/library/export_image.g
new file mode 100755
index 0000000000000000000000000000000000000000..46e8f7ada089d323d4a15c71b7fb99e2ba790d18
--- /dev/null
+++ b/CEP/BB/Pipeline/library/export_image.g
@@ -0,0 +1,30 @@
+#!/usr/bin/env glish
+
+include 'viewer.g'
+
+dp:=dv.newdisplaypanel();
+
+for(i in 3:len(argv))
+{
+    path:=split(argv[i],'/');
+    imfile:=path[len(path)];
+
+    print paste(argv[i], "->", spaste(imfile, ".ps"));
+
+    dd:=dv.loaddata(argv[i],'raster');
+    dp.register(dd);
+
+    dd.setoptions([titletext=[value=imfile]]);
+    dd.setoptions([axislabelswitch=[value=T],xgridtype=[value='Full grid'],ygridtype=[value='Full grid']]);
+    #dd.setoptions([colormode=[value=colormap]]); <-- seems to screw things up, cannot do 'Adjust' in the GUI after this.
+    dd.setoptions([colormap=[value='Hot Metal 2']]);
+    dd.setoptions([wedge=[value=T]]);
+
+    man:=dp.canvasprintmanager();
+    man.writeps(spaste(imfile, ".ps"), 'a4', F, 300);
+
+    dp.unregister(dd);
+}
+
+dp.done();
+exit;
diff --git a/CEP/BB/Pipeline/library/makevds.bash b/CEP/BB/Pipeline/library/makevds.bash
new file mode 100755
index 0000000000000000000000000000000000000000..30d2e5809531643fa3c5a1e24eccb984172787d2
--- /dev/null
+++ b/CEP/BB/Pipeline/library/makevds.bash
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+source /users/loose/mwimager/LOFAR/installed/gnu_opt/lofarinit.sh
+
+for (( i=0; i<3; i++ ))
+do
+    for (( j=0; j<12; j++ ))
+    do
+        ms=$(printf "/lifs%03d/pipeline/L2007_03463_SB%d.MS" \
+                    $(expr $j + 1) $(expr 12 "*" $i + $j))
+        echo "Processing $(basename $ms)"
+        makevds $HOME/lioff.clusterdesc $ms $(basename $ms).vds
+    done
+done
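
Note on the dppp_node.py hunk above: the patch replaces the `if not os.path.exists(wd)' guard with a try-except around os.makedirs(), because several dppp_node.py instances may race to create the same working directory and the existence check is not atomic. The stand-alone sketch below illustrates that pattern in Python 2 syntax, matching the scripts in this patch; the errno.EEXIST filter and the example path are illustrative additions and are not part of the patch, which simply logs any OSError and continues.

#!/usr/bin/env python
# Illustrative sketch only: race-tolerant creation of a shared working
# directory, as done in dppp_node.py above.
import errno
import logging
import os

def ensure_dir(path):
    """Create `path' unless another process created it first."""
    try:
        os.makedirs(path)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise                # real failure: permissions, bad path, ...
        logging.info('%s already exists', path)

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    wd = '/tmp/dppp_example_wd'  # example path, not taken from the patch
    ensure_dir(wd)
    os.chdir(wd)
    logging.info('Current working directory: %s', os.getcwd())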