diff --git a/CEP/PyBDSM/doc/source/conf.py b/CEP/PyBDSM/doc/source/conf.py
index ce464f45ee572012b11231880a3f6fe77175a554..b541401cfad0214fa7950e5b14df6ef2ce7f3870 100644
--- a/CEP/PyBDSM/doc/source/conf.py
+++ b/CEP/PyBDSM/doc/source/conf.py
@@ -41,7 +41,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'PyBDSM'
-copyright = u'2013, David Rafferty and Niruj Mohan'
+copyright = u'2014, David Rafferty and Niruj Mohan'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -50,7 +50,7 @@ copyright = u'2013, David Rafferty and Niruj Mohan'
 # The short X.Y version.
 version = '1.8'
 # The full version, including alpha/beta/rc tags.
-release = '1.8.0'
+release = '1.8.1'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/CEP/PyBDSM/doc/source/examples.rst b/CEP/PyBDSM/doc/source/examples.rst
index bcb11485d873cac499f22aedebba4ee07673a56e..b71c024cdaf400bb0e7dc2a0abea30fb9c8289c2 100644
--- a/CEP/PyBDSM/doc/source/examples.rst
+++ b/CEP/PyBDSM/doc/source/examples.rst
@@ -139,18 +139,27 @@ Lastly, the plot window is closed, and the source catalog is written out to an A
     WRITE_CATALOG: Write the Gaussian, source, or shapelet list to a file.
     ================================================================================
     outfile ............... None : Output file name. None => file is named
-                                   automatically
+                                   automatically; 'SAMP' => send to SAMP hub (e.g.,
+                                   to TOPCAT, ds9, or Aladin)
     bbs_patches ........... None : For BBS format, type of patch to use: None => no
                                    patches. 'single' => all Gaussians in one patch.
                                    'gaussian' => each Gaussian gets its own patch.
                                    'source' => all Gaussians belonging to a single
                                    source are grouped into one patch
+    bbs_patches_mask ...... None : Name of the mask file (of same size as input image)
+                                   that defines the patches if bbs_patches = 'mask'
     catalog_type .......... 'srl': Type of catalog to write:  'gaul' - Gaussian
                                    list, 'srl' - source list (formed by grouping
                                    Gaussians), 'shap' - shapelet list
     clobber .............. False : Overwrite existing file?
+    correct_proj .......... True : Correct source parameters for image projection
+                                   (BBS format only)?
     format ............... 'fits': Format of output catalog: 'bbs', 'ds9', 'fits',
-                                   'star', 'kvis', or 'ascii'
+                                   'star', 'kvis', 'ascii', 'csv', 'casabox',
+                                   or 'sagecal'
+    incl_chan ............ False : Include flux densities from each channel (if any)?
+    incl_empty ........... False : Include islands without any valid Gaussians (source
+                                   list only)?
     srcroot ............... None : Root name for entries in the output catalog. None
                                    => use image file name
 
@@ -284,25 +293,6 @@ At this point, make sure that TOPCAT is started and its SAMP hub is running (act
 ::
 
     BSDM [2]: inp write_catalog
-    --------> inp(write_catalog)
-    WRITE_CATALOG: Write the Gaussian, source, or shapelet list to a file.
-    ================================================================================
-    outfile ............... None : Output file name. None => file is named
-                                   automatically; 'SAMP' => send to SAMP hub (e.g.,
-                                   to TOPCAT, ds9, or Aladin)
-    bbs_patches ........... None : For BBS format, type of patch to use: None => no
-                                   patches. 'single' => all Gaussians in one patch.
-                                   'gaussian' => each Gaussian gets its own patch.
-                                   'source' => all Gaussians belonging to a single
-                                   source are grouped into one patch
-    catalog_type .......... 'srl': Type of catalog to write:  'gaul' - Gaussian
-                                   list, 'srl' - source list (formed by grouping
-                                   Gaussians), 'shap' - shapelet list
-    clobber .............. False : Overwrite existing file?
-    format ............... 'fits': Format of output catalog: 'bbs', 'ds9', 'fits',
-                                   'star', 'kvis', or 'ascii'
-    srcroot ............... None : Root name for entries in the output catalog. None
-                                   => use image file name
 
     BDSM [3]: outfile='SAMP'
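
For reference, the same catalog can be sent to the SAMP hub from a Python script instead of the interactive shell. A minimal sketch (the input file name is a placeholder, and a SAMP hub such as TOPCAT must already be running):

    import bdsm  # or "import lofar.bdsm as bdsm", depending on the installation

    img = bdsm.process_image('VLA_image.fits')  # placeholder input image
    # outfile='SAMP' broadcasts the full FITS-format catalog to the running hub
    img.write_catalog(outfile='SAMP', format='fits', catalog_type='srl')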
 
diff --git a/CEP/PyBDSM/doc/source/export_image.rst b/CEP/PyBDSM/doc/source/export_image.rst
index ac353ad8406256f3cb9c77104dd931c9de295dca..4da6ce46a67705d4621d4b9f6389a5023e174020 100644
--- a/CEP/PyBDSM/doc/source/export_image.rst
+++ b/CEP/PyBDSM/doc/source/export_image.rst
@@ -4,7 +4,7 @@
 ``export_image``: exporting internally derived images
 **************************************************************
 
-Internally derived images (e.g, the Gaussian model image) can be exported to FITS files using the ``export_image`` task:
+Internally derived images (e.g., the Gaussian model image) can be exported to FITS or CASA files using the ``export_image`` task:
 
 .. parsed-literal::
 
@@ -14,12 +14,15 @@ Internally derived images (e.g, the Gaussian model image) can be exported to FIT
                                    automatically; 'SAMP' => send to SAMP hub (e.g., to
                                    TOPCAT, ds9, or Aladin)
     :term:`clobber` .............. False : Overwrite existing file?
-    :term:`img_format` ........... 'fits': Format of output image: 'fits' or 'casa' (at the
-                                   moment only 'fits' is supported)
+    :term:`img_format` ........... 'fits': Format of output image: 'fits' or 'casa'
     :term:`img_type` ....... 'gaus_resid': Type of image to export: 'gaus_resid',
                                    'shap_resid', 'rms', 'mean', 'gaus_model',
                                    'shap_model', 'ch0', 'pi', 'psf_major', 'psf_minor',
-                                   'psf_pa', 'psf_ratio', 'psf_ratio_aper'
+                                   'psf_pa', 'psf_ratio', 'psf_ratio_aper', 'island_mask'
+    :term:`mask_dilation` ............ 0 : Number of iterations to use for island-mask dilation.
+                                   0 => no dilation
+    :term:`pad_image` ............ False : Pad image (with zeros) to original size
+
 
 Each of the parameters is described in detail below.
 
@@ -32,7 +35,7 @@ Each of the parameters is described in detail below.
         This parameter is a Boolean (default is ``False``) that determines whether existing files are overwritten or not.
 
     img_format
-        This parameter is a string (default is ``'fits'``) that sets the output file format: ``'fits'`` - FITS format, ``'casa'`` - CASA format.
+        This parameter is a string (default is ``'fits'``) that sets the output file format: ``'fits'`` - FITS format, ``'casa'`` - CASA format (requires pyrap).
 
     img_type
         This parameter is a string (default is ``'gaus_resid'``) that sets the type of image to export.
@@ -64,3 +67,10 @@ Each of the parameters is described in detail below.
 
         * ``'psf_ratio_aper'`` - image of peak-to-aperture flux variation (1/beam)
 
+        * ``'island_mask'`` - mask of islands (0 = outside island, 1 = inside island)
+
+    mask_dilation
+        This parameter is an integer (default is ``0``) that sets the number of dilation iterations to use when making the island mask. More iterations produce larger masked regions (one iteration expands the size of features in the mask by one pixel in all directions). If dilation is used, a binary closing is then performed with a structure array roughly the size of the beam, so that holes and gaps in the mask smaller than the beam are removed.
+
+    pad_image
+        This parameter is a Boolean (default is ``False``) that determines whether the output image is padded to be the same size as the original image (without any trimming defined by the ``trim_box`` parameter). If ``False``, the output image will have the size specified by the ``trim_box`` parameter.
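
A minimal scripting sketch of the new island-mask export (file names and trim_box values are placeholders; it assumes the standard ``bdsm`` scripting interface):

    import bdsm

    # Process only a subimage, then export the island mask dilated by two
    # iterations and padded back to the full image size
    img = bdsm.process_image('image.fits', trim_box=(100, 400, 100, 400))
    img.export_image(img_type='island_mask', img_format='fits',
                     mask_dilation=2, pad_image=True,
                     outfile='image_island_mask.fits', clobber=True)
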
diff --git a/CEP/PyBDSM/doc/source/process_image.rst b/CEP/PyBDSM/doc/source/process_image.rst
index cb82bf5df5be6639877ce64e169b215e27cdefb0..3c184fc1c6809680345d2ce05e3ea51be6d83788 100644
--- a/CEP/PyBDSM/doc/source/process_image.rst
+++ b/CEP/PyBDSM/doc/source/process_image.rst
@@ -709,7 +709,10 @@ The output options are:
                                    patches. 'single' => all Gaussians in one patch.
                                    'gaussian' => each Gaussian gets its own patch.
                                    'source' => all Gaussians belonging to a single
-                                   source are grouped into one patch
+                                   source are grouped into one patch. 'mask' => use mask
+                                   file specified by bbs_patches_mask
+      :term:`bbs_patches_mask` .... None : Name of the mask file (of same size as input image)
+                                   that defines the patches if bbs_patches = 'mask'
       :term:`indir` ............... None : Directory of input FITS files. None => get from
                                    filename
       :term:`opdir_overwrite` .. 'overwrite': 'overwrite'/'append': If output_all=True,
@@ -735,16 +738,16 @@ The output options are:
 .. glossary::
 
     bbs_patches
-        This parameter is a string (default is ``None``) that sets the type of patch to use in BBS-formatted catalogs. When the Gaussian catalogue is written as a BBS-readable sky file, this
-        determines whether all Gaussians are in a single patch (``'single'``), there are no
-        patches (``None``), all Gaussians for a given source are in a separate patch (``'source'``), or
-        each Gaussian gets its own patch (``'gaussian'``).
+        This parameter is a string (default is ``None``) that sets the type of patch to use in BBS-formatted catalogs. When the Gaussian catalogue is written as a BBS-readable sky file, this option determines whether all Gaussians are in a single patch (``'single'``), there are no patches (``None``), all Gaussians for a given source are in a separate patch (``'source'``), each Gaussian gets its own patch (``'gaussian'``), or a mask image is used to define the patches (``'mask'``).
 
         If you wish to have patches defined by island, then set
         ``group_by_isl = True`` before fitting to force all
         Gaussians in an island to be in a single source. Then set
         ``bbs_patches = 'source'`` when writing the catalog.
 
+    bbs_patches_mask
+        This parameter is a string (default is ``None``) that sets the file name of the mask file to use to define patches in BBS-formatted catalogs. The mask image should be 1 inside the patches and 0 elsewhere and should be the same size as the input image (before any ``trim_box`` is applied). Any Gaussians that fall outside of the patches will be ignored and will not appear in the output sky model.
+
     indir
         This parameter is a string (default is ``None``) that sets the directory of input FITS files. If ``None``, the directory is defined by the input filename.
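
A minimal sketch of writing a BBS sky model with mask-defined patches (file names are placeholders; the mask must be 1 inside the patches, 0 elsewhere, and the same size as the untrimmed input image):

    import bdsm

    img = bdsm.process_image('image.fits')
    img.write_catalog(format='bbs', catalog_type='gaul', bbs_patches='mask',
                      bbs_patches_mask='patches_mask.fits', clobber=True)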
 
diff --git a/CEP/PyBDSM/doc/source/whats_new.rst b/CEP/PyBDSM/doc/source/whats_new.rst
index 293e694f8da6988b1bfd0e5467927d227dfb65a5..b37e317b1bf1e341da775d0cf696f1d791664b17 100644
--- a/CEP/PyBDSM/doc/source/whats_new.rst
+++ b/CEP/PyBDSM/doc/source/whats_new.rst
@@ -4,6 +4,22 @@
 What's New
 **********
 
+Version 1.8.1 (2014/01/14):
+
+    * Added option (``bbs_patches = 'mask'``) to allow patches in an output BBS sky model to be defined using a mask image (set with the ``bbs_patches_mask`` option).
+
+    * Fixed a bug that caused the ``incl_empty`` option to be ignored when ``format='fits'`` in the ``write_catalog`` task.
+
+    * Enabled output of images in CASA format in the ``export_image`` task (``img_format = 'casa'``).
+
+    * Added an option to ``export_image`` to export an island-mask image, with ones where there is emission and zeros elsewhere (``img_type = 'island_mask'``). Features in the island mask may be optionally dilated by specifying the number of dilation iterations with the ``mask_dilation`` parameter. The mask image may be padded with zeros to match the original image when the ``trim_box`` option was used to analyze only a portion of the image (``pad_image = True``).
+
+    * Added an option to write a CASA region file to the ``write_catalog`` task (``format = 'casabox'``).
+
+    * Added an option to write a CSV catalog to the ``write_catalog`` task (``format = 'csv'``).
+
+    * Added error message when the rms is zero in some part of the rms map.
+
 Version 1.8.0 (2013/10/16):
 
     * Improved wavelet fitting. Added option so that wavelet fitting can be done to the sum of images on the remaining wavelet scales, improving the signal for fitting (controlled with the ``atrous_sum`` option). Added option so that user can choose whether to include new islands found only in the wavelet images in the final fit or not (controlled with the ``atrous_orig_isl`` option).
diff --git a/CEP/PyBDSM/doc/source/write_catalog.rst b/CEP/PyBDSM/doc/source/write_catalog.rst
index 5b133dd7857b114c811b87eb7f967d890add4b66..a842cc8dde9e33b702fa9f40710d5dd47d31b0df 100644
--- a/CEP/PyBDSM/doc/source/write_catalog.rst
+++ b/CEP/PyBDSM/doc/source/write_catalog.rst
@@ -23,16 +23,20 @@ The task parameters are as follows:
                                    patches. 'single' => all Gaussians in one patch.
                                    'gaussian' => each Gaussian gets its own patch.
                                    'source' => all Gaussians belonging to a single
-                                   source are grouped into one patch
+                                   source are grouped into one patch. 'mask' => use mask
+                                   file specified by bbs_patches_mask
+    :term:`bbs_patches_mask` ...... None : Name of the mask file (of same size as input image)
+                                   that defines the patches if bbs_patches = 'mask'
     :term:`catalog_type` .......... 'srl': Type of catalog to write:  'gaul' - Gaussian
                                    list, 'srl' - source list (formed by grouping
-                                   Gaussians), 'shap' - shapelet list (not yet
-                                   supported)
+                                   Gaussians), 'shap' - shapelet list (FITS
+                                   format only)
     :term:`clobber` .............. False : Overwrite existing file?
     :term:`correct_proj` .......... True : Correct source parameters for image projection
                                    (BBS format only)?
     :term:`format` ............... 'fits': Format of output Gaussian list: 'bbs', 'ds9',
-                                   'fits', 'star', 'kvis', 'ascii', or 'sagecal'
+                                   'fits', 'star', 'kvis', 'ascii', 'csv', 'casabox', or
+                                   'sagecal'
     :term:`incl_chan` ............ False : Include fluxes from each channel (if any)?
     :term:`incl_empty` ........... False : Include islands without any valid Gaussians
                                    (source list only)?
@@ -47,16 +51,16 @@ Each of the parameters is described in detail below.
         This parameter is a string (default is ``None``) that sets the name of the output file. If ``None``, the file is named automatically. If 'SAMP' the full catalog (i.e., ``format = 'fits'``) is sent to a running SAMP Hub (e.g., to TOPCAT or Aladin).
 
     bbs_patches
-        This parameter is a string (default is ``None``) that sets the type of patch to use in BBS-formatted catalogs. When the Gaussian catalogue is written as a BBS-readable sky file, this
-        determines whether all Gaussians are in a single patch (``'single'``), there are no
-        patches (``None``), all Gaussians for a given source are in a separate patch (``'source'``), or
-        each Gaussian gets its own patch (``'gaussian'``).
+        This parameter is a string (default is ``None``) that sets the type of patch to use in BBS-formatted catalogs. When the Gaussian catalogue is written as a BBS-readable sky file, this option determines whether all Gaussians are in a single patch (``'single'``), there are no patches (``None``), all Gaussians for a given source are in a separate patch (``'source'``), each Gaussian gets its own patch (``'gaussian'``), or a mask image is used to define the patches (``'mask'``).
 
         If you wish to have patches defined by island, then set
         ``group_by_isl = True`` before fitting to force all
         Gaussians in an island to be in a single source. Then set
         ``bbs_patches = 'source'`` when writing the catalog.
 
+    bbs_patches_mask
+        This parameter is a string (default is ``None``) that sets the file name of the mask file to use to define patches in BBS-formatted catalogs. The mask image should be 1 inside the patches and 0 elsewhere and should be the same size as the input image (before any ``trim_box`` is applied). Any Gaussians that fall outside of the patches will be ignored and will not appear in the output sky model.
+
     catalog_type
         This parameter is a string (default is ``'srl'``) that sets the type of catalog to write:  ``'gaul'`` - Gaussian list, ``'srl'`` - source list
         (formed by grouping Gaussians), ``'shap'`` - shapelet list (``'fits'`` format only)
@@ -93,11 +97,15 @@ Each of the parameters is described in detail below.
 
         * ``'kvis'`` - kvis format (Gaussian list only)
 
-        * ``'ascii'`` - simple text file
+        * ``'ascii'`` - simple text file with spaces separating the values
+
+        * ``'csv'`` - Comma-separated Values (CSV) text file
+
+        * ``'casabox'`` - CASA region file (boxes only)
 
         * ``'sagecal'`` - SAGECAL sky model format (Gaussian list only)
 
-        Catalogues with the ``'fits'`` and ``'ascii'`` formats include all available
+        Catalogues with the ``'fits'``, ``'ascii'``, and ``'csv'`` formats include all available
         information (see :ref:`output_cols` for column definitions). The
         other formats include only a subset of the full information.
 
@@ -118,7 +126,7 @@ Definition of output columns
 The information included in the Gaussian and source catalogs varies by format and can include the following quantities.
 
 .. note::
-    For ACSII and FITS formats, the reference frequency (in Hz) and equinox are stored in the header of the catalog. The header in ASCII catalogs is the first few lines of the catalog. For FITS catalogs, this information is stored in the comments as well as in the FREQ0 and EQUINOX keywords in the primary header.
+    For ASCII, CSV, and FITS formats, the reference frequency (in Hz) and equinox are stored in the header of the catalog. The header in ASCII and CSV catalogs is the first few lines of the catalog. For FITS catalogs, this information is stored in the comments as well as in the FREQ0 and EQUINOX keywords in the primary header.
 
 * **Gaus_id:** a unique number that identifies the Gaussian, starting from zero
 
diff --git a/CEP/PyBDSM/src/python/_version.py b/CEP/PyBDSM/src/python/_version.py
index eeb8f17924177bd2c408f55e3706e0308bb50cd2..7d27789c008c16cde34b14f824d19f7b1849021f 100644
--- a/CEP/PyBDSM/src/python/_version.py
+++ b/CEP/PyBDSM/src/python/_version.py
@@ -9,7 +9,7 @@ adding to the changelog will naturally do this.
 """
 
 # Version number
-__version__ = '1.8.0'
+__version__ = '1.8.1'
 
 # Store svn Revision number. For this to work, one also needs to do:
 #
@@ -27,8 +27,26 @@ def changelog():
     PyBDSM Changelog.
     -----------------------------------------------------------------------
 
-    2013/11/04 - Added error message when rms is zero in some part of the
-        image.
+    2014/01/14 - Version 1.8.1
+
+    2014/01/13 - Added option (bbs_patches = 'mask') to allow patches in
+        an output BBS sky model to be defined using a mask image.
+
+    2014/01/09 - Fixed a bug that caused the incl_empty option to be
+        ignored when format='fits' in the write_catalog task.
+
+    2013/12/05 - Enabled output of images in CASA format in the export_image
+        task (img_format = 'casa'). Added an option to the export_image task
+        to export an island-mask image, with ones where there is emission and
+        zeros elsewhere (img_type = 'island_mask'). Features in the island
+        mask may be optionally dilated by specifying the number of dilation
+        iterations with the mask_dilation parameter. Added an option to
+        write a CASA region file to the write_catalog task (format =
+        'casabox'). Added an option to write a CSV catalog to the
+        write_catalog task (format = 'csv').
+
+    2013/11/04 - Added error message when the rms is zero in some part of the
+        rms map.
 
     2013/10/16 - Version 1.8.0
 
diff --git a/CEP/PyBDSM/src/python/functions.py b/CEP/PyBDSM/src/python/functions.py
index 8cfea620c9948edee89dce591f1429e0afa5a59b..19e2d61c9deb0c485c91673c5542f69109f468b8 100755
--- a/CEP/PyBDSM/src/python/functions.py
+++ b/CEP/PyBDSM/src/python/functions.py
@@ -1262,6 +1262,8 @@ def read_image_from_file(filename, img, indir, quiet=False):
 
     # Read in data. If only a subsection of the image is desired (as defined
     # by the trim_box option), we can try to use PyFITS to read only that section.
+    img._original_shape = (shape_out[2], shape_out[3])
+    img._xy_hdr_shift = (0, 0)
     if img.opts.trim_box != None:
         img.trim_box = img.opts.trim_box
         xmin, xmax, ymin, ymax = img.trim_box
@@ -1315,6 +1317,7 @@ def read_image_from_file(filename, img, indir, quiet=False):
         # Adjust WCS keywords for trim_box starting x and y.
         hdr['crpix1'] -= xmin
         hdr['crpix2'] -= ymin
+        img._xy_hdr_shift = (xmin, ymin)
     else:
         if img.use_io == 'fits':
             data = fits[0].data
@@ -1360,7 +1363,7 @@ def convert_pyrap_header(pyrap_image, tmpdir):
 
 
 def write_image_to_file(use, filename, image, img, outdir=None,
-                                           clobber=True):
+                        pad_image=False, clobber=True):
     """ Writes image array to dir/filename using pyfits"""
     import numpy as N
     import os
@@ -1368,6 +1371,18 @@ def write_image_to_file(use, filename, image, img, outdir=None,
 
     mylog = mylogger.logging.getLogger("PyBDSM."+img.log+"Writefile")
 
+    wcs_obj = img.wcs_obj
+    if pad_image and img.opts.trim_box is not None:
+        # Pad image to original size
+        xsize, ysize = img._original_shape
+        xmin, ymin = img._xy_hdr_shift
+        image_pad = N.zeros((xsize, ysize), dtype=N.float32)
+        image_pad[xmin:xmin+image.shape[0], ymin:ymin+image.shape[1]] = image
+        image = image_pad
+    else:
+        xmin = 0
+        ymin = 0
+
     if filename == 'SAMP':
         import tempfile
         if not hasattr(img,'samp_client'):
@@ -1376,9 +1391,10 @@ def write_image_to_file(use, filename, image, img, outdir=None,
             img.samp_key = private_key
 
         # Broadcast image to SAMP Hub
-        temp_im = make_fits_image(N.transpose(image), img.wcs_obj, img.beam, img.frequency)
+        temp_im = make_fits_image(N.transpose(image), wcs_obj, img.beam,
+            img.frequency, xmin=xmin, ymin=ymin)
         tfile = tempfile.NamedTemporaryFile(delete=False)
-        temp_im.writeto(tfile.name,  clobber=clobber)
+        temp_im.writeto(tfile.name, clobber=clobber)
         send_fits_image(img.samp_client, img.samp_key, 'PyBDSM image', tfile.name)
     else:
         # Write image to FITS file
@@ -1386,15 +1402,34 @@ def write_image_to_file(use, filename, image, img, outdir=None,
             outdir = img.indir
         if not os.path.exists(outdir) and outdir != '':
             os.makedirs(outdir)
-        if os.path.exists(outdir + filename):
+        if os.path.exists(outdir+filename) and use == 'fits':
             if clobber:
-                os.remove(outdir + filename)
+                os.remove(outdir+filename)
             else:
                 return
-        temp_im = make_fits_image(N.transpose(image), img.wcs_obj, img.beam, img.frequency)
-        temp_im.writeto(outdir + filename,  clobber=clobber)
+        temp_im = make_fits_image(N.transpose(image), wcs_obj, img.beam,
+            img.frequency, xmin=xmin, ymin=ymin)
+        if use == 'rap':
+            outfile = outdir + filename + '.fits'
+        else:
+            outfile = outdir + filename
+        temp_im.writeto(outfile, clobber=clobber)
+        temp_im.close()
+
+        if use == 'rap':
+            # For CASA images, read in FITS image and convert
+            try:
+                import pyrap.images as pim
+                import os
+                outimage = pim.image(outfile)
+                outimage.saveas(outdir+filename, overwrite=clobber)
+            except ImportError, err:
+                import os
+                os.remove(outfile)
+                raise RuntimeError("Error writing CASA image. Use img_format = 'fits' instead.")
+
 
-def make_fits_image(imagedata, wcsobj, beam, freq):
+def make_fits_image(imagedata, wcsobj, beam, freq, xmin=0, ymin=0):
     """Makes a simple FITS hdulist appropriate for single-channel images"""
     from distutils.version import StrictVersion
     try:
@@ -1419,37 +1454,37 @@ def make_fits_image(imagedata, wcsobj, beam, freq):
     if use_header_update:
         header.update('CRVAL1', wcsobj.wcs.crval[0])
         header.update('CDELT1', wcsobj.wcs.cdelt[0])
-        header.update('CRPIX1', wcsobj.wcs.crpix[0])
+        header.update('CRPIX1', wcsobj.wcs.crpix[0] + xmin)
         header.update('CUNIT1', wcsobj.wcs.cunit[0])
         header.update('CTYPE1', wcsobj.wcs.ctype[0])
         header.update('CRVAL2', wcsobj.wcs.crval[1])
         header.update('CDELT2', wcsobj.wcs.cdelt[1])
-        header.update('CRPIX2', wcsobj.wcs.crpix[1])
+        header.update('CRPIX2', wcsobj.wcs.crpix[1] + ymin)
         header.update('CUNIT2', wcsobj.wcs.cunit[1])
         header.update('CTYPE2', wcsobj.wcs.ctype[1])
     else:
         header['CRVAL1'] = wcsobj.wcs.crval[0]
         header['CDELT1'] = wcsobj.wcs.cdelt[0]
-        header['CRPIX1'] = wcsobj.wcs.crpix[0]
+        header['CRPIX1'] = wcsobj.wcs.crpix[0] + xmin
         header['CUNIT1'] = str(wcsobj.wcs.cunit[0]).strip().lower() # needed due to bug in pywcs/astropy
         header['CTYPE1'] = wcsobj.wcs.ctype[0]
         header['CRVAL2'] = wcsobj.wcs.crval[1]
         header['CDELT2'] = wcsobj.wcs.cdelt[1]
-        header['CRPIX2'] = wcsobj.wcs.crpix[1]
+        header['CRPIX2'] = wcsobj.wcs.crpix[1] + ymin
         header['CUNIT2'] = str(wcsobj.wcs.cunit[1]).strip().lower() # needed due to bug in pywcs/astropy
         header['CTYPE2'] = wcsobj.wcs.ctype[1]
 
     # Add STOKES info
     if use_header_update:
-        header.update('CRVAL3', 1)
-        header.update('CDELT3', 1)
-        header.update('CRPIX3', 1)
+        header.update('CRVAL3', 1.0)
+        header.update('CDELT3', 1.0)
+        header.update('CRPIX3', 1.0)
         header.update('CUNIT3', '')
         header.update('CTYPE3', 'STOKES')
     else:
-        header['CRVAL3'] = 1
-        header['CDELT3'] = 1
-        header['CRPIX3'] = 1
+        header['CRVAL3'] = 1.0
+        header['CDELT3'] = 1.0
+        header['CRPIX3'] = 1.0
         header['CUNIT3'] = ''
         header['CTYPE3'] = 'STOKES'
 
@@ -1457,13 +1492,13 @@ def make_fits_image(imagedata, wcsobj, beam, freq):
     if use_header_update:
         header.update('CRVAL4', freq)
         header.update('CDELT4', 0.0)
-        header.update('CRPIX4', 1)
+        header.update('CRPIX4', 1.0)
         header.update('CUNIT4', 'Hz')
         header.update('CTYPE4', 'FREQ')
     else:
         header['CRVAL4'] = freq
         header['CDELT4'] = 0.0
-        header['CRPIX4'] = 1
+        header['CRPIX4'] = 1.0
         header['CUNIT4'] = 'Hz'
         header['CTYPE4'] = 'FREQ'
 
@@ -1479,15 +1514,15 @@ def make_fits_image(imagedata, wcsobj, beam, freq):
 
     # Add STOKES info
     if use_header_update:
-        header.update('CRVAL3', 1)
-        header.update('CDELT3', 1)
-        header.update('CRPIX3', 1)
+        header.update('CRVAL3', 1.0)
+        header.update('CDELT3', 1.0)
+        header.update('CRPIX3', 1.0)
         header.update('CUNIT3', '')
         header.update('CTYPE3', 'STOKES')
     else:
-        header['CRVAL3'] = 1
-        header['CDELT3'] = 1
-        header['CRPIX3'] = 1
+        header['CRVAL3'] = 1.0
+        header['CDELT3'] = 1.0
+        header['CRPIX3'] = 1.0
         header['CUNIT3'] = ''
         header['CTYPE3'] = 'STOKES'
 
@@ -1501,13 +1536,13 @@ def make_fits_image(imagedata, wcsobj, beam, freq):
         if use_header_update:
             header.update('CRVAL4', freq)
             header.update('CDELT4', 0.0)
-            header.update('CRPIX4', 1)
+            header.update('CRPIX4', 1.0)
             header.update('CUNIT4', 'Hz')
             header.update('CTYPE4', 'FREQ')
         else:
             header['CRVAL4'] = freq
             header['CDELT4'] = 0.0
-            header['CRPIX4'] = 1
+            header['CRPIX4'] = 1.0
             header['CUNIT4'] = 'Hz'
             header['CTYPE4'] = 'FREQ'
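
A standalone sketch of the padding bookkeeping introduced above (not the PyBDSM API): when a trim_box was used, the trimmed image is placed back into a zero array of the original size and CRPIX1/CRPIX2 are shifted by the same (xmin, ymin) offsets that were subtracted when the box was cut out, so the padded image keeps a consistent WCS.

    import numpy as np

    def pad_to_original(image, original_shape, xy_shift):
        """Pad a trimmed 2-D image back to its original size with zeros."""
        xsize, ysize = original_shape   # size of the untrimmed image
        xmin, ymin = xy_shift           # lower-left corner of the trim_box
        padded = np.zeros((xsize, ysize), dtype=np.float32)
        padded[xmin:xmin + image.shape[0], ymin:ymin + image.shape[1]] = image
        return padded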
 
diff --git a/CEP/PyBDSM/src/python/gaul2srl.py b/CEP/PyBDSM/src/python/gaul2srl.py
index 1ec1cb320c66670a92184f3059e9ddf6c06b962b..7458be7d518567a8923af889dacc7d43f5f6fcd4 100644
--- a/CEP/PyBDSM/src/python/gaul2srl.py
+++ b/CEP/PyBDSM/src/python/gaul2srl.py
@@ -21,6 +21,7 @@ from gausfit import Gaussian
 from interface import wrap
 import mylogger
 import numpy as N
+
 N.seterr(divide='raise')
 
 nsrc = Int(doc="Number of sources in the image")
@@ -100,7 +101,7 @@ class Op_gaul2srl(Op):
                 message += 'should be fit, try adjusting the flagging options (use\n'\
                            'show_fit with "ch0_flagged=True" to see the flagged Gaussians)\n'\
                            'or enabling the wavelet module (with "atrous_do=True").'
-            message += '\nTo include these islands in output source catalogs, set\n'\
+            message += '\nTo include empty islands in output source catalogs, set\n'\
                         'incl_empty=True in the write_catalog task.'
             mylog.warning(message)
 
diff --git a/CEP/PyBDSM/src/python/interface.py b/CEP/PyBDSM/src/python/interface.py
index f3a969257654732bd11fa85ed7f406db7108a570..86d20c0e7e42a12f618a5787e6bd26f9353c1504 100644
--- a/CEP/PyBDSM/src/python/interface.py
+++ b/CEP/PyBDSM/src/python/interface.py
@@ -745,8 +745,8 @@ def round_list_of_tuples(val):
 
 # The following functions give convenient access to the output functions in
 # output.py
-def export_image(img, outfile=None, img_format='fits',
-                 img_type='gaus_resid', clobber=False):
+def export_image(img, outfile=None, img_format='fits', pad_image=False,
+                 img_type='gaus_resid', mask_dilation=0, clobber=False):
     """Write an image to a file. Returns True if successful, False if not.
 
     outfile - name of resulting file; if None, file is
@@ -771,6 +771,7 @@ def export_image(img, outfile=None, img_format='fits',
         'psf_pa' - PSF position angle image (degrees east of north)
         'psf_ratio' - PSF peak-to-total flux ratio (in units of 1/beam)
         'psf_ratio_aper' - PSF peak-to-aperture flux ratio (in units of 1/beam)
+        'island_mask' - Island mask image (0 = outside island, 1 = inside island)
     """
     import os
     import functions as func
@@ -805,9 +806,6 @@ def export_image(img, outfile=None, img_format='fits',
     if (format in ['fits', 'casa']) == False:
         print '\033[91mERROR\033[0m: img_format must be "fits" or "casa"'
         return False
-    if format == 'casa':
-        print "\033[91mERROR\033[0m: Only img_format = 'fits' is supported at the moment"
-        return False
     filename = outfile
     if filename == None or filename == '':
         filename = img.imagename + '_' + img_type + '.' + format
@@ -822,57 +820,76 @@ def export_image(img, outfile=None, img_format='fits',
     try:
         if img_type == 'ch0':
             func.write_image_to_file(use_io, filename,
-                                     img.ch0_arr, img, bdir,
+                                     img.ch0_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'rms':
             func.write_image_to_file(use_io, filename,
-                                     img.rms_arr, img, bdir,
+                                     img.rms_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'mean':
             func.write_image_to_file(use_io, filename,
-                                     img.mean_arr, img, bdir,
+                                     img.mean_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'pi':
             func.write_image_to_file(use_io, filename,
-                                     img.ch0_pi_arr, img, bdir,
+                                     img.ch0_pi_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'psf_major':
             func.write_image_to_file(use_io, filename,
-                                     img.psf_vary_maj_arr*fwsig, img, bdir,
+                                     img.psf_vary_maj_arr*fwsig, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'psf_minor':
             func.write_image_to_file(use_io, filename,
-                                     img.psf_vary_min_arr*fwsig, img, bdir,
+                                     img.psf_vary_min_arr*fwsig, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'psf_pa':
             func.write_image_to_file(use_io, filename,
-                                     img.psf_vary_pa_arr, img, bdir,
+                                     img.psf_vary_pa_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'psf_ratio':
             func.write_image_to_file(use_io, filename,
-                                     img.psf_vary_ratio_arr, img, bdir,
+                                     img.psf_vary_ratio_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'psf_ratio_aper':
             func.write_image_to_file(use_io, filename,
-                                     img.psf_vary_ratio_aper_arr, img, bdir,
+                                     img.psf_vary_ratio_aper_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'gaus_resid':
             im = img.resid_gaus_arr
             func.write_image_to_file(use_io, filename,
-                                     im, img, bdir,
+                                     im, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'gaus_model':
             im = img.model_gaus_arr
             func.write_image_to_file(use_io, filename,
-                                     im, img, bdir,
+                                     im, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'shap_resid':
             func.write_image_to_file(use_io, filename,
-                                     img.resid_shap_arr, img, bdir,
+                                     img.resid_shap_arr, img, bdir, pad_image,
                                      clobber=clobber)
         elif img_type == 'shap_model':
             func.write_image_to_file(use_io, filename,
-                                     img.model_shap_arr, img, bdir,
+                                     img.model_shap_arr, img, bdir, pad_image,
+                                     clobber=clobber)
+        elif img_type == 'island_mask':
+            import numpy as N
+            import scipy.ndimage as nd
+            island_mask_bool = img.pyrank + 1 > 0
+            if mask_dilation > 0:
+                # Dilate the mask by specified number of iterations
+                island_mask_bool = nd.binary_dilation(island_mask_bool,
+                    iterations=mask_dilation)
+                # Perform a binary closing to remove small holes/gaps. The
+                # structure array is chosen to be about the size of the
+                # beam (assuming a normally sampled psf), so that holes/gaps
+                # smaller than the beam are removed.
+                pbeam = int(round(img.beam2pix(img.beam)[0] * 1.5))
+                island_mask_bool = nd.binary_closing(island_mask_bool,
+                    structure=N.ones((pbeam, pbeam)))
+            island_mask = N.array(island_mask_bool, dtype=N.float32)
+            func.write_image_to_file(use_io, filename,
+                                     island_mask, img, bdir, pad_image,
                                      clobber=clobber)
         else:
             print "\n\033[91mERROR\033[0m: img_type not recognized."
@@ -881,6 +898,11 @@ def export_image(img, outfile=None, img_format='fits',
             print '--> Image sent to SMAP hub'
         else:
             print '--> Wrote file ' + repr(filename)
+            if use_io == 'rap':
+                # remove the temporary fits file used as a pyrap template
+                import os
+                os.remove(filename+'.fits')
+
         return True
     except RuntimeError, err:
         # Catch and log error
@@ -898,12 +920,14 @@ def export_image(img, outfile=None, img_format='fits',
 
 def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='gaul',
                bbs_patches=None, incl_chan=False, incl_empty=False, clobber=False,
-               force_output=False, correct_proj=True):
+               force_output=False, correct_proj=True, bbs_patches_mask=None):
     """Write the Gaussian, source, or shapelet list to a file. Returns True if
     successful, False if not.
 
     filename - name of resulting file; if None, file is
-               named automatically.
+               named automatically. If 'SAMP', table is sent to a samp hub
+               (must be running already). If 'SCREEN', table is sent to the
+               screen.
     catalog_type - type of catalog
         "gaul"  - Gaussian list
         "srl"   - Source list
@@ -924,6 +948,8 @@ def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='g
         "single"   - all Gaussians are put into a single
                      patch
         "source"   - sources are grouped by source into patches
+        "mask"     - use a Boolean mask to define the patches
+    bbs_patches_mask - file name of mask file if bbs_patches="mask"
     incl_chan - Include fluxes for each channel?
     incl_empty - Include islands without any valid Gaussians (source list only)?
     sort_by - Property to sort output list by:
@@ -950,15 +976,19 @@ def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='g
     filename = outfile
     if isinstance(patch, str):
         patch = patch.lower()
-    if (format in ['fits', 'ascii', 'bbs', 'ds9', 'star',
-                   'kvis', 'sagecal']) == False:
+    if format not in ['fits', 'ascii', 'bbs', 'ds9', 'star',
+                   'kvis', 'sagecal', 'csv', 'casabox']:
         print '\033[91mERROR\033[0m: format must be "fits", '\
-            '"ascii", "ds9", "star", "kvis",  or "bbs"'
+            '"ascii", "ds9", "star", "kvis", "csv", "casabox", or "bbs"'
         return False
-    if (patch in [None, 'gaussian', 'single', 'source']) == False:
+    if patch not in [None, 'gaussian', 'single', 'source', 'mask']:
         print '\033[91mERROR\033[0m: patch must be None, '\
-            '"gaussian", "source", or "single"'
+                '"gaussian", "source", "single", or "mask"'
         return False
+    if patch == 'mask':
+        if bbs_patches_mask is None:
+            print '\033[91mERROR\033[0m: if patch is "mask", bbs_patches_mask must be set to the file name of the mask file'
+            return False
     if (catalog_type in ['gaul', 'srl', 'shap']) == False:
         print '\033[91mERROR\033[0m: catalog_type must be "gaul", '\
               '"srl", or "shap"'
@@ -967,10 +997,13 @@ def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='g
         if not force_output:
             print 'No sources were found in the image. Output file not written.'
             return False
-    if filename == '': filename = None
+    if filename == '':
+        filename = None
+    if filename is not None:
+        filename = filename.lower()
 
     # Now go format by format and call appropriate function
-    if filename == 'SAMP':
+    if filename == 'samp':
         import tempfile
         import functions as func
         import os
@@ -992,6 +1025,7 @@ def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='g
         func.send_fits_table(img.samp_client, img.samp_key, table_name, tfile.name)
         print '--> Table sent to SMAP hub'
         return True
+
     if format == 'fits':
         filename = output.write_fits_list(img, filename=filename,
                                              incl_chan=incl_chan, incl_empty=incl_empty,
@@ -1002,10 +1036,10 @@ def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='g
         else:
             print '--> Wrote FITS file ' + repr(filename)
             return True
-    if format == 'ascii':
+    if format == 'ascii' or format == 'csv':
         filename = output.write_ascii_list(img, filename=filename,
                                               incl_chan=incl_chan, incl_empty=incl_empty,
-                                              sort_by='index',
+                                              sort_by='index', format = format,
                                               clobber=clobber, objtype=catalog_type)
         if filename == None:
             print '\033[91mERROR\033[0m: File exists and clobber = False.'
@@ -1077,14 +1111,13 @@ def write_catalog(img, outfile=None, format='bbs', srcroot=None, catalog_type='g
         else:
             print '--> Wrote kvis file ' + repr(filename)
             return True
-    # if format == 'casabox':
-    #     filename = output.write_casa_gaul(img, filename=filename,
-    #                                   incl_wavelet=incl_wavelet,
-    #                                   clobber=clobber)
-    #     if filename == None:
-    #         print '\033[91mERROR\033[0m: File exists and clobber=False.'
-    #     else:
-    #         print '--> Wrote CASA clean box file ' + filename
+    if format == 'casabox':
+        filename = output.write_casa_gaul(img, filename=filename,
+                                      incl_empty=incl_empty, clobber=clobber)
+        if filename == None:
+            print '\033[91mERROR\033[0m: File exists and clobber=False.'
+        else:
+            print '--> Wrote CASA clean box file ' + filename
 
 def add_break_to_logfile(logfile):
     f = open(logfile, 'a')
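
A standalone illustration of the dilation-plus-closing step used for the 'island_mask' export (the array and the structuring-element size are made up; PyBDSM sizes the closing structure from the beam):

    import numpy as np
    import scipy.ndimage as nd

    island_mask = np.zeros((50, 50), dtype=bool)
    island_mask[20:25, 20:25] = True        # a small detected island

    # Grow the mask by two pixels in all directions
    dilated = nd.binary_dilation(island_mask, iterations=2)
    # Close holes/gaps smaller than roughly a beam (assumed ~4 pixels here)
    closed = nd.binary_closing(dilated, structure=np.ones((4, 4)))
    out = closed.astype(np.float32)         # 1.0 inside islands, 0.0 outside
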
diff --git a/CEP/PyBDSM/src/python/opts.py b/CEP/PyBDSM/src/python/opts.py
index c2873e6d7f3357cf634bba96aec4364eba1f5fb7..823cbdd0871eac9f6cec96a000e253e7fd8da8a2 100644
--- a/CEP/PyBDSM/src/python/opts.py
+++ b/CEP/PyBDSM/src/python/opts.py
@@ -841,22 +841,31 @@ class Opts(object):
                                  "'gaussian' => each Gaussian gets its own "\
                                  "patch. 'source' => all Gaussians belonging "\
                                  "to a single source are grouped into one patch. "\
-                                 "'mask' => use mask file (bbs_patches_mask)\n"\
+                                 "'mask' => use mask file specified by bbs_patches_mask\n"\
                                  "When the Gaussian catalogue is written as a "\
                                  "BBS-readable sky file, this determines whether "\
                                  "all Gaussians are in a single patch, there are "\
                                  "no patches, all Gaussians for a given source "\
-                                 "are in a separate patch, or each Gaussian gets "\
-                                 "its own patch.\n"\
+                                 "are in a separate patch, each Gaussian gets "\
+                                 "its own patch, or a mask image is used to define "\
+                                 "the patches.\n"\
                                  "If you wish to have patches defined by island, "\
-                                 "then set group_by_isl=True (under advanced_opts) "\
+                                 "then set group_by_isl = True (under advanced_opts) "\
                                  "before fitting to force all Gaussians in an "\
                                  "island to be in a single source. Then set "\
                                  "bbs_patches='source' when writing the catalog.",
                              group = "output_opts")
     bbs_patches_mask = Option(None, String(),
-                             doc = "Name of the mask file to use to define the BBS "\
-                                 "patches (FITS or CASA format)",
+                             doc = "Name of the mask file (of same size as input "\
+                                 "image) that defines the patches if bbs_patches "\
+                                 "= 'mask'\nA mask file may be used to define the "\
+                                 "patches in the output BBS sky model. The mask "\
+                                 "image should be 1 inside the patches and 0 "\
+                                 "elsewhere and should be the same size as the "\
+                                 "input image (before any trim_box is applied). Any "\
+                                 "Gaussians that fall outside of the patches "\
+                                 "will be ignored and will not appear in the "\
+                                 "output sky model.",
                              group = "output_opts")
     solnname = Option(None, String(),
                              doc = "Name of the run, to be prepended "\
@@ -1061,6 +1070,22 @@ class Opts(object):
                              doc = "Restrict sources to "\
                                  "be only of type 'S'",
                              group = "psf_vary_do")
+    psf_stype_only = Bool(True,
+                             doc = "Restrict sources to "\
+                                 "be only of type 'S'",
+                             group = "psf_vary_do")
+    psf_fwhm = Option(None, Tuple(Float(), Float(), Float()),
+                             doc = "FWHM of the PSF. Specify as (maj, "\
+                                 "min, pos ang E of N) in degrees. "\
+                                 "E.g., psf_fwhm = (0.06, 0.02, 13.3). None => "\
+                                 "estimate from image\n"\
+                                 "If the size of the PSF is specified with this option, "\
+                                 "the PSF and its variation acrosss the image are "\
+                                 "the PSF and its variation across the image are "\
+                                 "from the image. Instead, all sources "\
+                                 "are deconvolved with the specified PSF.",
+                             group = "psf_vary_do")
+
 
     #-----------------------------SHAPELET OPTIONS--------------------------------
     shapelet_basis = Enum("cartesian", "polar",
@@ -1150,9 +1175,9 @@ class Opts(object):
     clobber = Bool(False,
                              doc = "Overwrite existing file?",
                              group = 'hidden')
-    format = Enum('fits', 'ds9', 'ascii', 'bbs', 'star', 'kvis', 'sagecal',
+    format = Enum('fits', 'ds9', 'ascii', 'bbs', 'star', 'kvis', 'sagecal', 'csv', 'casabox',
                              doc = "Format of output catalog: 'bbs', "\
-                                 "'ds9', 'fits', 'star', 'kvis', 'ascii', or 'sagecal'\n"\
+                                 "'ds9', 'fits', 'star', 'kvis', 'ascii', 'csv', 'casabox', or 'sagecal'\n"\
                                  "The following formats are supported:\n"\
                                  "'bbs' - BlackBoard Selfcal sky model format "\
                                  "(Gaussian list only)\n"\
@@ -1213,17 +1238,16 @@ class Opts(object):
                                  "the location of the source center.",
                              group = 'hidden')
     img_format = Enum('fits', 'casa',
-                             doc = "Format of output image: 'fits' or "\
-                                 "'casa' (at the moment only 'fits' is "\
-                                 "supported)",
+                             doc = "Format of output image: 'fits' or 'casa'",
                              group = 'hidden')
     img_type = Enum('gaus_resid', 'shap_resid', 'rms', 'mean', 'gaus_model',
                              'shap_model', 'ch0', 'pi', 'psf_major', 'psf_minor',
-                             'psf_pa', 'psf_ratio', 'psf_ratio_aper',
+                             'psf_pa', 'psf_ratio', 'psf_ratio_aper', 'island_mask',
                              doc = "Type of image to export: 'gaus_resid', "\
                                  "'shap_resid', 'rms', 'mean', 'gaus_model', "\
                                  "'shap_model', 'ch0', 'pi', 'psf_major', "\
-                                 "'psf_minor', 'psf_pa'\nThe following images "\
+                                 "'psf_minor', 'psf_pa', 'psf_ratio', 'psf_ratio_aper', "\
+                                 "'island_mask'\nThe following images "\
                                  "can be exported:\n"\
                                  "'ch0' - image used for source detection\n"\
                                  "'rms' - rms map image\n"\
@@ -1237,8 +1261,26 @@ class Opts(object):
                                  "'psf_minor' - PSF minor axis FWHM image (FWHM in arcsec)\n"\
                                  "'psf_pa' - PSF position angle image (degrees east of north)\n"\
                                  "'psf_ratio' - PSF peak-to-total flux ratio (in units of 1/beam)\n"\
-                                 "'psf_ratio_aper' - PSF peak-to-aperture flux ratio (in units of 1/beam)",
+                                 "'psf_ratio_aper' - PSF peak-to-aperture flux ratio (in units of 1/beam)\n"\
+                                 "'island_mask' - Island mask image (0 = outside island, 1 = inside island)",
                              group = 'hidden')
+    mask_dilation = Int(0,
+                             doc = "Number of iterations to use for island-mask dilation. "\
+                                 "0 => no dilation\nThis option determines the number of "\
+                                 "dilation iterations to use when making the island mask. "\
+                                 "More iterations implies larger masked regions (one iteration "\
+                                 "expands the size of features in the mask by one pixel in all "\
+                                 "directions). After dilation, a closing operation is performed "\
+                                 "(using a structure array the size of the beam) to remove gaps "\
+                                 "and holes in the mask that are smaller than the beam.",
+                             group = "hidden")
+    pad_image = Bool(False,
+                             doc = "Pad image (with zeros) to original size\nIf True, the output "\
+                                 "image is padded to be the same size as the original "\
+                                 "image (without any trimming defined by the trim_box "\
+                                 "parameter). If False, the output image will have the "\
+                                 "size specified by the trim_box parameter.",
+                             group = "hidden")
     ch0_image = Bool(True,
                              doc = "Show the ch0 image. This is the image used for "\
                                  "source detection",
diff --git a/CEP/PyBDSM/src/python/output.py b/CEP/PyBDSM/src/python/output.py
index e18b03d6a3171176a97df1a1b96cb9fe39753128..c636a1fb32513b60c38135e891ea0c1ff20710b9 100644
--- a/CEP/PyBDSM/src/python/output.py
+++ b/CEP/PyBDSM/src/python/output.py
@@ -312,7 +312,7 @@ def write_ds9_list(img, filename=None, srcroot=None, deconvolve=False,
     return filename
 
 
-def write_ascii_list(img, filename=None, sort_by='indx',
+def write_ascii_list(img, filename=None, sort_by='indx', format = 'ascii',
                      incl_chan=False, incl_empty=False, clobber=False, objtype='gaul'):
     """Writes Gaussian list to an ASCII file"""
     import mylogger
@@ -326,7 +326,8 @@ def write_ascii_list(img, filename=None, sort_by='indx',
         if incl_empty:
             # Append the dummy sources for islands without any unflagged Gaussians
             outl[0] += img.dsources
-    outstr_list = make_ascii_str(img, outl, objtype=objtype, incl_empty=incl_empty)
+    outstr_list = make_ascii_str(img, outl, objtype=objtype,
+                                 incl_empty=incl_empty, format=format)
     if filename == None:
         if objtype == 'gaul':
             filename = img.imagename + '.gaul'
@@ -342,7 +343,7 @@ def write_ascii_list(img, filename=None, sort_by='indx',
     return filename
 
 
-def write_casa_gaul(img, filename=None, clobber=False):
+def write_casa_gaul(img, filename=None,  incl_empty=False, clobber=False):
     """Writes a clean box file for use in casapy"""
     import mylogger
     import os
@@ -413,7 +414,7 @@ def write_fits_list(img, filename=None, sort_by='index', objtype='gaul',
                                                               incl_aper=incl_aper,
                                                               incl_empty=incl_empty,
                                                               nmax=nmax, nchan=img.nchan)
-    out_list = make_fits_list(img, outl, objtype=objtype, nmax=nmax)
+    out_list = make_fits_list(img, outl, objtype=objtype, nmax=nmax, incl_empty=incl_empty)
     col_list = []
     for ind, col in enumerate(out_list):
       list1 = pyfits.Column(name=cnames[ind], format=cformats[ind],
@@ -790,7 +791,7 @@ def make_ds9_str(img, glist, gnames, deconvolve=False, objtype='gaul', incl_empt
     return outstr_list
 
 
-def make_ascii_str(img, glist, objtype='gaul', incl_empty=False):
+def make_ascii_str(img, glist, objtype='gaul', format='ascii', incl_empty=False):
     """Makes a list of string entries for an ascii region file."""
     from _version import __version__, __revision__
     outstr_list = []
@@ -821,9 +822,14 @@ def make_ascii_str(img, glist, objtype='gaul', incl_empty=False):
                                                               nchan=img.nchan)
         if cvals != None:
             cformats[-1] += "\n"
-            if i == 0:
-                outstr_list.append("# " + " ".join(cnames) + "\n")
-            outstr_list.append(" ".join(cformats) % tuple(cvals))
+            if format == 'ascii':
+                if i == 0:
+                    outstr_list.append("# " + " ".join(cnames) + "\n")
+                outstr_list.append(" ".join(cformats) % tuple(cvals))
+            else:
+                if i == 0:
+                    outstr_list.append("# " + ", ".join(cnames) + "\n")
+                outstr_list.append(", ".join(cformats) % tuple(cvals))
     return outstr_list
 
 
@@ -850,29 +856,34 @@ def make_fits_list(img, glist, objtype='gaul', nmax=30, incl_empty=False):
 
 
 def make_casa_str(img, glist):
-    """Makes a list of string entries for a casa clean box file."""
+    """Makes a list of string entries for a casa region file."""
     import functions as func
-    outstr_list = []
+    outstr_list = ['#CRTFv0 CASA Region Text Format version 0\n']
     sep = ' '
-    scale = 2.0
+    scale = 2.0 # scale box to 2 times FWHM of Gaussian
     for gindx, g in enumerate(glist[0]):
         x, y = g.centre_pix
-        xsize, ysize, ang = g.size_pix # FWHM
         ellx, elly = func.drawellipse(g)
-        blc = [int(min(ellx)), int(min(elly))]
-        trc = [int(max(ellx)), int(max(elly))]
+        blc = [min(ellx), min(elly)]
+        trc = [max(ellx), max(elly)]
 
         blc[0] -= (x - blc[0]) * scale
         blc[1] -= (y - blc[1]) * scale
         trc[0] += (trc[0] - x) * scale
         trc[1] += (trc[1] - y) * scale
-        # Format is: <id> <blcx> <blcy> <trcx> <trcy>
+
+        blc_sky = img.pix2sky(blc)
+        trc_sky = img.pix2sky(trc)
+
+        blc_sky_str = convert_radec_str(blc_sky[0], blc_sky[1])
+        trc_sky_str = convert_radec_str(trc_sky[0], trc_sky[1])
+
+        # Format is: box [ [<blcx>, <blcy>], [<trcx>, <trcy>] ]
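+        # e.g. (illustrative values only):
+        #   box [[13:52:11.326, +31.26.07.210], [13:52:10.804, +31.26.13.005]] coord=J2000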
-        # Note that we use gindx rather than g.gaus_num so that
-        # all Gaussians will have a unique id, even if wavelet
-        # Gaussians are included.
-        outstr_list.append(str(gindx+1) + sep + str(blc[0]) + sep +
-                           str(blc[1]) + sep + str(trc[0]) + sep +
-                           str(trc[1]) +'\n')
+        outstr_list.append('box [[' + ', '.join(blc_sky_str) + '], [' +
+                           ', '.join(trc_sky_str) + ']] coord=J2000\n')
     return outstr_list
 
 
@@ -901,6 +912,7 @@ def write_islands(img):
 
     f.close()
 
+
 def get_src(src_list, srcid):
     """Returns the source for srcid or None if not found"""
     for src in src_list:
@@ -908,6 +920,17 @@ def get_src(src_list, srcid):
             return src
     return None
 
+
+def convert_radec_str(ra, dec):
+    """Takes ra, dec in degrees and returns BBS/CASA strings"""
+    ra = ra2hhmmss(ra)
+    sra = str(ra[0]).zfill(2)+':'+str(ra[1]).zfill(2)+':'+str("%.3f" % (ra[2])).zfill(6)
+    dec = dec2ddmmss(dec)
+    decsign = ('-' if dec[3] < 0 else '+')
+    sdec = decsign+str(dec[0]).zfill(2)+'.'+str(dec[1]).zfill(2)+'.'+str("%.3f" % (dec[2])).zfill(6)
+    return sra, sdec
+
+
 def list_and_sort_gaussians(img, patch=None, root=None,
                             sort_by='index'):
     """Returns sorted lists of Gaussians and their names and patch names.
@@ -937,17 +960,20 @@ def list_and_sort_gaussians(img, patch=None, root=None,
     gausindx = [] # indices of Gaussians
     patchflux = [] # total flux of each patch
     patchindx = [] # indices of sources
+    patchnums = [] # number of patch from mask
 
     # If a mask image is to be used to define patches, read it in and
     # make a rank image from it
-#     if patch == 'mask':
-#         patches_mask = func.readimage(mask_file)
-#         act_pixels = patches_mask
-#         rank = len(patches_mask.shape)
-#         connectivity = nd.generate_binary_structure(rank, rank)
-#         labels, count = nd.label(act_pixels, connectivity)
-#         mask_labels = labels
-
+    use_mask = False
+    if patch not in ['single', 'gaussian', 'source', None]:
+        mask_file = img.opts.bbs_patches_mask
+        patches_mask, hdr = func.read_image_from_file(mask_file, img, img.indir)
+        use_mask = True
+        act_pixels = patches_mask[0,0]
+        rank = len(act_pixels.shape)
+        import scipy.ndimage as nd
+        connectivity = nd.generate_binary_structure(rank, rank)
+        mask_labels, count = nd.label(act_pixels, connectivity)
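+        # nd.label() gives each connected region in the mask a unique integer id
+        # (1 ... count); pixels outside all regions get id 0, which is treated
+        # below as falling outside of any patch.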
 
     src_list = img.sources
     for src in src_list:
@@ -968,8 +994,9 @@ def list_and_sort_gaussians(img, patch=None, root=None,
                 gausname = []
                 gausflux = []
                 gausindx = []
-#                 if patch == 'mask':
-#                     patchnum = mask_labels[g.centre_pix]
+            if use_mask:
+                patchnums.append(mask_labels[int(g.centre_pix[0]), int(g.centre_pix[1])])
+
 
         if patch == 'source':
             sorted_gauslist = list(gauslist)
@@ -998,6 +1025,27 @@ def list_and_sort_gaussians(img, patch=None, root=None,
             gausname = []
             gausflux = []
 
+    if use_mask:
+        unique_patch_ids = set(patchnums)
+
+        # Check if there is a patch with id = 0. If so, this means there were
+        # some Gaussians that fell outside of the regions in the patch
+        # mask file.
+        if 0 in unique_patch_ids:
+            import mylogger
+            mylog = mylogger.logging.getLogger("PyBDSM.write_gaul")
+            mylog.warning('Some sources fall outside of the regions '
+                      'defined in the mask file. These sources are not '
+                      'included in the output sky model.')
+        for p in unique_patch_ids:
+            if p != 0:
+                in_patch = N.where(N.array(patchnums) == p)
+                outlist.append(N.array(gauslist)[in_patch].tolist())
+                outnames.append(N.array(gausname)[in_patch].tolist())
+                patchnames.append('patch_'+str(p))
+                patchflux.append(N.sum(N.array(gausflux)[in_patch]))
+                patchindx.append(p)
+
     # Sort
     if patch == 'single' or patch == None:
         outlist = [list(gauslist)]
@@ -1170,11 +1218,11 @@ def make_output_columns(obj, fits=False, objtype='gaul', incl_spin=False,
     for i, v in enumerate(cvals):
         if fits:
             if isinstance(v, int):
-                cformats.append('1J')
+                cformats.append('J')
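+                # (a leading repeat count of 1 is implicit in FITS TFORM codes,
+                # so 'J' is equivalent to the old '1J', 'D' to '1D', etc.)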
             if isinstance(v, float):
-                cformats.append('1D')
+                cformats.append('D')
             if isinstance(v, str):
-                cformats.append('1A')
+                cformats.append('A')
             if isinstance(v, N.ndarray):
                 cformats.append('%iD' % (nmax**2,))
         else:
diff --git a/CEP/PyBDSM/src/python/psf_vary.py b/CEP/PyBDSM/src/python/psf_vary.py
index bbd76d90217dd2b6d4ac92b772c8f7ee0ba4861a..aefcf730c568e42082e2b3b1ffadf6420ee08037 100644
--- a/CEP/PyBDSM/src/python/psf_vary.py
+++ b/CEP/PyBDSM/src/python/psf_vary.py
@@ -49,298 +49,315 @@ class Op_psf_vary(Op):
             mylog.warning('PyFITS version is too old: psf_vary module skipped')
             return
 
-        over = 2
-        generators = opts.psf_generators; nsig = opts.psf_nsig; kappa2 = opts.psf_kappa2
-        snrtop = opts.psf_snrtop; snrbot = opts.psf_snrbot; snrcutstack = opts.psf_snrcutstack
-        gencode = opts.psf_gencode; primarygen = opts.psf_primarygen; itess_method = opts.psf_itess_method
-        tess_sc = opts.psf_tess_sc; tess_fuzzy= opts.psf_tess_fuzzy
-        bright_snr_cut = opts.psf_high_snr
-        s_only = opts.psf_stype_only
-        if opts.psf_snrcut < 5.0:
-            mylogger.userinfo(mylog, "Value of psf_snrcut too low; increasing to 5")
-            snrcut = 5.0
+        if opts.psf_fwhm is not None:
+            # User has specified a constant PSF to use, so skip PSF fitting/etc.
+            psf_maj = opts.psf_fwhm[0] # FWHM in deg
+            psf_min = opts.psf_fwhm[1] # FWHM in deg
+            psf_pa = opts.psf_fwhm[2] # PA in deg
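+            # Illustrative example: psf_fwhm = (0.0015, 0.0010, 45.0) corresponds to
+            # a 5.4 x 3.6 arcsec FWHM beam at a position angle of 45 degrees.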
+            mylogger.userinfo(mylog, 'Using constant PSF (major, minor, pos angle)',
+                  '(%s, %s, %s) degrees' % (round(psf_maj, 5),
+                                            round(psf_min, 5),
+                                            round(psf_pa, 1)))
         else:
-            snrcut = opts.psf_snrcut
-        img.psf_snrcut = snrcut
-        if opts.psf_high_snr != None:
-            if opts.psf_high_snr < 10.0:
-                mylogger.userinfo(mylog, "Value of psf_high_snr too low; increasing to 10")
-                high_snrcut = 10.0
+            # User did not specify a constant PSF, so estimate it
+            over = 2
+            generators = opts.psf_generators; nsig = opts.psf_nsig; kappa2 = opts.psf_kappa2
+            snrtop = opts.psf_snrtop; snrbot = opts.psf_snrbot; snrcutstack = opts.psf_snrcutstack
+            gencode = opts.psf_gencode; primarygen = opts.psf_primarygen; itess_method = opts.psf_itess_method
+            tess_sc = opts.psf_tess_sc; tess_fuzzy= opts.psf_tess_fuzzy
+            bright_snr_cut = opts.psf_high_snr
+            s_only = opts.psf_stype_only
+            if opts.psf_snrcut < 5.0:
+                mylogger.userinfo(mylog, "Value of psf_snrcut too low; increasing to 5")
+                snrcut = 5.0
+            else:
+                snrcut = opts.psf_snrcut
+            img.psf_snrcut = snrcut
+            if opts.psf_high_snr != None:
+                if opts.psf_high_snr < 10.0:
+                    mylogger.userinfo(mylog, "Value of psf_high_snr too low; increasing to 10")
+                    high_snrcut = 10.0
+                else:
+                    high_snrcut = opts.psf_high_snr
             else:
                 high_snrcut = opts.psf_high_snr
-        else:
-            high_snrcut = opts.psf_high_snr
-        img.psf_high_snr = high_snrcut
-
-        wtfns=['unity', 'roundness', 'log10', 'sqrtlog10']
-        if 0 <= itess_method < 4: tess_method=wtfns[itess_method]
-        else: tess_method='unity'
-
-        ### now put all relevant gaussian parameters into a list
-        ngaus = img.ngaus
-        nsrc = img.nsrc
-        num = N.zeros(nsrc, dtype=N.int32)
-        peak = N.zeros(nsrc)
-        xc = N.zeros(nsrc)
-        yc = N.zeros(nsrc)
-        bmaj = N.zeros(nsrc)
-        bmin = N.zeros(nsrc)
-        bpa = N.zeros(nsrc)
-        code = N.array(['']*nsrc);
-        rms = N.zeros(nsrc)
-        src_id_list = []
-        for i, src in enumerate(img.sources):
-            src_max = 0.0
-            for gmax in src.gaussians:
-                # Take only brightest Gaussian per source
-                if gmax.peak_flux > src_max:
-                    src_max = gmax.peak_flux
-                    g = gmax
-            num[i] = i
-            peak[i] = g.peak_flux
-            xc[i] = g.centre_pix[0]
-            yc[i] = g.centre_pix[1]
-            bmaj[i] = g.size_pix[0]
-            bmin[i] = g.size_pix[1]
-            bpa[i] = g.size_pix[2]
-            code[i] = img.sources[g.source_id].code
-            rms[i] = img.islands[g.island_id].rms
-        gauls = (num, peak, xc, yc, bmaj, bmin, bpa, code, rms)
-        tr_gauls = self.trans_gaul(gauls)
-
-        # takes gaussians with code=S and snr > snrcut.
-        if s_only:
-            tr = [n for n in tr_gauls if n[1]/n[8]>snrcut and n[7] == 'S']
-        else:
-            tr = [n for n in tr_gauls if n[1]/n[8]>snrcut]
-        g_gauls = self.trans_gaul(tr)
-
-        # computes statistics of fitted sizes. Same as psfvary_fullstat.f in fBDSM.
-        bmaj_a, bmaj_r, bmaj_ca, bmaj_cr, ni = _cbdsm.bstat(bmaj, None, nsig)
-        bmin_a, bmin_r, bmin_ca, bmin_cr, ni = _cbdsm.bstat(bmin, None, nsig)
-        bpa_a, bpa_r, bpa_ca, bpa_cr, ni = _cbdsm.bstat(bpa, None, nsig)
-
-        # get subset of sources deemed to be unresolved. Same as size_ksclip_wenss.f in fBDSM.
-        flag_unresolved = self.get_unresolved(g_gauls, img.beam, nsig, kappa2, over, img.psf_high_snr, plot)
-
-        # see how much the SNR-weighted sizes of unresolved sources differ from the synthesized beam.
-        wtsize_beam_snr = self.av_psf(g_gauls, img.beam, flag_unresolved)
-
-        # filter out resolved sources
-        tr_gaul = self.trans_gaul(g_gauls)
-        tr = [n for i, n in enumerate(tr_gaul) if flag_unresolved[i]]
-        g_gauls = self.trans_gaul(tr)
-        mylogger.userinfo(mylog, 'Number of unresolved sources', str(len(g_gauls[0])))
-
-        # get a list of voronoi generators. vorogenS has values (and not None) if generators='field'.
-        vorogenP, vorogenS = self.get_voronoi_generators(g_gauls, generators, gencode, snrcut, snrtop, snrbot, snrcutstack)
-        mylogger.userinfo(mylog, 'Number of generators for PSF variation', str(len(vorogenP[0])))
-        if len(vorogenP[0]) < 3:
-            mylog.warning('Insufficient number of generators')
-            return
+            img.psf_high_snr = high_snrcut
+
+            wtfns=['unity', 'roundness', 'log10', 'sqrtlog10']
+            if 0 <= itess_method < 4: tess_method=wtfns[itess_method]
+            else: tess_method='unity'
+
+            ### now put all relevant gaussian parameters into a list
+            ngaus = img.ngaus
+            nsrc = img.nsrc
+            num = N.zeros(nsrc, dtype=N.int32)
+            peak = N.zeros(nsrc)
+            xc = N.zeros(nsrc)
+            yc = N.zeros(nsrc)
+            bmaj = N.zeros(nsrc)
+            bmin = N.zeros(nsrc)
+            bpa = N.zeros(nsrc)
+            code = N.array(['']*nsrc);
+            rms = N.zeros(nsrc)
+            src_id_list = []
+            for i, src in enumerate(img.sources):
+                src_max = 0.0
+                for gmax in src.gaussians:
+                    # Take only brightest Gaussian per source
+                    if gmax.peak_flux > src_max:
+                        src_max = gmax.peak_flux
+                        g = gmax
+                num[i] = i
+                peak[i] = g.peak_flux
+                xc[i] = g.centre_pix[0]
+                yc[i] = g.centre_pix[1]
+                bmaj[i] = g.size_pix[0]
+                bmin[i] = g.size_pix[1]
+                bpa[i] = g.size_pix[2]
+                code[i] = img.sources[g.source_id].code
+                rms[i] = img.islands[g.island_id].rms
+            gauls = (num, peak, xc, yc, bmaj, bmin, bpa, code, rms)
+            tr_gauls = self.trans_gaul(gauls)
+
+            # takes gaussians with code=S and snr > snrcut.
+            if s_only:
+                tr = [n for n in tr_gauls if n[1]/n[8]>snrcut and n[7] == 'S']
+            else:
+                tr = [n for n in tr_gauls if n[1]/n[8]>snrcut]
+            g_gauls = self.trans_gaul(tr)
+
+            # computes statistics of fitted sizes. Same as psfvary_fullstat.f in fBDSM.
+            bmaj_a, bmaj_r, bmaj_ca, bmaj_cr, ni = _cbdsm.bstat(bmaj, None, nsig)
+            bmin_a, bmin_r, bmin_ca, bmin_cr, ni = _cbdsm.bstat(bmin, None, nsig)
+            bpa_a, bpa_r, bpa_ca, bpa_cr, ni = _cbdsm.bstat(bpa, None, nsig)
+
+            # get subset of sources deemed to be unresolved. Same as size_ksclip_wenss.f in fBDSM.
+            flag_unresolved = self.get_unresolved(g_gauls, img.beam, nsig, kappa2, over, img.psf_high_snr, plot)
+
+            # see how much the SNR-weighted sizes of unresolved sources differ from the synthesized beam.
+            wtsize_beam_snr = self.av_psf(g_gauls, img.beam, flag_unresolved)
+
+            # filter out resolved sources
+            tr_gaul = self.trans_gaul(g_gauls)
+            tr = [n for i, n in enumerate(tr_gaul) if flag_unresolved[i]]
+            g_gauls = self.trans_gaul(tr)
+            mylogger.userinfo(mylog, 'Number of unresolved sources', str(len(g_gauls[0])))
+
+            # get a list of voronoi generators. vorogenS has values (and not None) if generators='field'.
+            vorogenP, vorogenS = self.get_voronoi_generators(g_gauls, generators, gencode, snrcut, snrtop, snrbot, snrcutstack)
+            mylogger.userinfo(mylog, 'Number of generators for PSF variation', str(len(vorogenP[0])))
+            if len(vorogenP[0]) < 3:
+                mylog.warning('Insufficient number of generators')
+                return
 
-        mylogger.userinfo(mylog, 'Tesselating image')
-        # group generators into tiles
-        tile_prop = self.edit_vorogenlist(vorogenP, frac=0.9)
+            mylogger.userinfo(mylog, 'Tesselating image')
+            # group generators into tiles
+            tile_prop = self.edit_vorogenlist(vorogenP, frac=0.9)
 
-        # tesselate the image
-        volrank, vorowts = self.tesselate(vorogenP, vorogenS, tile_prop, tess_method, tess_sc, tess_fuzzy, \
-                  generators, gencode, image.shape)
-        if opts.output_all:
-            func.write_image_to_file(img.use_io, img.imagename + '.volrank.fits', volrank, img, dir)
+            # tesselate the image
+            volrank, vorowts = self.tesselate(vorogenP, vorogenS, tile_prop, tess_method, tess_sc, tess_fuzzy, \
+                      generators, gencode, image.shape)
+            if opts.output_all:
+                func.write_image_to_file(img.use_io, img.imagename + '.volrank.fits', volrank, img, dir)
 
-        tile_list, tile_coord, tile_snr = tile_prop
-        ntile = len(tile_list)
-        bar = statusbar.StatusBar('Determining PSF variation ............... : ', 0, ntile)
-        mylogger.userinfo(mylog, 'Number of tiles for PSF variation', str(ntile))
+            tile_list, tile_coord, tile_snr = tile_prop
+            ntile = len(tile_list)
+            bar = statusbar.StatusBar('Determining PSF variation ............... : ', 0, ntile)
+            mylogger.userinfo(mylog, 'Number of tiles for PSF variation', str(ntile))
 
-        # For each tile, calculate the weighted averaged psf image. Also for all the sources in the image.
-        cdelt = list(img.wcs_obj.acdelt[0:2])
-        factor=3.
-        psfimages, psfcoords, totpsfimage, psfratio, psfratio_aper = self.psf_in_tile(image, img.beam, g_gauls, \
-                   cdelt, factor, snrcutstack, volrank, tile_prop, plot, img)
-        npsf = len(psfimages)
+            # For each tile, calculate the weighted averaged psf image. Also for all the sources in the image.
+            cdelt = list(img.wcs_obj.acdelt[0:2])
+            factor=3.
+            psfimages, psfcoords, totpsfimage, psfratio, psfratio_aper = self.psf_in_tile(image, img.beam, g_gauls, \
+                       cdelt, factor, snrcutstack, volrank, tile_prop, plot, img)
+            npsf = len(psfimages)
 
         if opts.psf_use_shap:
-            # use totpsfimage to get beta, centre and nmax for shapelet decomposition. Use nmax=5 or 6
-            mask=N.zeros(totpsfimage.shape, dtype=bool)
-            (m1, m2, m3)=func.moment(totpsfimage, mask)
-            betainit=sqrt(m3[0]*m3[1])*2.0  * 1.4
-            tshape = totpsfimage.shape
-            cen = N.array(N.unravel_index(N.argmax(totpsfimage), tshape))+[1,1]
-            cen = tuple(cen)
-            nmax = 12
-            basis = 'cartesian'
-            betarange = [0.5,sqrt(betainit*max(tshape))]
-            beta, error  = sh.shape_varybeta(totpsfimage, mask, basis, betainit, cen, nmax, betarange, plot)
-            if error == 1: print '  Unable to find minimum in beta'
-
-            # decompose all the psf images using the beta from above
-            nmax=12; psf_cf=[]
-            for i in range(npsf):
-                psfim = psfimages[i]
-                cf = sh.decompose_shapelets(psfim, mask, basis, beta, cen, nmax, mode='')
-                psf_cf.append(cf)
-                if img.opts.quiet == False:
-                    bar.increment()
-            bar.stop()
-
-            # transpose the psf image list
-            xt, yt = N.transpose(tile_coord)
-            tr_psf_cf = N.transpose(N.array(psf_cf))
-
-            # interpolate the coefficients across the image. Ok, interpolate in scipy for
-            # irregular grids is crap. doesnt even pass through some of the points.
-            # for now, fit polynomial.
-            compress = 100.0
-            x, y = N.transpose(psfcoords)
-            if len(x) < 3:
-                mylog.warning('Insufficient number of tiles to do interpolation of PSF variation')
-                return
-
-            psf_coeff_interp, xgrid, ygrid = self.interp_shapcoefs(nmax, tr_psf_cf, psfcoords, image.shape, \
-                     compress, plot)
-
-            psfshape = psfimages[0].shape
-            skip = 5
-            aa = self.create_psf_grid(psf_coeff_interp, image.shape, xgrid, ygrid, skip, nmax, psfshape, \
-                 basis, beta, cen, totpsfimage, plot)
-            img.psf_images = aa
-        else:
-            if ntile < 4:
-                mylog.warning('Insufficient number of tiles to do interpolation of PSF variation')
-                return
-            else:
-                # Fit stacked PSFs with Gaussians and measure aperture fluxes
-                bm_pix = N.array([img.pixel_beam()[0]*fwsig, img.pixel_beam()[1]*fwsig, img.pixel_beam()[2]])
-                psf_maj = N.zeros(npsf)
-                psf_min = N.zeros(npsf)
-                psf_pa = N.zeros(npsf)
-                if img.opts.quiet == False:
-                    bar.start()
-                for i in range(ntile):
+            if opts.psf_fwhm is None:
+                # use totpsfimage to get beta, centre and nmax for shapelet decomposition. Use nmax=5 or 6
+                mask=N.zeros(totpsfimage.shape, dtype=bool)
+                (m1, m2, m3)=func.moment(totpsfimage, mask)
+                betainit=sqrt(m3[0]*m3[1])*2.0  * 1.4
+                tshape = totpsfimage.shape
+                cen = N.array(N.unravel_index(N.argmax(totpsfimage), tshape))+[1,1]
+                cen = tuple(cen)
+                nmax = 12
+                basis = 'cartesian'
+                betarange = [0.5,sqrt(betainit*max(tshape))]
+                beta, error  = sh.shape_varybeta(totpsfimage, mask, basis, betainit, cen, nmax, betarange, plot)
+                if error == 1: print '  Unable to find minimum in beta'
+
+                # decompose all the psf images using the beta from above
+                nmax=12; psf_cf=[]
+                for i in range(npsf):
                     psfim = psfimages[i]
-                    mask = N.zeros(psfim.shape, dtype=bool)
-                    x_ax, y_ax = N.indices(psfim.shape)
-                    maxv = N.max(psfim)
-                    p_ini = [maxv, (psfim.shape[0]-1)/2.0*1.1, (psfim.shape[1]-1)/2.0*1.1, bm_pix[0]/fwsig*1.3,
-                             bm_pix[1]/fwsig*1.1, bm_pix[2]*2]
-                    para, ierr = func.fit_gaus2d(psfim, p_ini, x_ax, y_ax, mask)
-                    ### first extent is major
-                    if para[3] < para[4]:
-                        para[3:5] = para[4:2:-1]
-                        para[5] += 90
-                    ### clip position angle
-                    para[5] = divmod(para[5], 180)[1]
-
-                    psf_maj[i] = para[3]
-                    psf_min[i] = para[4]
-                    posang = para[5]
-                    while posang >= 180.0:
-                        posang -= 180.0
-                    psf_pa[i] = posang
-
+                    cf = sh.decompose_shapelets(psfim, mask, basis, beta, cen, nmax, mode='')
+                    psf_cf.append(cf)
                     if img.opts.quiet == False:
                         bar.increment()
                 bar.stop()
 
-                # Interpolate Gaussian parameters
-                if img.aperture == None:
-                    psf_maps = [psf_maj, psf_min, psf_pa, psfratio]
-                else:
-                    psf_maps = [psf_maj, psf_min, psf_pa, psfratio, psfratio_aper]
-                nimgs = len(psf_maps)
-                bar = statusbar.StatusBar('Interpolating PSF images ................ : ', 0, nimgs)
-                if img.opts.quiet == False:
-                    bar.start()
-                map_list = mp.parallel_map(func.eval_func_tuple,
-                    itertools.izip(itertools.repeat(self.interp_prop),
-                    psf_maps, itertools.repeat(psfcoords),
-                    itertools.repeat(image.shape)), numcores=opts.ncores,
-                    bar=bar)
-                if img.aperture == None:
-                    psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int = map_list
+                # transpose the psf image list
+                xt, yt = N.transpose(tile_coord)
+                tr_psf_cf = N.transpose(N.array(psf_cf))
+
+                # interpolate the coefficients across the image. Interpolation in scipy
+                # for irregular grids is unreliable (it does not always pass through the
+                # input points), so for now fit a polynomial instead.
+                compress = 100.0
+                x, y = N.transpose(psfcoords)
+                if len(x) < 3:
+                    mylog.warning('Insufficient number of tiles to do interpolation of PSF variation')
+                    return
+
+                psf_coeff_interp, xgrid, ygrid = self.interp_shapcoefs(nmax, tr_psf_cf, psfcoords, image.shape, \
+                         compress, plot)
+
+                psfshape = psfimages[0].shape
+                skip = 5
+                aa = self.create_psf_grid(psf_coeff_interp, image.shape, xgrid, ygrid, skip, nmax, psfshape, \
+                     basis, beta, cen, totpsfimage, plot)
+                img.psf_images = aa
+        else:
+            if opts.psf_fwhm is None:
+                if ntile < 4:
+                    mylog.warning('Insufficient number of tiles to do interpolation of PSF variation')
+                    return
                 else:
-                    psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int = map_list
+                    # Fit stacked PSFs with Gaussians and measure aperture fluxes
+                    bm_pix = N.array([img.pixel_beam()[0]*fwsig, img.pixel_beam()[1]*fwsig, img.pixel_beam()[2]])
+                    psf_maj = N.zeros(npsf)
+                    psf_min = N.zeros(npsf)
+                    psf_pa = N.zeros(npsf)
+                    if img.opts.quiet == False:
+                        bar.start()
+                    for i in range(ntile):
+                        psfim = psfimages[i]
+                        mask = N.zeros(psfim.shape, dtype=bool)
+                        x_ax, y_ax = N.indices(psfim.shape)
+                        maxv = N.max(psfim)
+                        p_ini = [maxv, (psfim.shape[0]-1)/2.0*1.1, (psfim.shape[1]-1)/2.0*1.1, bm_pix[0]/fwsig*1.3,
+                                 bm_pix[1]/fwsig*1.1, bm_pix[2]*2]
+                        para, ierr = func.fit_gaus2d(psfim, p_ini, x_ax, y_ax, mask)
+                        ### first extent is major
+                        if para[3] < para[4]:
+                            para[3:5] = para[4:2:-1]
+                            para[5] += 90
+                        ### clip position angle
+                        para[5] = divmod(para[5], 180)[1]
+
+                        psf_maj[i] = para[3]
+                        psf_min[i] = para[4]
+                        posang = para[5]
+                        while posang >= 180.0:
+                            posang -= 180.0
+                        psf_pa[i] = posang
 
-                # Smooth if desired
-                if img.opts.psf_smooth != None:
-                    sm_scale = img.opts.psf_smooth / img.pix2beam([1.0, 1.0, 0.0])[0] / 3600.0 # pixels
-                    if img.opts.aperture == None:
-                        psf_maps = [psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int]
+                        if img.opts.quiet == False:
+                            bar.increment()
+                    bar.stop()
+
+                    # Interpolate Gaussian parameters
+                    if img.aperture == None:
+                        psf_maps = [psf_maj, psf_min, psf_pa, psfratio]
                     else:
-                        psf_maps = [psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int]
+                        psf_maps = [psf_maj, psf_min, psf_pa, psfratio, psfratio_aper]
                     nimgs = len(psf_maps)
-                    bar = statusbar.StatusBar('Smoothing PSF images .................... : ', 0, nimgs)
+                    bar = statusbar.StatusBar('Interpolating PSF images ................ : ', 0, nimgs)
                     if img.opts.quiet == False:
                         bar.start()
                     map_list = mp.parallel_map(func.eval_func_tuple,
-                        itertools.izip(itertools.repeat(self.blur_image),
-                        psf_maps, itertools.repeat(sm_scale)), numcores=opts.ncores,
+                        itertools.izip(itertools.repeat(self.interp_prop),
+                        psf_maps, itertools.repeat(psfcoords),
+                        itertools.repeat(image.shape)), numcores=opts.ncores,
                         bar=bar)
                     if img.aperture == None:
                         psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int = map_list
                     else:
                         psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int = map_list
 
-                # Make sure all smoothed, interpolated images are ndarrays
-                psf_maj_int = N.array(psf_maj_int)
-                psf_min_int = N.array(psf_min_int)
-                psf_pa_int = N.array(psf_pa_int)
-                psf_ratio_int = N.array(psf_ratio_int)
-                if img.aperture == None:
-                    psf_ratio_aper_int = N.zeros(psf_maj_int.shape, dtype=N.float32)
+                    # Smooth if desired
+                    if img.opts.psf_smooth != None:
+                        sm_scale = img.opts.psf_smooth / img.pix2beam([1.0, 1.0, 0.0])[0] / 3600.0 # pixels
+                        if img.opts.aperture == None:
+                            psf_maps = [psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int]
+                        else:
+                            psf_maps = [psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int]
+                        nimgs = len(psf_maps)
+                        bar = statusbar.StatusBar('Smoothing PSF images .................... : ', 0, nimgs)
+                        if img.opts.quiet == False:
+                            bar.start()
+                        map_list = mp.parallel_map(func.eval_func_tuple,
+                            itertools.izip(itertools.repeat(self.blur_image),
+                            psf_maps, itertools.repeat(sm_scale)), numcores=opts.ncores,
+                            bar=bar)
+                        if img.aperture == None:
+                            psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int = map_list
+                        else:
+                            psf_maj_int, psf_min_int, psf_pa_int, psf_ratio_int, psf_ratio_aper_int = map_list
+
+                    # Make sure all smoothed, interpolated images are ndarrays
+                    psf_maj_int = N.array(psf_maj_int)
+                    psf_min_int = N.array(psf_min_int)
+                    psf_pa_int = N.array(psf_pa_int)
+                    psf_ratio_int = N.array(psf_ratio_int)
+                    if img.aperture == None:
+                        psf_ratio_aper_int = N.zeros(psf_maj_int.shape, dtype=N.float32)
+                    else:
+                        psf_ratio_aper_int = N.array(psf_ratio_aper_int, dtype=N.float32)
+
+                    # Blank with NaNs if needed
+                    mask = img.mask_arr
+                    if isinstance(mask, N.ndarray):
+                        pix_masked = N.where(mask == True)
+                        psf_maj_int[pix_masked] = N.nan
+                        psf_min_int[pix_masked] = N.nan
+                        psf_pa_int[pix_masked] = N.nan
+                        psf_ratio_int[pix_masked] = N.nan
+                        psf_ratio_aper_int[pix_masked] = N.nan
+
+                    # Store interpolated images. The major and minor axis images are
+                    # the sigma in units of arcsec, the PA image in units of degrees east of
+                    # north, the ratio images in units of 1/beam.
+                    img.psf_vary_maj_arr = psf_maj_int * img.pix2beam([1.0, 1.0, 0.0])[0] * 3600.0 # sigma in arcsec
+                    img.psf_vary_min_arr = psf_min_int * img.pix2beam([1.0, 1.0, 0.0])[0] * 3600.0 # sigma in arcsec
+                    img.psf_vary_pa_arr = psf_pa_int
+                    img.psf_vary_ratio_arr = psf_ratio_int # in 1/beam
+                    img.psf_vary_ratio_aper_arr = psf_ratio_aper_int # in 1/beam
+
+                    if opts.output_all:
+                        func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_maj.fits', img.psf_vary_maj_arr*fwsig, img, dir)
+                        func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_min.fits', img.psf_vary_min_arr*fwsig, img, dir)
+                        func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_pa.fits', img.psf_vary_pa_arr, img, dir)
+                        func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_ratio.fits', img.psf_vary_ratio_arr, img, dir)
+                        func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_ratio_aper.fits', img.psf_vary_ratio_aper_arr, img, dir)
+
+            # Loop through source and Gaussian lists and deconvolve the sizes using appropriate beam
+            bar2 = statusbar.StatusBar('Correcting deconvolved source sizes ..... : ', 0, img.nsrc)
+            if img.opts.quiet == False:
+                bar2.start()
+            for src in img.sources:
+                src_pos = img.sky2pix(src.posn_sky_centroid)
+                src_pos_int = (int(src_pos[0]), int(src_pos[1]))
+                gaus_c = img.gaus2pix(src.size_sky, src.posn_sky_centroid)
+                if opts.psf_fwhm is None:
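+                    # Note (assumption): psf_maj_int/psf_min_int hold the interpolated
+                    # Gaussian sigma in pixels, so multiplying by fwsig (2*sqrt(2*ln2)
+                    # ~ 2.3548) converts them to the FWHM values expected by deconv2.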
+                    gaus_bm = [psf_maj_int[src_pos_int]*fwsig, psf_min_int[src_pos_int]*fwsig, psf_pa_int[src_pos_int]]
                 else:
-                    psf_ratio_aper_int = N.array(psf_ratio_aper_int, dtype=N.float32)
-
-                # Blank with NaNs if needed
-                mask = img.mask_arr
-                if isinstance(mask, N.ndarray):
-                    pix_masked = N.where(mask == True)
-                    psf_maj_int[pix_masked] = N.nan
-                    psf_min_int[pix_masked] = N.nan
-                    psf_pa_int[pix_masked] = N.nan
-                    psf_ratio_int[pix_masked] = N.nan
-                    psf_ratio_aper_int[pix_masked] = N.nan
-
-                # Store interpolated images. The major and minor axis images are
-                # the sigma in units of arcsec, the PA image in units of degrees east of
-                # north, the ratio images in units of 1/beam.
-                img.psf_vary_maj_arr = psf_maj_int * img.pix2beam([1.0, 1.0, 0.0])[0] * 3600.0 # sigma in arcsec
-                img.psf_vary_min_arr = psf_min_int * img.pix2beam([1.0, 1.0, 0.0])[0] * 3600.0 # sigma in arcsec
-                img.psf_vary_pa_arr = psf_pa_int
-                img.psf_vary_ratio_arr = psf_ratio_int # in 1/beam
-                img.psf_vary_ratio_aper_arr = psf_ratio_aper_int # in 1/beam
-
-                if opts.output_all:
-                    func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_maj.fits', img.psf_vary_maj_arr*fwsig, img, dir)
-                    func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_min.fits', img.psf_vary_min_arr*fwsig, img, dir)
-                    func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_pa.fits', img.psf_vary_pa_arr, img, dir)
-                    func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_ratio.fits', img.psf_vary_ratio_arr, img, dir)
-                    func.write_image_to_file(img.use_io, img.imagename + '.psf_vary_ratio_aper.fits', img.psf_vary_ratio_aper_arr, img, dir)
-
-                # Loop through source and Gaussian lists and deconvolve the sizes using appropriate beam
-                bar2 = statusbar.StatusBar('Correcting deconvolved source sizes ..... : ', 0, img.nsrc)
-                if img.opts.quiet == False:
-                    bar2.start()
-                for src in img.sources:
-                    src_pos = img.sky2pix(src.posn_sky_centroid)
-                    src_pos_int = (int(src_pos[0]), int(src_pos[1]))
-                    gaus_c = img.gaus2pix(src.size_sky, src.posn_sky_centroid)
-                    gaus_bm = [psf_maj_int[src_pos_int]*fwsig, psf_min_int[src_pos_int]*fwsig, psf_pa_int[src_pos_int]*fwsig]
+                    # Use user-specified constant PSF instead
+                    gaus_bm = img.beam2pix(opts.psf_fwhm)
+                gaus_dc, err = func.deconv2(gaus_bm, gaus_c)
+                src.deconv_size_sky = img.pix2gaus(gaus_dc, src_pos)
+                src.deconv_size_skyE = [0.0, 0.0, 0.0]
+                for g in src.gaussians:
+                    gaus_c = img.gaus2pix(g.size_sky, src.posn_sky_centroid)
                     gaus_dc, err = func.deconv2(gaus_bm, gaus_c)
-                    src.deconv_size_sky = img.pix2gaus(gaus_dc, src_pos)
-                    src.deconv_size_skyE = [0.0, 0.0, 0.0]
-                    for g in src.gaussians:
-                        gaus_c = img.gaus2pix(g.size_sky, src.posn_sky_centroid)
-                        gaus_dc, err = func.deconv2(gaus_bm, gaus_c)
-                        g.deconv_size_sky = img.pix2gaus(gaus_dc, g.centre_pix)
-                        g.deconv_size_skyE = [0.0, 0.0, 0.0]
-                        if img.opts.quiet == False:
-                            bar2.spin()
+                    g.deconv_size_sky = img.pix2gaus(gaus_dc, g.centre_pix)
+                    g.deconv_size_skyE = [0.0, 0.0, 0.0]
                     if img.opts.quiet == False:
-                        bar2.increment()
-                bar2.stop()
+                        bar2.spin()
+                if img.opts.quiet == False:
+                    bar2.increment()
+            bar2.stop()
         img.completed_Ops.append('psf_vary')
 
 ##################################################################################################
diff --git a/CEP/PyBDSM/src/python/pybdsm.py b/CEP/PyBDSM/src/python/pybdsm.py
index 0c907bf3f1ab895670043cada73603caab09dd62..87d18c4ac192fa5e3b12c3ee76727a4e40de40e8 100644
--- a/CEP/PyBDSM/src/python/pybdsm.py
+++ b/CEP/PyBDSM/src/python/pybdsm.py
@@ -428,8 +428,8 @@ def write_catalog(**kwargs):
     included in the output file varies with the format used. Use
     "help 'format'" for more information.
 
-    Parameters: outfile, format, srcroot, bbs_patches, incl_wavelet, clobber,
-                catalog_type, incl_empty, correct_proj
+    Parameters: outfile, format, srcroot, bbs_patches, incl_chan, clobber,
+                catalog_type, incl_empty, correct_proj, bbs_patches_mask
 
     For more information about a parameter, use help.  E.g.,
       > help 'bbs_patches'
@@ -453,14 +453,14 @@ def write_catalog(**kwargs):
 
 write_catalog.arg_list = ['bbs_patches', 'format', 'outfile', 'srcroot',
                           'incl_chan', 'clobber', 'catalog_type', 'incl_empty',
-                          'correct_proj']
+                          'correct_proj', 'bbs_patches_mask']
 write_catalog.use_groups = False
 
 
 def export_image(**kwargs):
     """Write an image to disk.
 
-    Parameters: filename, img_type, img_format, incl_wavelet, clobber
+    Parameters: outfile, img_type, img_format, mask_dilation, pad_image, clobber
 
     For more information about a parameter, use help.  E.g.,
       > help 'img_type'
@@ -482,8 +482,8 @@ def export_image(**kwargs):
     except KeyboardInterrupt:
         print "\n\033[31;1mAborted\033[0m"
 
-export_image.arg_list = ['outfile', 'img_type', 'img_format',
-                         'clobber']
+export_image.arg_list = ['outfile', 'img_type', 'img_format', 'mask_dilation',
+                         'pad_image', 'clobber']
 export_image.use_groups = False
 
 
diff --git a/CEP/PyBDSM/src/python/readimage.py b/CEP/PyBDSM/src/python/readimage.py
index 49156ffade54e68ea645e7acd3b53223bea99a35..7924e2ebf6305a4d6d96eff0f97a1272ccc52c62 100644
--- a/CEP/PyBDSM/src/python/readimage.py
+++ b/CEP/PyBDSM/src/python/readimage.py
@@ -194,10 +194,11 @@ class Op_readimage(Op):
             xy = list(xy)
             for i in range(2):
                 xy.append(0)
-            xy_arr = N.array([xy])
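+            # Assumption: the wcs_pix2world path (newer WCS interface) operates on a
+            # celestial-only (2-axis) WCS and so takes just (x, y); the older
+            # wcs_pix2sky path still needs the zero-padded coordinate list built above.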
             if hasattr(self, 'wcs_pix2world'):
+                xy_arr = N.array([xy[0:2]])
                 sky = self.wcs_pix2world(xy_arr, 0)
             else:
+                xy_arr = N.array([xy])
                 sky = self.wcs_pix2sky(xy_arr, 0)
             return sky.tolist()[0][0:2]
 
@@ -205,10 +206,11 @@ class Op_readimage(Op):
             rd = list(rd)
             for i in range(2):
                 rd.append(0)
-            rd_arr = N.array([rd])
             if hasattr(self, 'wcs_world2pix'):
+                rd_arr = N.array([rd[0:2]])
                 pix = self.wcs_world2pix(rd_arr, 0)
             else:
+                rd_arr = N.array([rd])
                 pix = self.wcs_sky2pix(rd_arr, 0)
             return pix.tolist()[0][0:2]
 
diff --git a/CEP/PyBDSM/src/python/wavelet_atrous.py b/CEP/PyBDSM/src/python/wavelet_atrous.py
index 86510a94518a196201b2891f15fc72e81245c602..f92fdddb82ffc44ea97833b36e2aede96758d79c 100644
--- a/CEP/PyBDSM/src/python/wavelet_atrous.py
+++ b/CEP/PyBDSM/src/python/wavelet_atrous.py
@@ -254,41 +254,8 @@ class Op_wavelet_atrous(Op):
                       mylogger.userinfo(mylog, "Number of valid wavelet Gaussians", str(nwvgaus))
                   else:
                       # Keep all Gaussians and merge islands that overlap
-                      tot_flux = 0.0
-                      wav_rankim_bool = N.array(wimg.pyrank + 1, dtype = bool)
-                      for idx, wvisl in enumerate(wimg.islands):
-                          orig_rankim_bool = N.array(img.pyrank + 1, dtype = bool)
-                          orig_islands = wav_rankim_bool * (img.pyrank + 1) - 1
-                          wav_islands = orig_rankim_bool * (wimg.pyrank + 1) - 1
-                          orig_ids = set(orig_islands.flatten())
-                          orig_ids_arr = N.array(tuple(set(orig_islands.flatten())))
-                          wav_ids =  N.array(tuple(set(wav_islands.flatten())))
-                          if len(wvisl.gaul) > 0:
-
-                              # Get unique island IDs. If an island overlaps with one
-                              # in the original ch0 image, merge them together. If not,
-                              # add the island as a new one.
-                              for wvg in wvisl.gaul:
-                                  tot_flux += wvg.total_flux
-                                  wvg.valid = True
-                              if idx in wav_ids:
-                                  orig_idx = list(set(orig_islands[N.where(wav_islands == idx)]))
-                                  if len(orig_idx) == 1:
-                                      merge_islands(img, img.islands[orig_idx[0]], wvisl)
-                                  else:
-                                      merge_islands(img, img.islands[orig_idx[0]], wvisl)
-                                      for oidx in orig_idx[1:]:
-                                          merge_islands(img, img.islands[orig_idx[0]], img.islands[oidx])
-                                      img.islands = [x for x in img.islands if x.island_id not in orig_idx[1:]]
-                                      renumber_islands(img)
-                              else:
-                                  isl_id = img.islands[-1].island_id + 1
-                                  new_isl = wvisl.copy(img.pixel_beamarea(), image=img.ch0_arr[wvisl.bbox], mean=img.mean_arr[wvisl.bbox], rms=img.rms_arr[wvisl.bbox])
-                                  new_isl.gaul = []
-                                  new_isl.dgaul = []
-                                  new_isl.island_id = isl_id
-                                  img.islands.append(new_isl)
-                                  copy_gaussians(img, new_isl, wvisl)
+                      tot_flux = check_islands_for_overlap(img, wimg)
+
                       # Now renumber the islands and adjust the rank image before going to next wavelet image
                       renumber_islands(img)
 
@@ -648,5 +615,39 @@ def renumber_islands(img):
     img.gaussians = gaussian_list
 
 
-
-
+def check_islands_for_overlap(img, wimg):
+    """Checks for overlaps between img and wimg islands"""
+    tot_flux = 0.0
+    wav_rankim_bool = N.array(wimg.pyrank + 1, dtype = bool)
+    for idx, wvisl in enumerate(wimg.islands):
+        orig_rankim_bool = N.array(img.pyrank + 1, dtype = bool)
+        orig_islands = wav_rankim_bool * (img.pyrank + 1) - 1
+        wav_islands = orig_rankim_bool * (wimg.pyrank + 1) - 1
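+        # orig_islands: for pixels covered by wavelet islands, the id of the
+        # overlapping ch0 island (or -1 if none); wav_islands: for pixels covered
+        # by ch0 islands, the id of the overlapping wavelet island (or -1 if none).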
+        wav_ids = N.array(tuple(set(wav_islands.flatten())))
+        if len(wvisl.gaul) > 0:
+
+            # Get unique island IDs. If an island overlaps with one
+            # in the original ch0 image, merge them together. If not,
+            # add the island as a new one.
+            for wvg in wvisl.gaul:
+                tot_flux += wvg.total_flux
+                wvg.valid = True
+            if idx in wav_ids:
+                orig_idx = list(set(orig_islands[N.where(wav_islands == idx)]))
+                if len(orig_idx) == 1:
+                    merge_islands(img, img.islands[orig_idx[0]], wvisl)
+                else:
+                    merge_islands(img, img.islands[orig_idx[0]], wvisl)
+                    for oidx in orig_idx[1:]:
+                        merge_islands(img, img.islands[orig_idx[0]], img.islands[oidx])
+                    img.islands = [x for x in img.islands if x.island_id not in orig_idx[1:]]
+                    renumber_islands(img)
+            else:
+                isl_id = img.islands[-1].island_id + 1
+                new_isl = wvisl.copy(img.pixel_beamarea(), image=img.ch0_arr[wvisl.bbox], mean=img.mean_arr[wvisl.bbox], rms=img.rms_arr[wvisl.bbox])
+                new_isl.gaul = []
+                new_isl.dgaul = []
+                new_isl.island_id = isl_id
+                img.islands.append(new_isl)
+                copy_gaussians(img, new_isl, wvisl)
+    return tot_flux