diff --git a/Dockerfile b/Dockerfile
index 0117c242336dce81d21404b3d8a5d67922098ac2..b67d46b7ab091018285938497cbddd5ad121b778 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -252,7 +252,7 @@ RUN wget -q -O /WSRT_Measures.ztar \
     rm /WSRT_Measures.ztar
 
 # Some python stuff
-RUN python3 -m pip install h5py pandas pyyaml astropy matplotlib scipy 
+RUN python3 -m pip install h5py pandas pyyaml astropy matplotlib scipy shapely bdsf radio-beam
 #    cd /src && \
 #   git clone https://github.com/lofar-astron/PyBDSF.git && \
 #  cd /src/PyBDSF && \
@@ -262,7 +262,9 @@ RUN python3 -m pip install h5py pandas pyyaml astropy matplotlib scipy
 # AImCal
 ADD imcal.py /opt/imcal.py
 ADD cluster.py /opt/cluster.py
+ADD nvss_cutout.py /opt/nvss_cutout.py
 ADD imcal.yml /opt/imcal.yml
+ADD nvss.csv.zip /opt/nvss.csv.zip
 RUN ln -s /opt/imcal.py /usr/local/bin/imcal.py
 
 
diff --git a/cluster.py b/cluster.py
index c3fc276b0cf0d18151f463be543221f1c6a1896c..898171aabfe773c5085d172a9753bad6a002e9c3 100755
--- a/cluster.py
+++ b/cluster.py
@@ -29,6 +29,12 @@ from matplotlib.patches import Circle, Rectangle, Ellipse
 # from sklearn.cluster import KMeans
 from scipy.spatial import Voronoi, cKDTree
 
+from shapely.geometry import Polygon
+import shapely.geometry
+import shapely.ops
+import h5py
+# astropy objects used by write_ds9 below (harmless if already imported above)
+from astropy.io import fits
+from astropy.wcs import WCS
+from astropy.coordinates import SkyCoord
+import astropy.units as u
+
+
 logging.basicConfig(level=logging.DEBUG)
 
 def ra2deg(ra):
@@ -656,6 +662,258 @@ def voronoi_clustering(fig, ax, df, wcs, resid_data, nbright, nclusters,
     return vor
 
 
+def read_dir_fromh5(h5):
+    """
+    Read the direction info from an H5parm file
+
+    Parameters
+    ----------
+    h5 : str
+        h5 filename
+
+    Returns
+    -------
+    sourcedir: np.array
+        Array containing directions (ra, dec in units of radians)
+    """
+
+    H5 = h5py.File(h5, mode="r")
+    sourcedir = H5['sol000/source'][:]["dir"]
+    if len(sourcedir) < 2:
+        print("Error: H5 seems to contain only one direction")
+        sys.exit(1)
+    H5.close()
+    return sourcedir
+
+
+def tessellate(x_pix, y_pix, w, dist_pix, bbox, nouter=64, plot_tessellation=True):
+    """
+    Returns Voronoi tessellation vertices
+
+    Parameters
+    ----------
+    x_pix : array
+        Array of x pixel values for tessellation centers
+    y_pix : array
+        Array of y pixel values for tessellation centers
+    w : WCS object
+        WCS for transformation from pix to world coordinates
+    dist_pix : float
+        Distance in pixels from center to outer boundary of facets
+    bbox : shapely.geometry.Polygon
+        Bounding box (in pixel coordinates) used to clip the facets
+    nouter : int
+        Number of points to generate on the outer boundary for constraining
+        the Voronoi tessellation. Defaults to 64
+    plot_tessellation : bool
+        Plot tessellation
+
+    Returns
+    -------
+    list, np.2darray
+        List of shapely Polygons, and np.2darray of corresponding (Voronoi) points (ra,dec in degrees)
+    """
+
+    # Combine the direction coordinates (already converted to pixels by the caller)
+    # into (x, y) tuples for the tessellation
+    xy = []
+    for RAvert, Decvert in zip(x_pix, y_pix):
+        xy.append((RAvert, Decvert))
+
+    # Generate array of outer points used to constrain the facets
+    means = np.ones((nouter, 2)) * np.array(xy).mean(axis=0)
+    offsets = []
+    angles = [np.pi / (nouter / 2.0) * i for i in range(0, nouter)]
+    for ang in angles:
+        offsets.append([np.cos(ang), np.sin(ang)])
+    scale_offsets = dist_pix * np.array(offsets)
+    outer_box = means + scale_offsets
+
+    # Tessellate and clip
+    points_all = np.vstack([xy, outer_box])
+    # print(points_all)
+
+    vor = Voronoi(points_all)
+
+    # Filter out the infinite regions
+    region_indices = [
+        region_idx
+        for region_idx in vor.point_region
+        if -1 not in vor.regions[region_idx]
+    ]
+    polygons = []
+    for idx in region_indices:
+        vertex_coords = vor.vertices[vor.regions[idx]]
+        polygons.append(Polygon(vertex_coords))
+
+    clipped_polygons = []
+    for polygon in polygons:
+        # facet_poly = Polygon(facet)
+        clipped_polygons.append(polygon_intersect(bbox, polygon))
+
+    if plot_tessellation:
+        import matplotlib.pyplot as plt
+
+        [plt.plot(*poly.exterior.xy) for poly in clipped_polygons]
+        plt.plot(points_all[:,0], points_all[:,1], 'or',)
+        plt.xlabel("Right Ascension [pixels]")
+        plt.ylabel("Declination [pixels]")
+        plt.axis("square")
+        plt.tight_layout()
+        plt.show()
+
+    verts = []
+    for poly in clipped_polygons:
+        verts_xy = poly.exterior.xy
+        verts_deg = []
+        for x, y in zip(verts_xy[0], verts_xy[1]):
+            ra_deg, dec_deg = w.wcs_pix2world(x, y, 1)
+            verts_deg.append((ra_deg, dec_deg))
+        verts.append(verts_deg)
+
+    # Reorder to match the initial ordering
+    ind = []
+    for poly in polygons:
+        for j, (xs, ys) in enumerate(zip(x_pix, y_pix)):
+            if poly.contains(shapely.geometry.Point(xs, ys)):
+                ind.append(j)
+                break
+    verts = [verts[i] for i in ind]
+
+    ra_point, dec_point = w.wcs_pix2world(x_pix, y_pix, 1)
+    return [Polygon(vert) for vert in verts], np.vstack((ra_point, dec_point)).T
+
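+# A minimal usage sketch for tessellate() (all values below are illustrative, not taken
+# from the pipeline): three directions on a 3072x3072 pixel grid, clipped to the image
+# bounding box, with the outer-boundary radius set to the image diagonal as write_ds9()
+# below does.
+#
+#   from astropy.wcs import WCS
+#   w = WCS(naxis=2)
+#   w.wcs.ctype = ['RA---SIN', 'DEC--SIN']
+#   w.wcs.crval = [180.0, 60.0]              # assumed field centre (deg)
+#   w.wcs.crpix = [1536.0, 1536.0]
+#   w.wcs.cdelt = [-3.0 / 3600, 3.0 / 3600]  # 3 arcsec pixels
+#   x_pix = np.array([1000.0, 1536.0, 2000.0])
+#   y_pix = np.array([1200.0, 1800.0, 1400.0])
+#   bbox = Polygon([(0, 0), (3072, 0), (3072, 3072), (0, 3072)])
+#   polys, points = tessellate(x_pix, y_pix, w, dist_pix=3072 * np.sqrt(2), bbox=bbox,
+#                              plot_tessellation=False)
+#   # polys: one clipped shapely Polygon per direction (ra/dec vertices in degrees)
+#   # points: the corresponding direction centres (ra/dec in degrees)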
+
+def generate_centroids(
+    xmin, ymin, xmax, ymax, npoints_x, npoints_y, distort_x=0.0, distort_y=0.0
+):
+    """
+    Generate centroids for the Voronoi tessellation. These points are essentially
+    generated from a distorted regular grid.
+
+    Parameters
+    ----------
+    xmin : float
+        Min-x pixel index, typically 0
+    ymin : float
+        Min-y pixel index, typically 0
+    xmax : float
+        Max-x pixel index, typically image width
+    ymax : float
+        Max-y pixel index, typically image height
+    npoints_x : int
+        Number of points to generate in width direction
+    npoints_y : int
+        Number of points to generate in height direction
+    distort_x : float, optional
+        "Cell width" fraction by which to distort the x points, by default 0.0
+    distort_y : float, optional
+        "Cell height" fraction by which to distory the y points, by default 0.0
+
+    Returns
+    -------
+    X,Y : np.1darray
+        Flattened arrays with X,Y coordinates
+    """
+
+    x_int = np.linspace(xmin, xmax, npoints_x)
+    y_int = np.linspace(ymin, ymax, npoints_y)
+
+    np.random.seed(0)
+
+    # Strip the points on the boundary
+    x = x_int[1:-1]
+    y = y_int[1:-1]
+    X, Y = np.meshgrid(x, y)
+
+    xtol = np.diff(x)[0]
+    dX = np.random.uniform(low=-distort_x * xtol, high=distort_x * xtol, size=X.shape)
+    X = X + dX
+
+    ytol = np.diff(y)[0]
+    dY = np.random.uniform(low=-distort_y * ytol, high=distort_y * ytol, size=Y.shape)
+    Y = Y + dY
+    return X.flatten(), Y.flatten()
+
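+# Usage sketch for generate_centroids() (numbers are illustrative): a jittered grid of
+# seed points inside a 3072-pixel image; the requested 7x7 grid loses its boundary
+# rows/columns, so 5x5 = 25 points are returned.
+#
+#   X, Y = generate_centroids(0, 0, 3072, 3072, 7, 7, distort_x=0.2, distort_y=0.2)
+#   assert X.size == Y.size == 25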
+
+def polygon_intersect(poly1, poly2):
+    """
+    Returns the intersection of polygon2 with polygon1
+    """
+    clip = poly1.intersection(poly2)
+    return clip
+
+
+def write_ds9(fname, h5, image, points=None):
+    """
+    Write a ds9 regions file containing the Voronoi facets derived from the
+    directions in an H5parm, and (optionally) a point marker attached to each facet
+
+    Parameters
+    ----------
+    fname : str
+        Filename for output file
+    h5 : str
+        H5parm file with the calibration directions (sol000/source)
+    image : str
+        FITS image that defines the WCS and pixel grid for the tessellation
+    points : np.2darray, optional
+        Array of point coordinates (ra, dec in degrees) that should be
+        attached to a facet, by default None
+    """
+
+    imheader = fits.getheader(image)
+    w = WCS(imheader).dropaxis(-1).dropaxis(-1)
+
+    # Image size (in pixels)
+    xmin = 0
+    xmax = imheader['NAXIS1']
+    ymin = 0
+    ymax = imheader['NAXIS2']
+
+    # To cut the Voronoi tessellation on the bounding box, we need
+    # a "circumscribing circle"; the image diagonal is a safe radius
+    dist_pix = np.sqrt((xmax - xmin) ** 2 + (ymax - ymin) ** 2)
+
+    # load in the directions from the H5
+    sourcedir = read_dir_fromh5(h5)
+    
+    # make ra and dec arrays and coordinates c
+    ralist = sourcedir[:, 0]
+    declist = sourcedir[:, 1]
+    c = SkyCoord(ra=ralist * u.rad, dec=declist * u.rad)
+    
+    # convert from ra,dec to x,y pixel
+    x, y = w.wcs_world2pix(c.ra.degree, c.dec.degree, 1)    
+
+    bbox = Polygon([(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)])
+    polygons, points = tessellate(x, y, w, dist_pix, bbox, plot_tessellation=False)
+
+    if points is not None:
+        assert (
+            len(polygons) == points.shape[0]
+        ), "Number of polygons and number of points should match"
+
+    # Write header
+    header = [
+        "# Region file format: DS9 version 4.1",
+        'global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1',
+        "fk5",
+        "\n",
+    ]
+    with open(fname, "w") as f:
+        f.writelines("\n".join(header))
+        polygon_strings = []
+        for i, polygon in enumerate(polygons):
+            poly_string = "polygon("
+            xv, yv = polygon.exterior.xy
+            for (x, y) in zip(xv[:-1], yv[:-1]):
+                poly_string = f"{poly_string}{x:.5f},{y:.5f},"
+            # Strip trailing comma
+            poly_string = poly_string[:-1] + ")"
+            if points is not None:
+                poly_string += f"\npoint({points[i, 0]:.5f}, {points[i, 1]:.5f})"
+            polygon_strings.append(poly_string)
+        f.write("\n".join(polygon_strings))
+
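+# Usage sketch for write_ds9(): the file names follow the commented facet-imaging test
+# in imcal.py and are only illustrative.
+#
+#   write_ds9('ddfacets.reg', 'ddsols.h5', 'img-ddcal-image.fits')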
+
+
 def main(img, resid, model, clustering_method='Voronoi', add_manual=False, nclusters=10, boxsize=250,
          nbright=80, cluster_radius=5, cluster_overlap=1.6):
     """
@@ -719,6 +977,7 @@ if __name__ == "__main__":
     img = base + 'image.fits'
     resid = base + 'residual.fits'
     model = base + 'sources.txt'
+    h5 = ''
     # img = sys.argv[1]
     # resid = sys.argv[2]
     # model = sys.argv[3]
diff --git a/cluster_to_reg.py b/cluster_to_reg.py
deleted file mode 100644
index e37fcd2ce79c90cb9cece5d5e1f34c26cab553e3..0000000000000000000000000000000000000000
--- a/cluster_to_reg.py
+++ /dev/null
@@ -1,432 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-from __future__ import division, absolute_import, print_function
-
-
-
-
-import numpy as np
-from scipy.spatial import Voronoi
-
-import polygon as Polygon
-
-def test():
-
-    RadiusTot=1.
-    Poly=np.array([[-RadiusTot,-RadiusTot],
-                   [+RadiusTot,-RadiusTot],
-                   [+RadiusTot,+RadiusTot],
-                   [-RadiusTot,+RadiusTot]])*1
-
-    Line=np.array([[0.,0.],
-                   [5.,5.]])
-
-    print((CutLineInside(Poly,Line)))
-
-def GiveABLin(P0,P1):
-    x0,y0=P0
-    x1,y1=P1
-    a=(y1-y0)/(x1-x0)
-    b=y1-a*x1
-    return b,a
-
-def GiveB(P0,P1):
-    x0,y0=P0
-    x1,y1=P1
-
-    B=np.array([x0-x1,y0-y1])
-    B/=np.sqrt(np.sum(B**2))
-    if B[0]<0: B=-B
-    return B
-
-def CutLineInside(Poly,Line):
-    P=Polygon.Polygon(Poly)
-
-    dx=1e-4
-    PLine=np.array(Line.tolist()+Line.tolist()[::-1]).reshape((4,2))
-    #PLine[2,0]+=dx
-    #PLine[3,0]+=2*dx
-    PLine[2:,:]+=np.random.randn(2,2)*1e-6
-    P0=Polygon.Polygon(PLine)
-    PP=np.array(P0&P)[0].tolist()
-    PP.append(PP[0])
-
-    B0=GiveB(Line[0],Line[1])
-    B=[GiveB(PP[i],PP[i+1]) for i in range(len(PP)-1)]
-
-    PLine=[]
-    for iB in range(len(B)):
-        d=np.sum((B[iB]-B0)**2)
-        print((d,PP[iB],PP[iB+1]))
-        if d==0:
-            PLine.append([PP[iB],PP[iB+1]])
-
-
-    LOut=np.array(PLine[0])
-
-    return LOut
-
-
-def voronoi_finite_polygons_2d(vor, radius=None):
-    """
-    Reconstruct infinite voronoi regions in a 2D diagram to finite
-    regions.
-    Parameters
-    ----------
-    vor : Voronoi
-        Input diagram
-    radius : float, optional
-        Distance to 'points at infinity'.
-    Returns
-    -------
-    regions : list of tuples
-        Indices of vertices in each revised Voronoi regions.
-    vertices : list of tuples
-        Coordinates for revised Voronoi vertices. Same as coordinates
-        of input vertices, with 'points at infinity' appended to the
-        end.
-    """
-
-    if vor.points.shape[1] != 2:
-        raise ValueError("Requires 2D input")
-
-    new_regions = []
-    new_vertices = vor.vertices.tolist()
-
-    center = vor.points.mean(axis=0)
-    if radius is None:
-        radius = vor.points.ptp().max()
-
-    # Construct a map containing all ridges for a given point
-    all_ridges = {}
-    for (p1, p2), (v1, v2) in zip(vor.ridge_points, vor.ridge_vertices):
-        all_ridges.setdefault(p1, []).append((p2, v1, v2))
-        all_ridges.setdefault(p2, []).append((p1, v1, v2))
-
-    # Reconstruct infinite regions
-    for p1, region in enumerate(vor.point_region):
-        vertices = vor.regions[region]
-
-        if all(v >= 0 for v in vertices):
-            # finite region
-            new_regions.append(vertices)
-            continue
-
-
-        # reconstruct a non-finite region
-        if p1 not in list(all_ridges.keys()):
-            new_regions.append(vertices)
-            continue
-        ridges = all_ridges[p1]
-        new_region = [v for v in vertices if v >= 0]
-
-        for p2, v1, v2 in ridges:
-            if v2 < 0:
-                v1, v2 = v2, v1
-            if v1 >= 0:
-                # finite ridge: already in the region
-                continue
-
-            # Compute the missing endpoint of an infinite ridge
-
-            t = vor.points[p2] - vor.points[p1] # tangent
-            t /= np.linalg.norm(t)
-            n = np.array([-t[1], t[0]])  # normal
-
-            midpoint = vor.points[[p1, p2]].mean(axis=0)
-            direction = np.sign(np.dot(midpoint - center, n)) * n
-
-            V=vor.vertices[v2]
-            R=np.sqrt(np.sum(V**2))
-
-            # while R>0.1:
-            #     V*=0.9
-            #     R=np.sqrt(np.sum(V**2))
-            #     #print R
-
-            vor.vertices[v2][:]=V[:]
-
-            ThisRad=radius
-            far_point = vor.vertices[v2] + direction * radius
-            R=np.sqrt(np.sum(far_point**2))
-            ThisRad=R
-
-            # while R>1:
-            #     ThisRad*=0.9
-            #     far_point = vor.vertices[v2] + direction * ThisRad
-            #     R=np.sqrt(np.sum(far_point**2))
-            #     print "=============="
-            #     print R,np.sqrt(np.sum(vor.vertices[v2]**2))
-            #     print vor.vertices[v2]
-            #     print direction
-            #     print ThisRad
-            #     #if R>1000:
-            #     #    stop
-
-            # RadiusTot=.3
-            # Poly=np.array([[-RadiusTot,-RadiusTot],
-            #                [+RadiusTot,-RadiusTot],
-            #                [+RadiusTot,+RadiusTot],
-            #                [-RadiusTot,+RadiusTot]])*1
-            # stop
-            # PT.CutLineInside(Poly,Line)
-
-
-            new_region.append(len(new_vertices))
-            new_vertices.append(far_point.tolist())
-
-        # sort region counterclockwise
-        vs = np.asarray([new_vertices[v] for v in new_region])
-        c = vs.mean(axis=0)
-        angles = np.arctan2(vs[:,1] - c[1], vs[:,0] - c[0])
-        new_region = np.array(new_region)[np.argsort(angles)]
-
-        # finish
-        new_regions.append(new_region.tolist())
-
-    regions, vertices=new_regions, np.asarray(new_vertices)
-
-    return regions, vertices
-
-
-HEADER = '\033[95m'
-OKBLUE = '\033[94m'
-OKGREEN = '\033[92m'
-WARNING = '\033[93m'
-FAIL = '\033[91m'
-ENDC = '\033[0m'
-bold='\033[1m'
-nobold='\033[0m'
-Separator="================================%s=================================="
-silent=0
-
-def Str(strin0,col="red",Bold=True):
-    if silent==1: return strin0
-    strin=str(strin0)
-    if col=="red":
-        ss=FAIL
-    if col=="green":
-        ss=OKGREEN
-    elif col=="yellow":
-        ss=WARNING
-    elif col=="blue":
-        ss=OKBLUE
-    elif col=="green":
-        ss=OKGREEN
-    elif col=="white":
-        ss=""
-    ss="%s%s%s"%(ss,strin,ENDC)
-    if Bold: ss="%s%s%s"%(bold,ss,nobold)
-    return ss
-
-def Sep(strin=None,D=1):
-    if D!=1:
-        return Str(Separator%("="*len(strin)))
-    else:
-        return Str(Separator%(strin))
-
-def Title(strin,Big=False):
-    print()
-    print()
-    if Big: print((Sep(strin,D=0)))
-    print((Sep(strin)))
-    if Big: print((Sep(strin,D=0)))
-    print()
-
-def disable():
-    HEADER = ''
-    OKBLUE = ''
-    OKGREEN = ''
-    WARNING = ''
-    FAIL = ''
-    ENDC = ''
-
-
-class ClassCoordConv():
-    def __init__(self,rac,decc):
-
-        rarad=rac
-        decrad=decc
-        self.rarad=rarad
-        self.decrad=decrad
-        cos=np.cos
-        sin=np.sin
-        mrot=np.array([[cos(rarad)*cos(decrad), sin(rarad)*cos(decrad),sin(decrad)],[-sin(rarad),cos(rarad),0.],[-cos(rarad)*sin(decrad),-sin(rarad)*sin(decrad),cos(decrad)]]).T
-        vm=np.array([[0.,0.,1.]]).T
-        vl=np.array([[0.,1., 0.]]).T
-        vn=np.array([[1., 0, 0.]]).T
-        self.vl2=np.dot(mrot,vl)
-        self.vm2=np.dot(mrot,vm)
-        self.vn2=np.dot(mrot,vn)
-        self.R=np.array([[cos(decrad)*cos(rarad),cos(decrad)*sin(rarad),sin(decrad)]]).T
-
-
-    def lm2radec(self,l_list,m_list):
-
-        ra_list=np.zeros(l_list.shape,dtype=np.float)
-        dec_list=np.zeros(l_list.shape,dtype=np.float)
-
-        for i in range(l_list.shape[0]):
-            l=l_list[i]
-            m=m_list[i]
-            if (l_list[i]==0.)&(m_list[i]==0.):
-                ra_list[i]=self.rarad
-                dec_list[i]=self.decrad
-                continue
-            Rp=self.R+self.vl2*l+self.vm2*m-(1.-np.sqrt(1.-l**2-m**2))*self.vn2
-            dec_list[i]=np.arcsin(Rp[2])
-            ra_list[i]=np.arctan(Rp[1]/Rp[0])
-            if Rp[0]<0.: ra_list[i]+=np.pi
-
-        return ra_list,dec_list
-
-    def radec2lm(self,ra,dec):
-        l = np.cos(dec) * np.sin(ra - self.rarad)
-        m = np.sin(dec) * np.cos(self.decrad) - np.cos(dec) * np.sin(self.decrad) * np.cos(ra - self.rarad)
-        return l,m
-
-
-
-
-
-class VoronoiToReg():
-    def __init__(self,rac,decc):
-        self.rac=rac
-        self.decc=decc
-        self.CoordMachine=ClassCoordConv(rac,decc)
-
-    def ToReg(self,regFile,xc,yc,radius=0.1,Col="red"):
-        print("Writing voronoi in: %s"%Str(regFile,col="blue"))
-        f=open(regFile,"w")
-        f.write("# Region file format: DS9 version 4.1\n")
-        ss0='global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0'
-        ss1=' fixed=0 edit=1 move=1 delete=1 include=1 source=1\n'
-
-        f.write(ss0+ss1)
-        f.write("fk5\n")
-
-        CoordMachine=self.CoordMachine
-
-        xy=np.zeros((xc.size,2),np.float32)
-        xy[:,0]=xc
-        xy[:,1]=yc
-        vor = Voronoi(xy)
-        regions, vertices = voronoi_finite_polygons_2d(vor,radius=radius)
-
-
-        for region in regions:
-            polygon0 = vertices[region]
-            P=polygon0.tolist()
-            polygon=np.array(P+[P[0]])
-            for iline in range(polygon.shape[0]-1):
-
-                x0,y0=CoordMachine.lm2radec(np.array([polygon[iline][0]]),np.array([polygon[iline][1]]))
-                x1,y1=CoordMachine.lm2radec(np.array([polygon[iline+1][0]]),np.array([polygon[iline+1][1]]))
-
-                x0*=180./np.pi
-                y0*=180./np.pi
-                x1*=180./np.pi
-                y1*=180./np.pi
-
-
-                f.write("line(%f,%f,%f,%f) # line=0 0 color=%s dash=1\n"%(x0,y0,x1,y1,Col))
-                #f.write("line(%f,%f,%f,%f) # line=0 0 color=red dash=1\n"%(x1,y0,x0,y1))
-
-        f.close()
-
-
-    def VorToReg(self,regFile,vor,radius=0.1,Col="red"):
-        print("Writing voronoi in: %s"%Str(regFile,col="blue"))
-
-        f=open(regFile,"w")
-        f.write("# Region file format: DS9 version 4.1\n")
-        ss0='global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0'
-        ss1=' fixed=0 edit=1 move=1 delete=1 include=1 source=1\n'
-
-        f.write(ss0+ss1)
-        f.write("fk5\n")
-
-        CoordMachine=self.CoordMachine
-
-        regions, vertices = vor.regions,vor.vertices
-
-
-        for region in regions:
-            if len(region)==0: continue
-            polygon0 = vertices[region]
-            P=polygon0.tolist()
-            polygon=np.array(P+[P[0]])
-            for iline in range(polygon.shape[0]-1):
-
-                x0,y0=CoordMachine.lm2radec(np.array([polygon[iline][0]]),np.array([polygon[iline][1]]))
-                x1,y1=CoordMachine.lm2radec(np.array([polygon[iline+1][0]]),np.array([polygon[iline+1][1]]))
-
-                x0*=180./np.pi
-                y0*=180./np.pi
-                x1*=180./np.pi
-                y1*=180./np.pi
-
-                f.write("line(%f,%f,%f,%f) # line=0 0 color=%s dash=1\n"%(x0,y0,x1,y1,Col))
-                #f.write("line(%f,%f,%f,%f) # line=0 0 color=red dash=1\n"%(x1,y0,x0,y1))
-
-        f.close()
-
-    def PolygonToReg(self,regFile,LPolygon,radius=0.1,Col="red",labels=None):
-        print("Writing voronoi in: %s"%Str(regFile,col="blue"))
-
-        f=open(regFile,"w")
-        f.write("# Region file format: DS9 version 4.1\n")
-        ss0='global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0'
-        ss1=' fixed=0 edit=1 move=1 delete=1 include=1 source=1\n'
-
-        f.write(ss0+ss1)
-        f.write("fk5\n")
-
-        CoordMachine=self.CoordMachine
-
-
-        for iFacet,polygon0 in zip(list(range(len(LPolygon))),LPolygon):
-            #polygon0 = vertices[region]
-            P=polygon0.tolist()
-            if len(polygon0)==0: continue
-            polygon=np.array(P+[P[0]])
-            ThisText=""
-            if labels is not None:
-                lmean0=np.mean(polygon[:,0])
-                mmean0=np.mean(polygon[:,1])
-
-                lmean,mmean,ThisText=labels[iFacet]
-                # print "!!!!======"
-                # print lmean0,mmean0
-                # print lmean,mmean
-
-                xm,ym=CoordMachine.lm2radec(np.array([lmean]),np.array([mmean]))
-                xm*=180./np.pi
-                ym*=180./np.pi
-                f.write("point(%f,%f) # text={%s} point=circle 5 color=red width=2\n"%(xm,ym,ThisText))
-
-            for iline in range(polygon.shape[0]-1):
-
-
-                L0,M0=np.array([polygon[iline][0]]),np.array([polygon[iline][1]])
-                x0,y0=CoordMachine.lm2radec(L0,M0)
-                L1,M1=np.array([polygon[iline+1][0]]),np.array([polygon[iline+1][1]])
-                x1,y1=CoordMachine.lm2radec(L1,M1)
-
-                x0*=180./np.pi
-                y0*=180./np.pi
-                x1*=180./np.pi
-                y1*=180./np.pi
-
-                # print "===================="
-                # print "[%3.3i] %f %f %f %f"%(iline,x0,y0,x1,y1)
-                # print "       %s"%str(L0)
-                # print "       %s"%str(L1)
-                # print "       %s"%str(M0)
-                # print "       %s"%str(M1)
-                f.write("line(%f,%f,%f,%f) # line=0 0 color=%s dash=1 \n"%(x0,y0,x1,y1,Col))
-
-                #f.write("line(%f,%f,%f,%f) # line=0 0 color=red dash=1\n"%(x1,y0,x0,y1))
-
-        f.close()
\ No newline at end of file
diff --git a/imcal.py b/imcal.py
index 41ed22ef4808e2c3e8e28b490fe46352214700f6..cc4b5e61fb472c9629c0842633e82d97a8918ed1 100755
--- a/imcal.py
+++ b/imcal.py
@@ -27,12 +27,95 @@ import astropy.units as u
 from astropy.io import fits
 
 from cluster import main as cluster
+from cluster import write_ds9
 
+from nvss_cutout import main as nvss_cutout
+
+from radio_beam import Beam
+from radio_beam import EllipticalGaussian2DKernel
+from scipy.fft import ifft2, ifftshift
 
 _POOL_TIME = 300 # SECONDS
 _MAX_TIME = 1 * 3600 # SECONDS
 _MAX_POOL = _MAX_TIME // _POOL_TIME
 
+def execute_binary(binary, args, simg=None):
+    """ Run a binary (optionally inside a singularity image) and wait for it to finish """
+    if simg:
+        # Popen needs the command split into separate list elements
+        command = ['singularity', 'exec', f'{simg}', f'{binary}'] + args
+    else:
+        command = [f'{binary}'] + args
+    logging.debug('executing %s', ','.join(command))
+    process = subprocess.Popen(command)
+    for i in range(_MAX_POOL):
+        try:
+            return_code = process.wait(_POOL_TIME)
+            logging.debug('Process %s (%s) finished with status: %s', process.pid, binary, return_code)
+            return return_code
+        except TimeoutExpired as e:
+            logging.debug('Process %s (%s) still running', process.pid, binary)
+            continue
+
+
+def fft_psf(bmaj, bmin, bpa, size=3073):
+    SIGMA_TO_FWHM = np.sqrt(8*np.log(2))
+    fmaj = size / (bmin / SIGMA_TO_FWHM) / 2 / np.pi
+    fmin = size / (bmaj / SIGMA_TO_FWHM) / 2 / np.pi
+    fpa = bpa + 90
+    angle = np.deg2rad(90+fpa)
+    fkern = EllipticalGaussian2DKernel(fmaj, fmin, angle, x_size=size, y_size=size)
+    fkern.normalize('peak')
+    fkern = fkern.array
+    return fkern
+
+def reconvolve_gaussian_kernel(img, old_maj, old_min, old_pa, new_maj, new_min, new_pa):
+    """
+    Reconvolve an image from an old Gaussian beam to a new one: the image is
+    FFT'ed and multiplied by the ratio of the analytic Fourier-domain kernels,
+    so the kernels themselves are never FFT'ed.
+    bmaj, bmin -- in pixels,
+    bpa -- in degrees from top clockwise (like in Beam)
+    NOTE: currently only works for a square image without NaNs
+    """
+    size = len(img)
+    imean = img.mean()
+    img -= imean
+    fimg = np.fft.fft2(img)
+    krel = fft_psf(new_maj, new_min, new_pa, size) / fft_psf(old_maj, old_min, old_pa, size)
+    fconv = fimg * ifftshift(krel)
+    return ifft2(fconv).real + imean
+
+
+def fits_reconvolve_psf(fitsfile, newpsf, out=None):
+    """ Convolve image with deconvolution of (newpsf, oldpsf) """
+    # newparams = newpsf.to_header_keywords()
+    with fits.open(fitsfile) as hdul:
+        hdr = hdul[0].header
+        currentpsf = Beam.from_fits_header(hdr)
+        if currentpsf != newpsf:
+            kmaj1 = (currentpsf.major.to('deg').value/hdr['CDELT2'])
+            kmin1 = (currentpsf.minor.to('deg').value/hdr['CDELT2'])
+            kpa1 = currentpsf.pa.to('deg').value
+            kmaj2 = (newpsf.major.to('deg').value/hdr['CDELT2'])
+            kmin2 = (newpsf.minor.to('deg').value/hdr['CDELT2'])
+            kpa2 = newpsf.pa.to('deg').value
+            norm = newpsf.to_value() / currentpsf.to_value()
+            if len(hdul[0].data.shape) == 4:
+                conv_data = hdul[0].data[0,0,...]
+            elif len(hdul[0].data.shape) == 2:
+                conv_data = hdul[0].data
+            # deconvolve with the old PSF
+            # conv_data = convolve_gaussian_kernel(conv_data, kmaj1, kmin1, kpa1, inverse=True)
+            # convolve to the new PSF
+            conv_data = norm * reconvolve_gaussian_kernel(conv_data, kmaj1, kmin1, kpa1,
+                                                                     kmaj2, kmin2, kpa2)
+
+            if len(hdul[0].data.shape) == 4:
+                hdul[0].data[0,0,...] = conv_data
+            elif len(hdul[0].data.shape) == 2:
+                hdul[0].data = conv_data
+            hdr = newpsf.attach_to_header(hdr)
+        fits.writeto(out, data=hdul[0].data, header=hdr, overwrite=True)
+    return out
+
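+# Usage sketch: smoothImage() below wraps this call to reconvolve a WSClean product to a
+# circular 30" beam; the file names here are illustrative only.
+#
+#   from radio_beam import Beam
+#   fits_reconvolve_psf('img2-residual.fits', Beam(30 * u.arcsec), out='img2-residual-smooth.fits')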
 
 def modify_filename(fname, string, ext=None):
     """ name.ext --> name<string>.ext """
@@ -45,7 +128,7 @@ def modify_filename(fname, string, ext=None):
 def wsclean(msin, wsclean_bin='wsclean', datacolumn='DATA', outname=None, pixelsize=3, imagesize=3072, mgain=0.8, multifreq=0, autothresh=0.3,
             automask=3, niter=1000000, multiscale=False, save_source_list=True,
             clearfiles=True, clip_model_level=None,
-            fitsmask=None, kwstring=''):
+            fitsmask=None, simg=None, kwstring=''):
     """
     wsclean
     """
@@ -70,6 +153,9 @@ def wsclean(msin, wsclean_bin='wsclean', datacolumn='DATA', outname=None, pixels
     cmd = f'wsclean -name {outname} -data-column {datacolumn} -size {imagesize} {imagesize} -scale {pixelsize}asec -niter {niter} \
             {kwstring} {msin}'
     cmd = " ".join(cmd.split())
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
+
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
 
@@ -86,30 +172,32 @@ def wsclean(msin, wsclean_bin='wsclean', datacolumn='DATA', outname=None, pixels
     return 0
 
 
-def smoothImage(imgfits) :
+def smoothImage(imgfits, psf=30, out=None) :
     """
     Smoothe an image
     """
-    cmd = f'smoFits.py {imgfits}'
-    logging.debug("Running command: %s", cmd)
-    subprocess.call(cmd, shell=True)
-    return
+    if out is None:
+        out = os.path.basename(imgfits.replace('.fits', '-smooth.fits'))
+    return fits_reconvolve_psf(imgfits, Beam(psf*u.arcsec), out=out)
 
 
-def create_mask(imgfits, residfits, clipval, outname='mask.fits'):
+def create_mask(imgfits, residfits, clipval, outname='mask.fits', simg=None):
     """
     Create mask using Tom's code (e-mail on 1 Jul 2021)
     """
-    cmd = f'makeNoiseMapFitsLow {imgfits} {residfits} noise.fits noiseMap.fits'
-    logging.debug("Running command: %s", cmd)
-    subprocess.call(cmd, shell=True)
-    cmd = f'makeMaskFits noiseMap.fits {outname} {clipval}'
-    logging.debug("Running command: %s", cmd)
-    subprocess.call(cmd, shell=True)
+    cmd1 = f'makeNoiseMapFitsLow {imgfits} {residfits} noise.fits noiseMap.fits'
+    cmd2 = f'makeMaskFits noiseMap.fits {outname} {clipval}'
+    if simg:
+        cmd1 = f'singularity exec {simg} ' + cmd1
+        cmd2 = f'singularity exec {simg} ' + cmd2
+    logging.debug("Running command: %s", cmd1)
+    subprocess.call(cmd1, shell=True)
+    logging.debug("Running command: %s", cmd2)
+    subprocess.call(cmd2, shell=True)
     return outname
 
 
-def makeNoiseImage(imgfits, residfits, low=False) :
+def makeNoiseImage(imgfits, residfits, low=False, simg=None) :
     """
     Create mask using Tom's code (e-mail on 1 Jul 2021)
     """
@@ -117,68 +205,90 @@ def makeNoiseImage(imgfits, residfits, low=False) :
       cmd = f'makeNoiseMapFitsLow {imgfits} {residfits} noiseLow.fits noiseMapLow.fits'
     else :
       cmd = f'makeNoiseMapFits {imgfits} {residfits} noise.fits noiseMap.fits'
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
+      
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
     return
 
 
 def makeCombMask(ima1='noiseMap.fits', ima2='noiseMapLow.fits',
-		 clip1=5, clip2=7, outname='mask.fits') :
+		 clip1=5, clip2=7, outname='mask.fits', simg=None) :
     """
     Create mask using Tom's code (e-mail on 1 Jul 2021)
     """
     cmd = f'makeCombMaskFits {ima1} {ima2} {outname} {clip1} {clip2}'
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
+    
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
     return
 
 
-def get_image_max(msin):
+def get_image_ra_dec_min_max(msin, simg=None):
     """
-    Determine maximum image value for msin
+    Determine image center coords, min and max values for msin
     """
     cmd = f'wsclean -niter 0 -size 3072 3072 -scale 3arcsec -use-wgridder {msin}'
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
     subprocess.call(cmd, shell=True)
-    return np.nanmax(fits.getdata('wsclean-image.fits'))
+    data = fits.getdata('wsclean-image.fits')
+    header = fits.getheader('wsclean-image.fits')
+    
+    return header['CRVAL1'], header['CRVAL2'], np.nanmin(data), np.nanmax(data)
 
 
-def makesourcedb(modelfile, out=None):
+def makesourcedb(modelfile, out=None, simg=None):
     """ Make sourcedb file from a clustered model """
     out = out or os.path.splitext(modelfile)[0] + '.sourcedb'
     cmd = 'makesourcedb in={} out={}'.format(modelfile, out)
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
     return out
 
 
-def bbs2model(inp, out=None):
+def bbs2model(inp, out=None, simg=None):
     """ Convert model file to AO format """
     out = out or os.path.splitext(inp)[0] + '.ao'
     cmd = 'bbs2model {} {}'.format(inp, out)
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
     return out
 
 
-def render(bkgr, model, out=None):
+def render(bkgr, model, out=None, simg=None):
     out = out or os.path.split(bkgr)[0] + '/restored.fits'
     cmd = 'render -a -r -t {} -o {} {}'.format(bkgr, out, model)
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
     return out
 
 
-def execute_dppp(args):
-    command = ['DPPP'] + args
+        
+def execute_dppp(args, simg=None):
+    if simg:
+        command = ['singularity', 'exec', f'{simg}', 'DP3'] + args
+    else:
+        command = ['DP3'] + args
+
     logging.debug('executing %s', ','.join(command))
     dppp_process = subprocess.Popen(command)
     for i in range(_MAX_POOL):
         try:
             return_code = dppp_process.wait(_POOL_TIME)
-            logging.debug('DPPP process %s finished with status: %s', dppp_process.pid, return_code)
+            logging.debug('DP3 process %s finished with status: %s', dppp_process.pid, return_code)
             return return_code
         except TimeoutExpired as e:
-            logging.debug('DPPP process %s still running', dppp_process.pid)
+            logging.debug('DP3 process %s still running', dppp_process.pid)
             continue
 
 
@@ -190,7 +300,7 @@ def check_return_code(return_code):
         pass
 
 
-def split_ms(msin_path, startchan, nchan=0, msout_path=''):
+def split_ms(msin_path, startchan, nchan=0, msout_path='', simg=None):
     """
     use casacore.tables.msutil.msconcat() to concat the new MS files
     """
@@ -203,14 +313,14 @@ def split_ms(msin_path, startchan, nchan=0, msout_path=''):
                     f'msin.startchan={startchan}',
                     f'msin.nchan={nchan}',
                     f'msout={msout_path}']
-    return_code = execute_dppp(command_args)
+    return_code = execute_dppp(command_args, simg=simg)
     logging.debug('Split of %s returned status code %s', msin_path, return_code)
     check_return_code(return_code)
     return msout_path
 
 
 def dical(msin, srcdb, msout=None, h5out=None, solint=1, startchan=0, split_nchan=0,
-          mode='phaseonly', cal_nchan=0, uvlambdamin=500):
+          mode='phaseonly', cal_nchan=0, uvlambdamin=500, simg=None):
     """ direction independent calibration with DPPP """
     h5out = h5out or modify_filename(msin, f'_dical_dt{solint}_{mode}', ext='.h5')
     msout = msout or modify_filename(msin, f'_dical_dt{solint}_{mode}')
@@ -228,18 +338,19 @@ def dical(msin, srcdb, msout=None, h5out=None, solint=1, startchan=0, split_ncha
     if startchan or split_nchan:
         logging.info('Calibrating MS channels: %d - %d', startchan, split_nchan)
         command_args += [f'msin.startchan={startchan}', f'msin.nchan={split_nchan}']
-    return_code = execute_dppp(command_args)
+    return_code = execute_dppp(command_args, simg=simg)
     logging.debug('DICAL returned status code %s', return_code)
     check_return_code(return_code)
     return msout
 
+
 def ddecal(msin, srcdb, msout=None, h5out=None, solint=120, nfreq=30,
-           startchan=0, nchan=0,  mode='diagonal', uvlambdamin=500, subtract=True):
+           startchan=0, nchan=0,  mode='diagonal', uvlambdamin=500, subtract=True, simg=None):
     """ Perform direction dependent calibration with DPPP """
     h5out = h5out or os.path.split(msin)[0] + '/ddcal.h5'
     msbase = os.path.basename(msin).split('.')[0]
     msout = msout or '{}_{}_{}.MS'.format(msbase,mode, solint)
-    cmd = 'DPPP msin={msin} msout={msout} \
+    cmd = 'DP3 msin={msin} msout={msout} \
           msin.startchan={startchan} \
           msin.nchan={nchan} \
           msout.overwrite=true \
@@ -257,17 +368,22 @@ def ddecal(msin, srcdb, msout=None, h5out=None, solint=120, nfreq=30,
           '.format(msin=msin, msout=msout, startchan=startchan, nchan=nchan, mode=mode,
             srcdb=srcdb, solint=solint, h5out=h5out, subtract=subtract, nfreq=nfreq,
             uvlambdamin=uvlambdamin)
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
     cmd = " ".join(cmd.split())
     logging.debug("Running command: %s", cmd)
     subprocess.call(cmd, shell=True)
     return msout, h5out
 
 
-def phase_shift(msin, new_center, msout=None):
+def phase_shift(msin, new_center, msout=None, simg=None):
     """ new_center examples: [12h31m34.5, 52d14m07.34] or [187.5deg, 52.45deg] """
     msout = msout or '.'
-    cmd = "DPPP msin={msin} msout={msout} msout.overwrite=True steps=[phaseshift] \
+    cmd = "DP3 msin={msin} msout={msout} msout.overwrite=True steps=[phaseshift] \
            phaseshift.phasecenter={new_center}".format(**locals())
+    if simg:
+        cmd = f'singularity exec {simg} ' + cmd
+
     cmd = " ".join(cmd.split())
     subprocess.call(cmd, shell=True)
 
@@ -279,7 +395,8 @@ def view_sols(h5param, outname=None):
             grp = f['sol000/{}'.format(key)]
             data = grp['val'][()]
             time = grp['time'][()]
-            ants = ['RT2','RT3','RT4','RT5','RT6','RT7','RT8','RT9','RTA','RTB','RTC','RTD']
+            # ants = ['RT2','RT3','RT4','RT5','RT6','RT7','RT8','RT9','RTA','RTB','RTC','RTD']
+            ants = [_.decode() for _ in grp['ant'][()]]
             fig = plt.figure(figsize=[20, 15])
             fig.suptitle('Freq. averaged {} gain solutions'.format(key.rstrip('000')))
             for i, ant in enumerate(ants):
@@ -325,14 +442,17 @@ def view_sols(h5param, outname=None):
             fig1, ax1 = plot_sols(h5param, 'amplitude000')
             fig1.savefig(f'{outname}_amp.png')
         except:
+            fig1 = ax1 = None
             logging.error('No amplitude solutions found')
 
         try:
             fig2, ax2 = plot_sols(h5param, 'phase000')
             fig2.savefig(f'{outname}_phase.png')
         except:
+            fig2 = ax2 = None
             logging.error('No phase solutions found')
-
+    # return fig1, ax1, fig2, ax2
+    
 
 def remove_model_components_below_level(model, level=0.0, out=None):
     """
@@ -361,7 +481,7 @@ def remove_model_components_below_level(model, level=0.0, out=None):
     return out
 
 
-def main(msin, steps='all', outbase=None, cfgfile='imcal.yml'):
+def main(msin, steps='all', outbase=None, cfgfile='imcal.yml', force=False):
 
     msin = msin.rstrip('/')
     logging.info('Processing {}'.format(msin))
@@ -370,6 +490,12 @@ def main(msin, steps='all', outbase=None, cfgfile='imcal.yml'):
 
     with open(cfgfile) as f:
         cfg = yaml.safe_load(f)
+    # the 'global' section may be absent or commented out in imcal.yml
+    simg = (cfg.get('global') or {}).get('singularity_image_path')
+
+    if steps == 'all':
+        steps = ['nvss', 'mask', 'dical', 'ddcal']
+    else:
+        steps = steps.split(',')
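+    # e.g. '-s dical,ddcal' (comma-separated, no extra spaces) runs only those stages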
 
 # define file names:
     mspath = os.path.split(os.path.abspath(msin))[0]
@@ -384,9 +510,11 @@ def main(msin, steps='all', outbase=None, cfgfile='imcal.yml'):
     img1 = outbase + '_1'
     img2 = outbase + '_2'
     img3 = outbase + '_3'
-    img_final = outbase + '-dical'
-    img_ddsub = outbase + '-ddsub'
-    img_ddcal = outbase + '-ddcal'
+    img_dical = outbase + '-dical'
+    img_ddsub_1 = outbase + '-ddsub-1'
+    img_ddsub_2 = outbase + '-ddsub-2'
+    img_ddcal_1 = outbase + '-ddcal'
+    img_ddcal_2 = outbase + '-ddcal'
 
     mask0 = outbase + '-mask0.fits'
     mask1 = outbase + '-mask1.fits'
@@ -412,117 +540,149 @@ def main(msin, steps='all', outbase=None, cfgfile='imcal.yml'):
     h5_dd = outbase + '_ddcal.h5'
 
 
-    if os.path.exists(img_ddcal+'-image.fits'):
+    
+    if not force and os.path.exists(img_ddcal_2+'-image.fits'):
         logging.info('The final image exists. Exiting...')
         return 0
 
-
-    if cfg['nvss']:
-       logging.debug('Using NVSS catalog for initial phase calibration')
+# get image parameters
+    img_ra, img_dec, img_min, img_max = get_image_ra_dec_min_max(dical0, )
+    
+    if 'nvss' in steps and cfg['nvss']:
+       nvss_model = nvss_cutout('wsclean-image.fits', nvsscat='/opt/nvss.csv.zip', cutoff=cfg['nvsscal']['clip_model'])
        if (not os.path.exists(ms_split)) and (cfg['split']['startchan'] or cfg['split']['nchan']):
            ms_split = split_ms(msin, msout_path=ms_split, **cfg['split'])
 
-       makesourcedb('nvss-model.txt', out=nvssMod)
+       makesourcedb(nvss_model, out=nvssMod, simg=simg)
 
-       dical(ms_split, nvssMod, msout=dical0, h5out=h5_0, **cfg['nvss'])
+       # clip_model is used for the NVSS cutout above and is not a dical() argument
+       dical_kws = {k: v for k, v in cfg['nvsscal'].items() if k != 'clip_model'}
+       dical(ms_split, nvssMod, msout=dical0, h5out=h5_0, **dical_kws)
        view_sols(h5_0, outname=msbase+'_sols_dical0')
-
     else :
        # if (not os.path.exists(ms_split)) and (cfg['split']['startchan'] or cfg['split']['nchan']):
        ms_split = split_ms(msin, msout_path=dical0, **cfg['split'])
 
-    if not os.path.exists(img0 +'-image.fits') and (not os.path.exists(img0 +'-MFS-image.fits')):
-        img_max = get_image_max(dical0)
-        threshold = img_max/cfg['clean0']['max_over_thresh']
-        threshold = max(threshold,0.001)
-        wsclean(dical0, outname=img0, automask=None, save_source_list=False, multifreq=False, mgain=None,
-            kwstring=f'-threshold {threshold}')
-        create_mask(img0 +'-image.fits', img0 +'-residual.fits', clipval=10, outname=mask0)
-
+    if 'mask' in steps:
+        if not force and (os.path.exists(img0 +'-image.fits') or (os.path.exists(img0 +'-MFS-image.fits'))):
+            logging.info('mask step: Image exists, use -f/--force to overwrite...')
+        else:
+            threshold = img_max/cfg['clean0']['max_over_thresh']
+            threshold = max(threshold, 0.001)
+            wsclean(dical0, outname=img0, automask=None, save_source_list=False, multifreq=False, mgain=None,
+                    kwstring=f'-threshold {threshold}')
+            create_mask(img0 +'-image.fits', img0 +'-residual.fits', clipval=10, outname=mask0, )
+    
+    if 'dical' in steps:
 # clean1
-    if not os.path.exists(img1 +'-image.fits') and (not os.path.exists(img1 +'-MFS-image.fits')):
-        wsclean(dical0, fitsmask=mask0, outname=img1, **cfg['clean1']) # fast shallow clean
-
-    if not os.path.exists(model1):
-        makesourcedb(img1+'-sources.txt', out=model1)
+        if not force and (os.path.exists(img1 +'-image.fits') or (os.path.exists(img1 +'-MFS-image.fits'))):
+            logging.info('dical/clean1 step: Image exists, use -f/--force to overwrite...')
+        else:
+            wsclean(dical0, fitsmask=mask0, outname=img1, **cfg['clean1']) # fast shallow clean
+            makesourcedb(img1+'-sources.txt', out=model1)
+    
 # dical1
-    if not os.path.exists(dical1):
-        dical1 = dical(dical0, model1, msout=dical1, h5out=h5_1, **cfg['dical1'])
-        view_sols(h5_1, outname=msbase+'_sols_dical1')
+        if not force and os.path.exists(dical1):
+            logging.debug('dical/dical1 step: MS exists, use -f/--force to overwrite...')
+        else:
+            dical1 = dical(dical0, model1, msout=dical1, h5out=h5_1, **cfg['dical1'])
+            view_sols(h5_1, outname=msbase+'_sols_dical1')
 # clean2
-    if (not os.path.exists(img2 +'-image.fits')) and (not os.path.exists(img2 +'-MFS-image.fits')):
-        wsclean(dical1, fitsmask=mask0, outname=img2, **cfg['clean2'])
-        smoothImage(img2+'-residual.fits')
-        makeNoiseImage(img2 +'-image.fits', img2 +'-residual.fits')
-        makeNoiseImage(img2 +'-residual-smooth.fits', img2 +'-residual.fits',low=True)
-        makeCombMask(outname=mask1,clip1=7,clip2=15)
-
-    if not os.path.exists(model2):
-        makesourcedb(img2+'-sources.txt', out=model2)
+        if not force and (os.path.exists(img2 +'-image.fits') or (os.path.exists(img2 +'-MFS-image.fits'))):
+            logging.info('dical/clean2 step: Image exists, use -f/--force to overwrite...')
+        else:
+            wsclean(dical1, fitsmask=mask0, outname=img2, **cfg['clean2'])
+            smoothImage(img2+'-residual.fits')
+            makeNoiseImage(img2 +'-image.fits', img2 +'-residual.fits', )
+            makeNoiseImage(img2 +'-residual-smooth.fits', img2 +'-residual.fits', low=True, )
+            makeCombMask(outname=mask1, clip1=7, clip2=15, )
+    
+            makesourcedb(img2+'-sources.txt', out=model2, )
 
 # dical2
-    if not os.path.exists(dical2):
-        dical2 = dical(dical1, model2, msout=dical2, h5out=h5_2, **cfg['dical2'])
-        view_sols(h5_2, outname=msbase+'_sols_dical2')
+        if not force and os.path.exists(dical2):
+            logging.debug('dical/dical2 step: MS exists, use -f/--force to overwrite...')
+        else:
+            dical2 = dical(dical1, model2, msout=dical2, h5out=h5_2, **cfg['dical2'])
+            view_sols(h5_2, outname=msbase+'_sols_dical2')
 # clean3
-    if (not os.path.exists(img3 +'-image.fits')) and (not os.path.exists(img3 +'-MFS-image.fits')):
-        wsclean(dical2, fitsmask=mask1, outname=img3, **cfg['clean3'])
-        smoothImage(img3+'-residual.fits')
-        makeNoiseImage(img3 +'-image.fits', img3 +'-residual.fits')
-        makeNoiseImage(img3 +'-residual-smooth.fits', img3 +'-residual.fits',low=True)
-        makeCombMask(outname=mask2,clip1=5,clip2=10)
-
-
-    if not os.path.exists(model3):
-        makesourcedb(img3+'-sources.txt', out=model3)
+        if not force and (os.path.exists(img3 +'-image.fits') or (os.path.exists(img3 +'-MFS-image.fits'))):
+            logging.info('dical/clean3 step: Image exists, use -f/--force to overwrite...')
+        else:
+            wsclean(dical2, fitsmask=mask1, outname=img3, **cfg['clean3'])
+            smoothImage(img3+'-residual.fits')
+            makeNoiseImage(img3 +'-image.fits', img3 +'-residual.fits', )
+            makeNoiseImage(img3 +'-residual-smooth.fits', img3 +'-residual.fits', low=True, )
+            makeCombMask(outname=mask2, clip1=5, clip2=10)
+    
+            makesourcedb(img3+'-sources.txt', out=model3)
 
 # dical3
-    if not os.path.exists(dical3):
-        dical3 = dical(dical2, model3, msout=dical3, h5out=h5_3, **cfg['dical3'])
-        view_sols(h5_3, outname=msbase+'_sols_dical3')
+        if not force and os.path.exists(dical3):
+            logging.debug('dical/dical3 step: MS exists, use -f/--force to overwrite...')
+        else:
+            dical3 = dical(dical2, model3, msout=dical3, h5out=h5_3, **cfg['dical3'], )
+            view_sols(h5_3, outname=msbase+'_sols_dical3')
 
 # clean4
-    if (not os.path.exists(img_final +'-image.fits')) and (not os.path.exists(img_final +'-MFS-image.fits')):
-        wsclean(dical3, fitsmask=mask2, outname=img_final, **cfg['clean4'])
-        smoothImage(img_final+'-residual.fits')
-        makeNoiseImage(img_final +'-image.fits', img_final +'-residual.fits')
-        makeNoiseImage(img_final +'-residual-smooth.fits', img_final +'-residual.fits',low=True)
-        makeCombMask(outname=mask3,clip1=5,clip2=7)
-
-
+        if not force and (os.path.exists(img_dical +'-image.fits') or (os.path.exists(img_dical +'-MFS-image.fits'))):
+            logging.info('dical/clean4 step: Image exists, use -f/--force to overwrite...')
+        else:
+            wsclean(dical3, fitsmask=mask2, outname=img_dical,  **cfg['clean4'])
+            smoothImage(img_dical+'-residual.fits')
+            makeNoiseImage(img_dical +'-image.fits', img_dical +'-residual.fits', )
+            makeNoiseImage(img_dical +'-residual-smooth.fits', img_dical +'-residual.fits',low=True, )
+            makeCombMask(outname=mask3, clip1=5, clip2=7, )
+
+    if 'ddcal' in steps:
 # Cluster
-    if (not os.path.exists(img_final +'-clustered.txt')):
-        clustered_model = cluster(img_final+'-image.fits', img_final+'-residual.fits', img_final+'-sources.txt', **cfg['cluster'])
-
+        clustered_sdb = img_dical + '-clustered.sourcedb'
+        if not force and os.path.exists(img_dical +'-clustered.txt'):
+            logging.info('ddcal/clustering step: cluster file exists, use -f/--force to overwrite...')
+        else:
+            clustered_model = cluster(img_dical+'-image.fits', img_dical+'-residual.fits', img_dical+'-sources.txt', **cfg['cluster'])
 # Makesourcedb
-        clustered_sdb = makesourcedb(clustered_model, img_final+'-clustered.sourcedb')
+            clustered_sdb = makesourcedb(clustered_model, clustered_sdb, )
 
 # DDE calibration + peeling everything
-    if (not os.path.exists(ddsub)):
-        ddsub, h5out = ddecal(dical3, clustered_sdb, msout=ddsub, h5out=h5_dd, **cfg['ddcal'])
+        if not force and os.path.exists(ddsub):
+            logging.debug('ddcal/ddecal step: MS exists, use -f/--force to overwrite...')
+        else:
+            ddsub, h5out = ddecal(dical3, clustered_sdb, msout=ddsub, h5out=h5_dd, **cfg['ddcal'])
 
 # view the solutions and save figure
         view_sols(h5_dd, outname=msbase+'_sols_ddcal')
 
-    if (not os.path.exists(img_ddsub+'-image.fits')):
-        wsclean(ddsub, fitsmask=mask3,outname=img_ddsub, **cfg['clean5'])
+        if not force and os.path.exists(img_ddsub_1+'-image.fits'):
+            pass
+        else:
+            wsclean(ddsub, fitsmask=mask3,outname=img_ddsub_1, **cfg['clean5'])
 #TAO        wsclean(ddsub,outname=img_ddsub, **cfg['clean5'])
 
-    aomodel = bbs2model(img_final+'-sources.txt', img_final+'-model.ao')
-
-    render(img_ddsub+'-image.fits', aomodel, out=img_ddcal+'-image.fits')
-
-    smoothImage(img_ddsub+'-image.fits')
-    makeNoiseImage(img_ddcal +'-image.fits', img_ddsub +'-residual.fits')
-    makeNoiseImage(img_ddsub +'-image-smooth.fits', img_ddsub +'-residual.fits',low=True)
-    makeCombMask(outname=mask4,clip1=3.5,clip2=5)
-
-    if (not os.path.exists(img_ddsub+'-2-image.fits')):
-        wsclean(ddsub, fitsmask=mask4,outname=img_ddsub+'-2', **cfg['clean5'])
+        aomodel = bbs2model(img_dical+'-sources.txt', img_dical+'-model.ao', )
+    
+        render(img_ddsub_1+'-image.fits', aomodel, out=img_ddcal_1+'-image.fits')
+    
+        smoothImage(img_ddsub_1+'-image.fits')
+        makeNoiseImage(img_ddcal_1 +'-image.fits', img_ddsub_1 +'-residual.fits', )
+        makeNoiseImage(img_ddsub_1 +'-image-smooth.fits', img_ddsub_1 +'-residual.fits', low=True, )
+        makeCombMask(outname=mask4, clip1=3.5, clip2=5, )
+
+        if not force and os.path.exists(img_ddsub_2+'-image.fits'):
+            pass
+        else:
+            wsclean(ddsub, fitsmask=mask4, outname=img_ddsub_2, **cfg['clean5'])
 #TAO        wsclean(ddsub,outname=img_ddsub, **cfg['clean5'])
+    
+        aomodel = bbs2model(img_dical+'-sources.txt', img_dical+'-model.ao', )
+        render(img_ddsub_2+'-image.fits', aomodel, out=img_ddcal_2+'-image.fits', )
+
+# test facet imaging:
+    # ds9_file = 'ddfacets.reg'
+    # ddvis = outbase + '_ddvis.MS'
+    # h5_ddvis = 'ddsols.h5'
+    # clustered_sdb = img_dical+'-clustered.sourcedb'
+    # # ddvis = ddecal(dical3, clustered_sdb, msout=ddvis, subtract=False, h5out=h5_ddvis, **cfg['ddcal'])
+    # # write_ds9(ds9_file, h5_ddvis, img_ddcal+'-image.fits')
+    # wsclean(ddvis, fitsmask=mask3, save_source_list=False, outname='img-facet', **cfg['clean6'],)
 
-    aomodel = bbs2model(img_final+'-sources.txt', img_final+'-model.ao')
-    render(img_ddsub+'-2-image.fits', aomodel, out=img_ddcal+'-2-image.fits')
 
     return 0
 
@@ -538,7 +698,8 @@ if __name__ == "__main__":
     parser.add_argument('-c', '--config', action='store',
                         dest='configfile', help='Config file', type=str)
     parser.add_argument('-o', '--outbase', default=None, help='output prefix', type=str)
-    parser.add_argument('-s', '--steps', default='all', help='steps to run', type=str)
+    parser.add_argument('-s', '--steps', default='all', help='steps to run. Example: "nvss,mask,dical,ddcal"', type=str)
+    parser.add_argument('-f', '--force', action='store_true', help='Overwrite the existing files')
 
     args = parser.parse_args()
     configfile = args.configfile or \
diff --git a/imcal.yml b/imcal.yml
index ff55ab5c7320fd69313e200143f08a4efd4142cc..7ae17e911dbac5fa3e3d5019cdcc7ca6196431e5 100644
--- a/imcal.yml
+++ b/imcal.yml
@@ -1,15 +1,7 @@
-#:===========================================================================
+#===========================================================================
 # Settings for imcal
-#:===========================================================================
-global: # provide executables here
-    singularity_image_path: $HOME/lofar-pipeline.sif # leave empty to not use the singularity image
-    dppp_bin: 'DP3'
-    wsclean_bin: 'wsclean'
-    makesourcedb_bin: 'makesourcedb'
-    bbs2model_bin: 'bbs2model'
-    render_bin: 'render'
-
-    steps: 'all' # or list, e.g. ['clean1',[]], etc...
+#===========================================================================
+# global: # provide executables here
 
 ############################## SPLIT ##########################################
 split:
@@ -20,7 +12,8 @@ split:
 nvss: False
     # calibrate against the NVSS catalog. Generally works well
     # except for cases with and extended Apertif source unresolved by NVSS
-nvssCal:
+nvsscal:
+    clip_model: 0.001 # discard NVSS model components fainter than this apparent flux (Jy)
     solint: 60
     mode: 'phaseonly'
     uvlambdamin: 500 # Ignore baselines / channels with UV < uvlambdamin wavele$
@@ -121,6 +114,18 @@ clean5:
     clip_model_level: null
     kwstring: '-use-wgridder -parallel-deconvolution 1400 -parallel-gridding 8 -deconvolution-channels 3 -weight briggs -1.5'
 
+
+# clean6:
+#     imagesize: 3072
+#     pixelsize: 3
+#     multifreq: 6
+#     automask: 3.5
+#     autothresh: 0.5
+#     multiscale: True
+#     clip_model_level: null
+#     kwstring: '-facet-regions ddfacets.reg -apply-facet-solutions ddsols.h5 amplitude000,phase000 -use-wgridder -parallel-deconvolution 1400 -parallel-gridding 8 -deconvolution-channels 3 -weight briggs -1.5'
+
+
 ### END
 
 
diff --git a/nvss.csv.zip b/nvss.csv.zip
new file mode 100644
index 0000000000000000000000000000000000000000..1e62c9798a347440140bc7fa7e7b3eda7cdb121e
Binary files /dev/null and b/nvss.csv.zip differ
diff --git a/nvss_cutout.py b/nvss_cutout.py
new file mode 100644
index 0000000000000000000000000000000000000000..920992d672b6580a20e1b9d80b59780a85e0f140
--- /dev/null
+++ b/nvss_cutout.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Apr  9 15:10:29 2022
+
+@author: kutkin
+"""
+from astropy.coordinates import Angle
+from astropy.io import fits
+import numpy as np
+import logging
+import os
+import pandas as pd
+
+
+def wsrt_beam(radius) :
+    """ model for old WSRT beam (r in degrees) """
+    return np.cos(np.minimum(70*1.4*np.deg2rad(radius),1.0881))**6
+    
+
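+# Quick sanity check of the beam model (approximate values): the response is 1.0 at the
+# pointing centre and drops to half power near r ~ 0.28 deg at 1.4 GHz, i.e. a ~0.55 deg
+# FWHM primary beam.
+#
+#   wsrt_beam(0.0)    # -> 1.0
+#   wsrt_beam(0.28)   # -> ~0.49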
+def load_nvss(nvss_catalog_file):
+    """ read NVSS catalog and return dataframe with [ra, dec, flux, err] columns """
+    logging.info('Loading NVSS catalog from: %s', nvss_catalog_file)
+    from zipfile import ZipFile
+    if os.path.exists('nvss.csv'):
+        logging.debug('NVSS catalog is already unzipped. Continuing...')
+    else:
+        nvsszip = ZipFile(nvss_catalog_file)
+        nvsszip.extractall()
+    df = pd.read_csv('nvss.csv')
+    df[['flux', 'err']] /= 1e3 # to Jy
+    return df
+
+
+def main(img, nvsscat='/opt/nvss.csv.zip', cutoff=0.001, outname=None):
+    header = fits.getheader(img)
+    imsizedeg = abs(header['NAXIS1'] / 2 * header['CDELT1'])
+    ra0, dec0 = header['CRVAL1'], header['CRVAL2']
+    if ra0 < 0:
+        ra0 += 360.0
+    def scale_for_beam(df):
+        raFac = np.cos(np.radians(dec0))
+        radius = np.sqrt((df.ra-ra0)**2*raFac**2 + (df.dec-dec0)**2)
+        beamFac = wsrt_beam(radius)
+        res = df.flux*beamFac
+        return res
+    nvss_df = load_nvss(nvsscat)
+    nvss_df = nvss_df.query('abs(ra - @ra0) <= @imsizedeg & abs(dec - @dec0) <= @imsizedeg')
+    nvss_df['flux_scaled'] = nvss_df.apply(scale_for_beam, axis=1)
+    # keep only sources brighter than the cutoff (apparent, beam-attenuated flux)
+    nvss_df = nvss_df[nvss_df.flux_scaled >= cutoff]
+
+    if outname is None:
+        outname = os.path.splitext(img)[0] + '_nvss_model.txt'
+        
+    with open(outname, 'w') as fout:
+        fout.write('Format = Name, Type, Ra, Dec, I, SpectralIndex, LogarithmicSI, \
+                    ReferenceFrequency=\'1364100669.00262\', MajorAxis, MinorAxis, Orientation\n')
+        ns = 0
+        for index, row in nvss_df.iterrows():
+            rah, ram, ras = Angle(row.ra, unit='deg').hms
+            dd, dm, ds = Angle(row.dec, unit='deg').dms
+            outstr = 's0s{},POINT,{:.0f}:{:.0f}:{:.3f},{:.0f}.{:.0f}.{:.3f},{},'.format(
+                ns, rah, ram, ras, dd, abs(dm), abs(ds), row.flux_scaled)
+            outstr=outstr+'[],false,1370228271.48438,,,\n'
+            fout.write(outstr)
+            ns += 1
+    logging.info('Wrote NVSS model to %s', outname)
+    return outname
+
+        
+if __name__ == "__main__":
+    img = 'wsclean-image.fits'
+    nvsscat = 'nvss.csv.zip'
+    cutoff = 0.001
+    main(img, cutoff=0.001, nvsscat=nvsscat, outname=None)
+
+