From ef4fbdaf1f94e379af11bbb50f2dfa3c176ddbfc Mon Sep 17 00:00:00 2001
From: AK <kutkin@gmail.com>
Date: Mon, 3 Jul 2023 14:47:40 +0000
Subject: [PATCH] fix matplotlib version

---
 Dockerfile          | 24 ++++++++++++------------
 autocorrelations.py |  9 ++++++---
 cluster.py          |  5 +++--
 3 files changed, 21 insertions(+), 17 deletions(-)
 mode change 100644 => 100755 autocorrelations.py

diff --git a/Dockerfile b/Dockerfile
index 795026f..cc5bee0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,7 +4,7 @@ FROM ubuntu:20.04 as builder
 # It lives on the head of its dependencies.
 
 # Install all build-time dependencies
-RUN export DEBIAN_FRONTEND=noninteractive && \
+RUN export DEBIAN_FRONTEND=noninteractive && \
     apt-get update && \
     apt-get install -y \
         bison \
@@ -13,7 +13,7 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
         casacore-dev \
         cmake \
         flex \
-        gfortran \
+        gfortran \
         git \
         libblas-dev \
         libboost-date-time-dev \
@@ -61,7 +61,7 @@ RUN git clone --depth 1 --branch ${DYSCO_VERSION} \
     cd dysco/build && \
     cmake .. -DPORTABLE=${PORTABLE} && \
     make install -j`nproc`
- 
+
 ARG IDG_VERSION=master
 # IDG doesn't work with --depth 1, because it needs all branches to
 # determine its version :-(
@@ -137,8 +137,8 @@ RUN git clone --depth 1 --branch ${WSCLEAN_VERSION} \
 
 #ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/karma/amd64_Linux_libc6.3/lib/
 #ENV KARMABASE="/usr/local/karma/amd64_Linux_libc6.3"
- 
- 
+
+
 # Do not use `pip` from the Debian repository, but fetch it from PyPA.
 # This way, we are sure that the latest versions of `pip`, `setuptools`, and
 # `wheel` are installed in /usr/local, the only directory we're going to copy
@@ -162,7 +162,7 @@ RUN \
     make -j4 && \
     cp bbs2model cluster editmodel render /usr/local/bin/ && \
     cd
- 
+
 # Tom's stuff
 ADD makemask /src/makemask
 RUN cd /src/makemask && \
@@ -173,10 +173,10 @@ RUN cd /src/makemask && \
     cp makeMaskFits makeNoiseMapFits makeCombMaskFits makeNoiseMapFitsLow /usr/local/bin/ && \
     cd
 
-# gcc getMaxFits.c -o getMaxFits -lcfitsio -lm && \
+# gcc getMaxFits.c -o getMaxFits -lcfitsio -lm && \
 # gcc locNoiseMed.c -o locNoiseMed -lcfitsio -lm && \
 # gcc cookbook.c -o cookbook -lcfitsio -lm && \
- 
+
 #---------------------------------------------------------------------------
 # The image will now be rebuilt without adding the sources, in order to
 # reduce the size of the image.
@@ -199,7 +199,7 @@ ARG DP3_VERSION=master
 ARG WSCLEAN_VERSION=master
 
 
-# Only install run-time required packages 
+# Only install run-time required packages
 RUN export DEBIAN_FRONTEND=noninteractive && \
     apt-get update && \
     apt-get install -y \
@@ -253,13 +253,13 @@ RUN wget -q -O /WSRT_Measures.ztar \
     rm /WSRT_Measures.ztar
 
 # Some python stuff
-RUN python3 -m pip install h5py pandas pyyaml astropy matplotlib scipy shapely bdsf ipython radio_beam scikit-learn
+RUN python3 -m pip install h5py pandas pyyaml astropy matplotlib==3.5.2 scipy shapely bdsf ipython radio_beam scikit-learn
 # cd /src && \
 # git clone https://github.com/lofar-astron/PyBDSF.git && \
 # cd /src/PyBDSF && \
 # python3 -m pip install . && \
-# cd 
+
 
 # AImCal
 ADD imcal.py /opt/imcal.py
 ADD cluster.py /opt/cluster.py
diff --git a/autocorrelations.py b/autocorrelations.py
old mode 100644
new mode 100755
index d85452d..4940001
--- a/autocorrelations.py
+++ b/autocorrelations.py
@@ -27,9 +27,10 @@ def get_autocorr(tab, ant, avg='time', flagged=False):
         data[q.getcol('FLAG')] = np.nan
     if avg.lower() == 'time':
         return abs(np.nanmean(data, axis=0))
-    elif avg.lower().startswith('freq'):
+    elif avg.lower().startswith('freq') and len(data.shape)==3:
+        print(data.shape)
         return abs(np.nanmean(data, axis=1))
-    elif avg.lower().startswith('pol'):
+    elif avg.lower().startswith('pol') and len(data.shape)==3:
         return abs(np.nanmean(data, axis=2))
     else:
         logging.error('Unknown average keywodr, must be time/freq/pol...')
@@ -99,8 +100,10 @@ def plot_autocorrs(ms, flagged=False):
         antname = antnames[i]
         avg_time = get_autocorr(tab, ant, avg='time', flagged=flagged) # TIME_AVG
         avg_freq = get_autocorr(tab, ant, avg='freq', flagged=flagged) # FREQ_AVG
+        if not isinstance(avg_time, np.ndarray) or not isinstance(avg_freq, np.ndarray):
+            continue
         med_time_avgs.append(np.nanmedian(avg_time, axis=0))
-        med_freq_avgs.append(np.nanmedian(avg_freq, axis=0))
+        med_freq_avgs.append(np.nanmedian(avg_freq, axis=1))
         for fig, res in zip ([fig1, fig2], [avg_time, avg_freq]):
             ax = fig.add_subplot(ny, nx, i+1)
             at = AnchoredText(antname, prop=dict(size=9), frameon=True, loc='upper left')
diff --git a/cluster.py b/cluster.py
index 9da0885..e175628 100755
--- a/cluster.py
+++ b/cluster.py
@@ -932,7 +932,7 @@ def main(img, resid, model, clustering_method='Voronoi', add_manual=False, nclus
 #    racen = f[0].header['CRVAL1']
 #    deccen = f[0].header['CRVAL2']
     fig = plt.figure(figsize=[12,12])
-    ax = fig.add_subplot(1,1,1, projection=wcs.celestial)
+    ax = fig.add_subplot(1,1,1, projection=wcs.celestial[0])
     vmin, vmax = np.percentile(image_data, 5), np.percentile(image_data, 95)
     ax.imshow(resid_data, vmin=vmin, vmax=vmax, origin='lower')#cmap='gray', vmin=2e-5, vmax=0.1)#, norm=LogNorm())
 
@@ -960,4 +960,5 @@ def main(img, resid, model, clustering_method='Voronoi', add_manual=False, nclus
 
 ### if __name__ == "__main__":
 if __name__ == "__main__":
-    main(img, resid, model, clustering_method='Voronoi', nclusters=6)
+    pass
+    # main(img, resid, model, clustering_method='Voronoi', nclusters=6)
-- 
GitLab