diff --git a/.gitattributes b/.gitattributes index 86ab2eb848770c958c9423a04c70a2dc488899a7..6b3e29c040766e8420c5d60620fa8218f96305fe 100644 --- a/.gitattributes +++ b/.gitattributes @@ -295,6 +295,8 @@ CEP/Calibration/pystationresponse/test/tStationBeamNCP.in.MS/table.info -text CEP/Calibration/pystationresponse/test/tStationBeamNCP.in.MS/table.lock -text CEP/Calibration/pystationresponse/test/tStationBeamNCP.py -text CEP/Calibration/pystationresponse/test/tStationBeamNCP.sh -text +CEP/Calibration/pystationresponse/test/tpystationresponse.py -text +CEP/Calibration/pystationresponse/test/tpystationresponse.sh -text CEP/DP3/DPPP/etc/CMakeLists.txt -text CEP/DP3/DPPP/etc/DPPP.log_prop -text CEP/DP3/DPPP/include/DPPP/Apply.h -text @@ -312,6 +314,7 @@ CEP/DP3/DPPP/include/DPPP/EstimateMixed.h -text CEP/DP3/DPPP/include/DPPP/EstimateNew.h -text CEP/DP3/DPPP/include/DPPP/GainCal.h -text CEP/DP3/DPPP/include/DPPP/GaussianSource.h -text +CEP/DP3/DPPP/include/DPPP/GridInterpolate.h -text CEP/DP3/DPPP/include/DPPP/H5Parm.h -text CEP/DP3/DPPP/include/DPPP/H5ParmPredict.h -text CEP/DP3/DPPP/include/DPPP/ModelComponent.h -text @@ -327,6 +330,7 @@ CEP/DP3/DPPP/include/DPPP/ScaleData.h -text CEP/DP3/DPPP/include/DPPP/Simulate.h -text CEP/DP3/DPPP/include/DPPP/Simulator.h -text CEP/DP3/DPPP/include/DPPP/SourceDBUtil.h -text +CEP/DP3/DPPP/include/DPPP/Split.h -text CEP/DP3/DPPP/include/DPPP/StManParsetKeys.h -text CEP/DP3/DPPP/include/DPPP/StefCal.h -text CEP/DP3/DPPP/include/DPPP/Stokes.h -text @@ -347,6 +351,7 @@ CEP/DP3/DPPP/src/EstimateMixed.cc -text CEP/DP3/DPPP/src/EstimateNew.cc -text CEP/DP3/DPPP/src/GainCal.cc -text CEP/DP3/DPPP/src/GaussianSource.cc -text +CEP/DP3/DPPP/src/GridInterpolate.cc -text CEP/DP3/DPPP/src/H5Parm.cc -text CEP/DP3/DPPP/src/H5ParmPredict.cc -text CEP/DP3/DPPP/src/ModelComponent.cc -text @@ -363,6 +368,7 @@ CEP/DP3/DPPP/src/Simulate.cc -text CEP/DP3/DPPP/src/Simulator.cc -text CEP/DP3/DPPP/src/SolTab.cc -text CEP/DP3/DPPP/src/SourceDBUtil.cc -text 
+CEP/DP3/DPPP/src/Split.cc -text CEP/DP3/DPPP/src/StefCal.cc -text CEP/DP3/DPPP/src/Stokes.cc -text CEP/DP3/DPPP/src/SubtractMixed.cc -text @@ -381,6 +387,9 @@ CEP/DP3/DPPP/test/tApplyCal.run -text CEP/DP3/DPPP/test/tApplyCal.sh -text CEP/DP3/DPPP/test/tApplyCal2.run -text CEP/DP3/DPPP/test/tApplyCal2.sh -text +CEP/DP3/DPPP/test/tApplyCalH5.cc -text +CEP/DP3/DPPP/test/tApplyCalH5.run -text +CEP/DP3/DPPP/test/tApplyCalH5.sh -text CEP/DP3/DPPP/test/tApplyCal_parmdbscript -text CEP/DP3/DPPP/test/tDemix.in_MS.tgz -text CEP/DP3/DPPP/test/tDemix.run -text @@ -392,6 +401,7 @@ CEP/DP3/DPPP/test/tGainCal.tab.tgz -text CEP/DP3/DPPP/test/tGainCalH5Parm.run -text CEP/DP3/DPPP/test/tGainCalH5Parm.sh -text CEP/DP3/DPPP/test/tGainCal_ref -text +CEP/DP3/DPPP/test/tGridInterpolate.cc -text CEP/DP3/DPPP/test/tH5Parm.cc -text CEP/DP3/DPPP/test/tH5Parm.sh -text CEP/DP3/DPPP/test/tMultiApplyCal.run -text @@ -425,6 +435,7 @@ CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/CMakeLists.txt -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Constraint.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/DDECal.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/KLFitter.h -text +CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/KernelSmoother.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Matrix2x2.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/MultiDirSolver.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/PieceWisePhaseFitter.h -text @@ -434,6 +445,7 @@ CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Register.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationAndDiagonalConstraint.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationConstraint.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/ScreenConstraint.h -text +CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/SmoothnessConstraint.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Stopwatch.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/TECConstraint.h -text CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/screenfitter.h -text @@ -447,6 +459,7 @@ CEP/DP3/DPPP_DDECal/src/Register.cc 
-text CEP/DP3/DPPP_DDECal/src/RotationAndDiagonalConstraint.cc -text CEP/DP3/DPPP_DDECal/src/RotationConstraint.cc -text CEP/DP3/DPPP_DDECal/src/ScreenConstraint.cc -text +CEP/DP3/DPPP_DDECal/src/SmoothnessConstraint.cc -text CEP/DP3/DPPP_DDECal/src/Stopwatch.cc -text CEP/DP3/DPPP_DDECal/src/TECConstraint.cc -text CEP/DP3/DPPP_DDECal/src/screenfitter.cc -text @@ -458,6 +471,20 @@ CEP/DP3/DPPP_DDECal/test/tDDECal.sh -text CEP/DP3/DPPP_DDECal/test/tDDECal_ref -text CEP/DP3/DPPP_DDECal/test/tRotationConstraint.cc -text CEP/DP3/DPPP_DDECal/test/tRotationConstraint.sh -text +CEP/DP3/DPPP_Interpolate/CMake/CheckCXXSymbolExists.cmake -text +CEP/DP3/DPPP_Interpolate/CMake/FindCFITSIO.cmake -text +CEP/DP3/DPPP_Interpolate/CMake/FindCasacore.cmake -text +CEP/DP3/DPPP_Interpolate/CMakeLists-standalone.txt -text +CEP/DP3/DPPP_Interpolate/CMakeLists.txt -text +CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/CMakeLists.txt -text +CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/Interpolate.h -text +CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/buffered_lane.h -text +CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/lane.h -text +CEP/DP3/DPPP_Interpolate/src/CMakeLists.txt -text +CEP/DP3/DPPP_Interpolate/src/Interpolate.cc -text +CEP/DP3/DPPP_Interpolate/test/CMakeLists.txt -text +CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.cc -text +CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.sh -text CEP/DP3/PythonDPPP/CMakeLists.txt -text CEP/DP3/PythonDPPP/include/PythonDPPP/CMakeLists.txt -text CEP/DP3/PythonDPPP/include/PythonDPPP/DPStepBase.h -text @@ -1025,6 +1052,7 @@ CEP/Pipeline/framework/lofarpipe/support/deprecated/clusterhandler.py eol=lf CEP/Pipeline/framework/lofarpipe/support/deprecated/clusterlogger.py eol=lf CEP/Pipeline/framework/lofarpipe/support/deprecated/ipython.py eol=lf CEP/Pipeline/framework/lofarpipe/support/deprecated/lofarrecipe.py eol=lf +CEP/Pipeline/framework/lofarpipe/support/feedback_version.py -text 
CEP/Pipeline/framework/lofarpipe/support/group_data.py eol=lf CEP/Pipeline/framework/lofarpipe/support/jobserver.py eol=lf CEP/Pipeline/framework/lofarpipe/support/lofarexceptions.py eol=lf @@ -1086,6 +1114,9 @@ CEP/Pipeline/recipes/sip/helpers/WritableParmDB.py -text CEP/Pipeline/recipes/sip/helpers/__init__.py eol=lf CEP/Pipeline/recipes/sip/helpers/data_quality.py eol=lf CEP/Pipeline/recipes/sip/helpers/metadata.py eol=lf +CEP/Pipeline/recipes/sip/helpers/test/CMakeLists.txt -text +CEP/Pipeline/recipes/sip/helpers/test/t_metadata.py -text +CEP/Pipeline/recipes/sip/helpers/test/t_metadata.sh -text CEP/Pipeline/recipes/sip/master/__init__.py eol=lf CEP/Pipeline/recipes/sip/master/copier.py -text CEP/Pipeline/recipes/sip/master/deprecated/bbs.py eol=lf @@ -1222,6 +1253,8 @@ CEP/Pipeline/test/test_framework/__init__.py eol=lf CEP/Pipeline/test/test_framework/fixture/__init__.py eol=lf CEP/Pipeline/test/test_framework/fixture/gsmutils.py eol=lf CEP/Pipeline/test/test_framework/fixture/lofar/__init__.py eol=lf +CEP/Pipeline/test/test_framework/fixture/lofar/common/__init__.py -text +CEP/Pipeline/test/test_framework/fixture/lofar/common/defaultmailaddresses.py -text CEP/Pipeline/test/test_framework/fixture/lofar/gsm/__init__.py -text CEP/Pipeline/test/test_framework/fixture/lofar/gsm/gsmutils.py -text CEP/Pipeline/test/test_framework/fixture/lofar/mstools.py -text @@ -1646,16 +1679,20 @@ LCS/Messaging/test/tTimeOut.cc -text LCS/PyCommon/CMakeLists.txt -text LCS/PyCommon/__init__.py -text LCS/PyCommon/cache.py -text +LCS/PyCommon/cep4_utils.py -text LCS/PyCommon/datetimeutils.py -text LCS/PyCommon/defaultmailaddresses.py -text LCS/PyCommon/factory.py -text LCS/PyCommon/flask_utils.py -text LCS/PyCommon/math.py -text LCS/PyCommon/postgres.py -text -LCS/PyCommon/subprocess.py -text +LCS/PyCommon/subprocess_utils.py -text LCS/PyCommon/test/python-coverage.sh eol=lf LCS/PyCommon/test/t_cache.py -text LCS/PyCommon/test/t_cache.sh -text +LCS/PyCommon/test/t_cep4_utils.py 
-text +LCS/PyCommon/test/t_cep4_utils.run -text +LCS/PyCommon/test/t_cep4_utils.sh -text LCS/PyCommon/test/t_dbcredentials.run eol=lf LCS/PyCommon/test/t_dbcredentials.sh eol=lf LCS/PyCommon/test/t_defaultmailaddresses.py -text @@ -1728,6 +1765,7 @@ LCS/WinCCWrapper/src/__init__.py -text LCS/WinCCWrapper/test/CMakeLists.txt -text LCS/WinCCWrapper/test/WinCCGet.cc -text LCS/WinCCWrapper/test/WinCCSet.cc -text +LCS/WinCCWrapper/test/mock.py -text LCS/doc/package.dox -text LCU/Firmware/images/ap3b_v6_2.hex -text LCU/Firmware/images/ap3b_v7_13.bit -text @@ -1866,6 +1904,7 @@ LCU/StationTest/rmfiles.sh eol=lf LCU/StationTest/rsp_version.sh eol=lf LCU/StationTest/rsp_xc_160.sh eol=lf LCU/StationTest/rsp_xc_200.sh eol=lf +LCU/StationTest/rspctlprobe.py -text LCU/StationTest/serdes.sh eol=lf LCU/StationTest/station_production.py eol=lf LCU/StationTest/stationtest.py eol=lf @@ -1963,6 +2002,7 @@ LCU/StationTest/xc_160_verify.sh eol=lf LCU/StationTest/xc_200_setup.sh eol=lf LCU/StationTest/xc_200_verify.sh eol=lf LCU/checkhardware/check_hardware.py -text +LCU/checkhardware/checkhardware_lib/CMakeLists.txt -text LCU/checkhardware/checkhardware_lib/__init__.py -text LCU/checkhardware/checkhardware_lib/data.py -text LCU/checkhardware/checkhardware_lib/db.py -text @@ -1974,6 +2014,7 @@ LCU/checkhardware/checkhardware_lib/lofar.py -text LCU/checkhardware/checkhardware_lib/reporting.py -text LCU/checkhardware/checkhardware_lib/rsp.py -text LCU/checkhardware/checkhardware_lib/settings.py -text +LCU/checkhardware/checkhardware_lib/spectrum_checks/CMakeLists.txt -text LCU/checkhardware/checkhardware_lib/spectrum_checks/__init__.py -text LCU/checkhardware/checkhardware_lib/spectrum_checks/cable_reflection.py -text LCU/checkhardware/checkhardware_lib/spectrum_checks/down.py -text @@ -2043,7 +2084,14 @@ LCU/checkhardware/do_station_test.sh -text svneol=unset#application/x-shellscrip LCU/checkhardware/rtsm.py -text LCU/checkhardware/show_bad_spectra.py -text 
LCU/checkhardware/show_test_result.py -text +LCU/checkhardware/test/CMakeLists.txt -text +LCU/checkhardware/test/t_check_hardware.py -text +LCU/checkhardware/test/t_check_hardware.run -text +LCU/checkhardware/test/t_check_hardware.sh -text +LCU/checkhardware/test/test-check_hardware.conf -text LCU/checkhardware/update_pvss.py -text +LTA/LTACommon/CMakeLists.txt -text +LTA/LTACommon/LTA-SIP.xsd -text LTA/LTAIngest/LTAIngestClient/bin/CMakeLists.txt -text LTA/LTAIngest/LTAIngestClient/bin/ingestaddjobstoqueue -text LTA/LTAIngest/LTAIngestClient/bin/ingestmonitor -text @@ -2054,10 +2102,14 @@ LTA/LTAIngest/LTAIngestClient/lib/ingestbuslistener.py -text LTA/LTAIngest/LTAIngestClient/lib/rpc.py -text LTA/LTAIngest/LTAIngestCommon/CMakeLists.txt -text LTA/LTAIngest/LTAIngestCommon/config.py -text +LTA/LTAIngest/LTAIngestCommon/srm.py -text LTA/LTAIngest/LTAIngestCommon/test/CMakeLists.txt -text LTA/LTAIngest/LTAIngestCommon/test/t_job.py -text LTA/LTAIngest/LTAIngestCommon/test/t_job.run -text LTA/LTAIngest/LTAIngestCommon/test/t_job.sh -text +LTA/LTAIngest/LTAIngestCommon/test/t_srm.py -text +LTA/LTAIngest/LTAIngestCommon/test/t_srm.run -text +LTA/LTAIngest/LTAIngestCommon/test/t_srm.sh -text LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/bin/CMakeLists.txt -text LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/bin/ingestjobmanagementserver -text LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/bin/ingestjobmanagementserver.ini -text @@ -2078,8 +2130,6 @@ LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/bin/ingesttransferserver.i LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/bin/ltacp -text LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/bin/md5a32bc/CMakeLists.txt -text LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/bin/md5a32bc/md5a32bc.c -text -LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/CMakeLists.txt -text -LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/LTA-SIP.xsd -text 
LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/CMakeLists.txt -text LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingesttransferserver.py -text LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ltacp.py -text @@ -2091,6 +2141,8 @@ LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ingestpipeline.sh - LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.py -text LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.run -text LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.sh -text +LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.py -text +LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.sh -text LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/CMakeLists.txt -text LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/bin/CMakeLists.txt -text LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/bin/ingestwebserver -text @@ -2104,22 +2156,37 @@ LTA/LTAIngest/test/CMakeLists.txt -text LTA/doc/package.dox -text LTA/ltastorageoverview/CMakeLists.txt -text LTA/ltastorageoverview/bin/CMakeLists.txt -text +LTA/ltastorageoverview/bin/ltastorageoverviewreport -text LTA/ltastorageoverview/bin/ltastorageoverviewscraper -text +LTA/ltastorageoverview/bin/ltastorageoverviewscraper.ini -text +LTA/ltastorageoverview/bin/ltastorageoverviewwebservice -text +LTA/ltastorageoverview/bin/ltastorageoverviewwebservice.ini -text LTA/ltastorageoverview/doc/lta_storage_overview.md -text LTA/ltastorageoverview/lib/CMakeLists.txt -text LTA/ltastorageoverview/lib/__init__.py -text -LTA/ltastorageoverview/lib/create_db_ltastorageoverview.sql -text +LTA/ltastorageoverview/lib/ingesteventhandler.py -text +LTA/ltastorageoverview/lib/ltaso/create_db_ltastorageoverview.sql -text LTA/ltastorageoverview/lib/report.py -text LTA/ltastorageoverview/lib/scraper.py -text LTA/ltastorageoverview/lib/store.py -text LTA/ltastorageoverview/lib/webservice/__init__.py -text 
LTA/ltastorageoverview/lib/webservice/templates/index.html -text LTA/ltastorageoverview/lib/webservice/webservice.py -text -LTA/ltastorageoverview/ltastorageoverview_build.sh -text LTA/ltastorageoverview/test/CMakeLists.txt -text +LTA/ltastorageoverview/test/common_test_ltastoragedb.py -text +LTA/ltastorageoverview/test/db_performance_test.py -text +LTA/ltastorageoverview/test/integration_test_store.py -text +LTA/ltastorageoverview/test/integration_test_store.run -text +LTA/ltastorageoverview/test/integration_test_store.sh -text +LTA/ltastorageoverview/test/test_ingesteventhandler.py -text +LTA/ltastorageoverview/test/test_ingesteventhandler.run -text +LTA/ltastorageoverview/test/test_ingesteventhandler.sh -text LTA/ltastorageoverview/test/test_lso_webservice.py -text LTA/ltastorageoverview/test/test_lso_webservice.run -text LTA/ltastorageoverview/test/test_lso_webservice.sh -text +LTA/ltastorageoverview/test/test_scraper.py -text +LTA/ltastorageoverview/test/test_scraper.run -text +LTA/ltastorageoverview/test/test_scraper.sh -text LTA/ltastorageoverview/test/test_store.py -text LTA/ltastorageoverview/test/test_store.run -text LTA/ltastorageoverview/test/test_store.sh -text @@ -2129,7 +2196,6 @@ LTA/sip/bin/feedback2sip -text LTA/sip/bin/validatesip -text LTA/sip/bin/visualizesip -text LTA/sip/lib/CMakeLists.txt -text -LTA/sip/lib/LTA-SIP.xsd -text LTA/sip/lib/__init__.py -text LTA/sip/lib/constants.py -text LTA/sip/lib/constants_generator.py -text @@ -2269,6 +2335,7 @@ MAC/APL/PAC/ICAL_Protocol/src/SpectralWindow.cc -text MAC/APL/PAC/ICAL_Protocol/src/SubArray.cc -text MAC/APL/PAC/ITRFBeamServer/src/StatCal.cc -text MAC/APL/PAC/ITRFBeamServer/src/StatCal.h -text +MAC/APL/PAC/ITRFBeamServer/src/beamctl.log_prop -text MAC/APL/PAC/ITRFBeamServer/src/iBeamServer.conf.in -text MAC/APL/PAC/ITRFBeamServer/src/ibeamctl.conf.in -text MAC/APL/PAC/ITRFBeamServer/test/AntennaField.conf -text @@ -2368,6 +2435,7 @@ MAC/APL/PIC/RSP_Driver/src/SetSwapxyCmd.cc -text 
MAC/APL/PIC/RSP_Driver/src/SetSwapxyCmd.h -text MAC/APL/PIC/RSP_Driver/src/UpdBitModeCmd.cc -text MAC/APL/PIC/RSP_Driver/src/UpdBitModeCmd.h -text +MAC/APL/PIC/RSP_Driver/src/rspctl.log_prop -text MAC/APL/PIC/RSP_Driver/test/tCableAttenuation.in_1 -text MAC/APL/PIC/RSP_Driver/test/tCableAttenuation.in_2 -text MAC/APL/PIC/RSP_Driver/test/tCableAttenuation.in_3 -text @@ -2401,6 +2469,7 @@ MAC/APL/PIC/TBB_Driver/src/StopTimedCmd.h -text MAC/APL/PIC/TBB_Driver/src/UdpCmd.cc -text MAC/APL/PIC/TBB_Driver/src/UdpCmd.h -text MAC/APL/PIC/TBB_Driver/src/UdpIpTools.cc -text +MAC/APL/PIC/TBB_Driver/src/tbbctl.log_prop -text MAC/APL/PIC/TBB_Driver/test/StubRawEvent.cc -text MAC/APL/PIC/TBB_Driver/test/StubRawEvent.h -text MAC/APL/PIC/doc/package.dox -text @@ -3263,6 +3332,7 @@ MAC/Navigator2/data/PVSS[!!-~]performance[!!-~]results.xls -text MAC/Navigator2/data/pow.mib -text MAC/Navigator2/dplist/maincu_system.dpl -text MAC/Navigator2/dplist/station_system.dpl -text +MAC/Navigator2/panels/Alerts/alarmsWinCCOA_Filtering.pnl -text MAC/Navigator2/panels/Alerts/lofar_alarms.pnl -text MAC/Navigator2/panels/FRENKM/AssignAlarmClass.pnl -text MAC/Navigator2/panels/FRENKM/AssingSumAlarm.pnl -text @@ -3379,8 +3449,10 @@ MAC/Navigator2/panels/TrendPanel.pnl -text MAC/Navigator2/panels/emptyPanel.pnl -text MAC/Navigator2/panels/main.pnl -text MAC/Navigator2/panels/navigator.pnl -text +MAC/Navigator2/panels/navigator_iltswitch.pnl -text MAC/Navigator2/panels/nopanel.pnl -text MAC/Navigator2/panels/objects/Alerts/AESRow.pnl -text +MAC/Navigator2/panels/objects/Alerts/Station_Alerts.pnl -text MAC/Navigator2/panels/objects/Alerts/alarmsWinCCOA.pnl -text MAC/Navigator2/panels/objects/Alerts/alarmsWinCCOA_3.10.pnl -text MAC/Navigator2/panels/objects/FRENKM_STATION.pnl -text @@ -3522,8 +3594,10 @@ MAC/Navigator2/panels/objects/show_legenda.pnl -text MAC/Navigator2/panels/objects/swlevel.pnl -text MAC/Navigator2/panels/objects/systemMainLine.pnl -text 
MAC/Navigator2/panels/vision/WebClient_start.pnl -text +MAC/Navigator2/panels/vision/aes/AESComments.pnl -text MAC/Navigator2/panels/vision/aes/AES_properties.pnl -text MAC/Navigator2/panels/vision/aes/AEScreen.pnl -text +MAC/Navigator2/panels/vision/aes/AS_detail_DP.pnl -text MAC/Navigator2/panels/vision/aes/saved_AESRow.pnl -text MAC/Navigator2/pictures/16_empty.gif -text svneol=unset#image/gif MAC/Navigator2/pictures/16_hand_right.gif -text svneol=unset#image/gif @@ -3870,6 +3944,37 @@ MAC/_System/lofar29.sysconf -text svneol=native#application/octet-stream MAC/_System/lofar30.journal -text svneol=native#application/octet-stream MAC/_System/lofar30.sysconf -text svneol=native#application/octet-stream MAC/doc/package.dox -text +QA/CMakeLists.txt -text +QA/QA_Common/CMakeLists.txt -text +QA/QA_Common/bin/CMakeLists.txt -text +QA/QA_Common/bin/create_test_hypercube -text +QA/QA_Common/bin/find_hdf5 -text +QA/QA_Common/bin/show_hdf5_info -text +QA/QA_Common/lib/CMakeLists.txt -text +QA/QA_Common/lib/__init__.py -text +QA/QA_Common/lib/geoconversions.py -text +QA/QA_Common/lib/hdf5_io.py -text +QA/QA_Common/lib/utils.py -text +QA/QA_Common/test/CMakeLists.txt -text +QA/QA_Common/test/create_test_hypercube -text +QA/QA_Common/test/t_hdf5_io.py -text +QA/QA_Common/test/t_hdf5_io.run -text +QA/QA_Common/test/t_hdf5_io.sh -text +QA/QA_Common/test/test_utils.py -text +QA/QA_Service/CMakeLists.txt -text +QA/QA_Service/bin/CMakeLists.txt -text +QA/QA_Service/bin/qa_service -text +QA/QA_Service/bin/qa_service.ini -text +QA/QA_Service/bin/qa_webservice.ini -text +QA/QA_Service/lib/CMakeLists.txt -text +QA/QA_Service/lib/QABusListener.py -text +QA/QA_Service/lib/__init__.py -text +QA/QA_Service/lib/config.py -text +QA/QA_Service/lib/qa_service.py -text +QA/QA_Service/test/CMakeLists.txt -text +QA/QA_Service/test/t_qa_service.py -text +QA/QA_Service/test/t_qa_service.run -text +QA/QA_Service/test/t_qa_service.sh -text RTCP/Cobalt/BrokenAntennaInfo/CMakeLists.txt -text 
RTCP/Cobalt/BrokenAntennaInfo/test/CMakeLists.txt -text RTCP/Cobalt/BrokenAntennaInfo/test/debugbeaminfo.py -text @@ -4045,6 +4150,7 @@ RTCP/Cobalt/GPUProc/test/Kernels/tKernelFunctions.sh eol=lf RTCP/Cobalt/GPUProc/test/Kernels/tKernelPerformance.py eol=lf RTCP/Cobalt/GPUProc/test/Kernels/tKernelPerformance.run eol=lf RTCP/Cobalt/GPUProc/test/Kernels/tKernelPerformance.sh eol=lf +RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.sh eol=lf RTCP/Cobalt/GPUProc/test/Kernels/visualizeBeamformer.py eol=lf RTCP/Cobalt/GPUProc/test/Pipelines/tCorrelatorPipelineProcessObs.sh eol=lf RTCP/Cobalt/GPUProc/test/Storage/tStorageProcesses.queue -text @@ -4128,6 +4234,7 @@ RTCP/Cobalt/OutputProc/test/tTBB_Writer-transient.in_1/rw_20110719_110541_1110.d RTCP/Cobalt/OutputProc/test/tTBB_Writer-transient.in_1/tTBB_Writer-transient-refout.raw -text RTCP/Cobalt/OutputProc/test/tTBB_Writer-transient.run eol=lf RTCP/Cobalt/OutputProc/test/tTBB_Writer-transient.sh eol=lf +RTCP/Cobalt/Tools/plot_cobalt_flagging.py -text RTCP/Cobalt/clAmdFft/appmlEnv.sh -text RTCP/Cobalt/clAmdFft/bin32/clAmdFft.Client -text RTCP/Cobalt/clAmdFft/bin32/clAmdFft.Client-1.8.291 -text @@ -4454,6 +4561,7 @@ SAS/OTDB/test/tQueryPIC.cc -text SAS/OTDB/test/t_getTreeGroup.py -text SAS/OTDB/test/t_getTreeGroup.run -text SAS/OTDB/test/t_getTreeGroup.sh -text +SAS/OTDB/test/test_db_consistency.cc -text SAS/OTDB/test/unittest_db.dump.gz -text svneol=unset#application/x-gzip SAS/OTDB_Services/CMakeLists.txt -text SAS/OTDB_Services/OTDB.ini -text @@ -4564,9 +4672,15 @@ SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py -te SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/rotspservice.py -text SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py -text SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/CMakeLists.txt -text +SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.py -text 
+SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.run -text +SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.sh -text SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_rotspservice.py -text SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_rotspservice.run -text SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_rotspservice.sh -text +SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.py -text +SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.run -text +SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.sh -text SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt -text SAS/ResourceAssignment/ResourceAssigner/bin/CMakeLists.txt -text SAS/ResourceAssignment/ResourceAssigner/bin/resourceassigner -text @@ -4582,6 +4696,7 @@ SAS/ResourceAssignment/ResourceAssigner/lib/resource_availability_checker.py -te SAS/ResourceAssignment/ResourceAssigner/lib/schedulechecker.py -text SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py -text SAS/ResourceAssignment/ResourceAssigner/test/CMakeLists.txt -text +SAS/ResourceAssignment/ResourceAssigner/test/radb_common_testing.py -text SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.py -text SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.run -text SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.sh -text @@ -4600,12 +4715,10 @@ SAS/ResourceAssignment/ResourceAssignmentDatabase/config.py -text SAS/ResourceAssignment/ResourceAssignmentDatabase/doc/ResourceAssignmentDatabase.md -text SAS/ResourceAssignment/ResourceAssignmentDatabase/doc/package.dox -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py -text -SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/CMakeLists.txt -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/README -text 
SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_notifications.sql -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_resource_allocation_statics.sql -text -SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_triggers.sql -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_virtual_instrument.sql -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_add_notifications.sql.py -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_add_virtual_instrument.sql.py -text @@ -4616,6 +4729,7 @@ SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.ini -text SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py -text SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/CMakeLists.txt -text +SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py -text SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_performance_test.py -text SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb.py -text SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb.run -text @@ -4833,6 +4947,7 @@ SAS/ResourceAssignment/ResourceAssignmentEstimator/test/CMakeLists.txt -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/__init__.py -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_beam_observation -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_calibration_pipeline -text +SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_calibration_pipeline_dysco -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_calibration_pipeline_predecessor_558022 -text 
SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_imaging_pipeline -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_interferometer_observation -text @@ -4848,6 +4963,7 @@ SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_est SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_pulsar_pipeline -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_beam_observation -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_calibration_pipeline -text +SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_calibration_pipeline_dysco -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_imaging_pipeline -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_interferometer_observation -text SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_long_baseline_observation -text @@ -5086,6 +5202,7 @@ SAS/TriggerServices/bin/triggercancellationservice.ini -text SAS/TriggerServices/bin/triggerrestinterface -text SAS/TriggerServices/bin/triggerservice -text SAS/TriggerServices/bin/triggerservice.ini -text +SAS/TriggerServices/config/dbcredentials_trigger_restinterface.ini -text SAS/TriggerServices/django_rest/CMakeLists.txt -text SAS/TriggerServices/django_rest/db.sqlite3 -text SAS/TriggerServices/django_rest/manage.py -text @@ -5161,6 +5278,7 @@ SAS/XML_generator/test/test_regression.in_data/txt/old_Long_Baseline_test.txt -t SAS/XML_generator/test/test_regression.in_data/txt/old_input.txt -text SAS/XML_generator/test/test_regression.in_data/txt/old_pulsar_pipe_test.txt -text SAS/XML_generator/test/test_regression.in_data/txt/test_LB.txt -text 
+SAS/XML_generator/test/test_regression.in_data/txt/test_dysco.txt -text SAS/XML_generator/test/test_regression.in_data/txt/test_input.txt -text SAS/XML_generator/test/test_regression.in_data/txt/test_input_cep4.txt -text SAS/XML_generator/test/test_regression.in_data/txt/test_input_commensal_obs_DRAGNET.txt -text @@ -5181,6 +5299,7 @@ SAS/XML_generator/test/test_regression.in_data/xml/input.xml -text SAS/XML_generator/test/test_regression.in_data/xml/lc4_019_1.xml -text SAS/XML_generator/test/test_regression.in_data/xml/pulsar_pipe_test.xml -text SAS/XML_generator/test/test_regression.in_data/xml/test_LB.xml -text +SAS/XML_generator/test/test_regression.in_data/xml/test_dysco.xml -text SAS/XML_generator/test/test_regression.in_data/xml/test_input.xml -text SAS/XML_generator/test/test_regression.in_data/xml/test_input_cep4.xml -text SAS/XML_generator/test/test_regression.in_data/xml/test_input_long_baseline_pipeline.xml -text diff --git a/.subversion/config b/.subversion/config index 87c5fdaaf1975f147bef669481e08043f74731c6..d585732d67c39b35756133f9a3cfc8a6f9ae47c6 100644 --- a/.subversion/config +++ b/.subversion/config @@ -11,9 +11,10 @@ ### Valid password stores: ### gnome-keyring (Unix-like systems) ### kwallet (Unix-like systems) +### gpg-agent (Unix-like systems) ### keychain (Mac OS X) ### windows-cryptoapi (Windows) -password-stores = gnome-keyring,kwallet,keychain,windows-cryptoapi +password-stores = gnome-keyring,kwallet,gpg-agent,keychain,windows-cryptoapi ### To disable all password stores, use an empty list: # password-stores = ### @@ -25,6 +26,13 @@ password-stores = gnome-keyring,kwallet,keychain,windows-cryptoapi ### using KWallet. It defaults to 'no'. 
# kwallet-svn-application-name-with-pid = yes ### +### Set ssl-client-cert-file-prompt to 'yes' to cause the client +### to prompt for a path to a client cert file when the server +### requests a client cert but no client cert file is found in the +### expected place (see the 'ssl-client-cert-file' option in the +### 'servers' configuration file). Defaults to 'no'. +# ssl-client-cert-file-prompt = no +### ### The rest of the [auth] section in this file has been deprecated. ### Both 'store-passwords' and 'store-auth-creds' can now be ### specified in the 'servers' file in your config directory @@ -86,14 +94,14 @@ password-stores = gnome-keyring,kwallet,keychain,windows-cryptoapi ### path separator. A single backslash will be treated as an ### escape for the following character. -### Section for configuring miscelleneous Subversion options. +### Section for configuring miscellaneous Subversion options. [miscellany] ### Set global-ignores to a set of whitespace-delimited globs ### which Subversion will ignore in its 'status' output, and ### while importing or adding files and directories. ### '*' matches leading dots, e.g. '*.rej' matches '.foo.rej'. -# global-ignores = *.o *.lo *.la *.al .libs *.so *.so.[0-9]* *.a *.pyc *.pyo -# *.rej *~ #*# .#* .*.swp .DS_Store +# global-ignores = *.o *.lo *.la *.al .libs *.so *.so.[0-9]* *.a *.pyc *.pyo __pycache__ +# *.rej *~ #*# .#* .*.swp .DS_Store [Tt]humbs.db ### Set log-encoding to the default encoding for log messages # log-encoding = latin1 ### Set use-commit-times to make checkout/update/switch/revert @@ -123,6 +131,16 @@ enable-auto-props = yes ### ra_local (the file:// scheme). The value represents the number ### of MB used by the cache. # memory-cache-size = 16 +### Set diff-ignore-content-type to 'yes' to cause 'svn diff' to +### attempt to show differences of all modified files regardless +### of their MIME content type. 
By default, Subversion will only +### attempt to show differences for files believed to have human- +### readable (non-binary) content. This option is especially +### useful when Subversion is configured (via the 'diff-cmd' +### option) to employ an external differencing tool which is able +### to show meaningful differences for binary file formats. [New +### in 1.9] +# diff-ignore-content-type = no ### Section for configuring automatic properties. [auto-props] @@ -208,3 +226,24 @@ README = svn:eol-style=native Doxyfile = svn:eol-style=native Makefile = svn:eol-style=native;svn:keywords=Author Date Id Revision makefile = svn:eol-style=native;svn:keywords=Author Date Id Revision + +### Section for configuring working copies. +[working-copy] +### Set to a list of the names of specific clients that should use +### exclusive SQLite locking of working copies. This increases the +### performance of the client but prevents concurrent access by +### other clients. Third-party clients may also support this +### option. +### Possible values: +### svn (the command line client) +# exclusive-locking-clients = +### Set to true to enable exclusive SQLite locking of working +### copies by all clients using the 1.8 APIs. Enabling this may +### cause some clients to fail to work properly. This does not have +### to be set for exclusive-locking-clients to work. +# exclusive-locking = false +### Set the SQLite busy timeout in milliseconds: the maximum time +### the client waits to get access to the SQLite database before +### returning an error. The default is 10000, i.e. 10 seconds. +### Longer values may be useful when exclusive locking is enabled. 
+# busy-timeout = 10000 diff --git a/CEP/Calibration/StationResponse/include/StationResponse/ITRFDirection.h b/CEP/Calibration/StationResponse/include/StationResponse/ITRFDirection.h index 1247be6c5e239f2282efd999cd305d8b0cedc6ba..280a6111a99443ddbe180c8eb45bdd5a72906d45 100644 --- a/CEP/Calibration/StationResponse/include/StationResponse/ITRFDirection.h +++ b/CEP/Calibration/StationResponse/include/StationResponse/ITRFDirection.h @@ -29,9 +29,9 @@ #include <StationResponse/Types.h> #include <Common/lofar_smartptr.h> -#include <measures/Measures/MeasFrame.h> -#include <measures/Measures/MeasConvert.h> -#include <measures/Measures/MCDirection.h> +#include <casacore/measures/Measures/MeasFrame.h> +#include <casacore/measures/Measures/MeasConvert.h> +#include <casacore/measures/Measures/MCDirection.h> namespace LOFAR { @@ -53,8 +53,8 @@ public: vector3r_t at(real_t time) const; private: - mutable casa::MeasFrame itsFrame; - mutable casa::MDirection::Convert itsConverter; + mutable casacore::MeasFrame itsFrame; + mutable casacore::MDirection::Convert itsConverter; }; // @} diff --git a/CEP/Calibration/StationResponse/include/StationResponse/LofarMetaDataUtil.h b/CEP/Calibration/StationResponse/include/StationResponse/LofarMetaDataUtil.h index a091e71dfcee646585b6fef3db01369648739593..57327b0ed0eb3c0bfbdd7857ef71b4335822a401 100644 --- a/CEP/Calibration/StationResponse/include/StationResponse/LofarMetaDataUtil.h +++ b/CEP/Calibration/StationResponse/include/StationResponse/LofarMetaDataUtil.h @@ -29,8 +29,9 @@ // LOFAR observations stored in MS format. 
#include <StationResponse/Station.h> -#include <ms/MeasurementSets/MeasurementSet.h> -#include <ms/MeasurementSets/MSAntennaColumns.h> +#include <casacore/ms/MeasurementSets/MeasurementSet.h> +#include <casacore/ms/MeasurementSets/MSAntennaColumns.h> +#include <casacore/measures/Measures/MDirection.h> namespace LOFAR { @@ -40,18 +41,22 @@ namespace StationResponse // \addtogroup StationResponse // @{ -Station::Ptr readStation(const casa::MeasurementSet &ms, unsigned int id); +Station::Ptr readStation(const casacore::MeasurementSet &ms, unsigned int id); template <typename T> -void readStations(const casa::MeasurementSet &ms, T out_it) +void readStations(const casacore::MeasurementSet &ms, T out_it) { - casa::ROMSAntennaColumns antenna(ms.antenna()); + casacore::ROMSAntennaColumns antenna(ms.antenna()); for(unsigned int i = 0; i < antenna.nrow(); ++i) { *out_it++ = readStation(ms, i); } } +// Read the tile beam direction from a LOFAR MS. If it is not defined, +// this function returns the delay center. 
+casacore::MDirection readTileBeamDirection(const casacore::MeasurementSet &ms); + // @} } //# namespace StationResponse diff --git a/CEP/Calibration/StationResponse/src/AntennaField.cc b/CEP/Calibration/StationResponse/src/AntennaField.cc index 864749d83740a67af94bfab99f9c022aa1b40df0..d3ca5879353d523a70109c631d1046055f08b713 100644 --- a/CEP/Calibration/StationResponse/src/AntennaField.cc +++ b/CEP/Calibration/StationResponse/src/AntennaField.cc @@ -26,7 +26,7 @@ #include <StationResponse/Constants.h> #include <StationResponse/MathUtil.h> #include <ElementResponse/ElementResponse.h> -#include <measures/Measures/MeasFrame.h> +#include <casacore/measures/Measures/MeasFrame.h> namespace LOFAR { diff --git a/CEP/Calibration/StationResponse/src/ITRFDirection.cc b/CEP/Calibration/StationResponse/src/ITRFDirection.cc index 7d2e774f56aef51e8c80fecdfdf7301c7a64eed0..f7f9c49e02551bfac1b6afbdeb0c14acbeaa449d 100644 --- a/CEP/Calibration/StationResponse/src/ITRFDirection.cc +++ b/CEP/Calibration/StationResponse/src/ITRFDirection.cc @@ -23,9 +23,9 @@ #include <lofar_config.h> #include <StationResponse/ITRFDirection.h> -#include <measures/Measures/MPosition.h> -#include <measures/Measures/MDirection.h> -#include <measures/Measures/MEpoch.h> +#include <casacore/measures/Measures/MPosition.h> +#include <casacore/measures/Measures/MDirection.h> +#include <casacore/measures/Measures/MEpoch.h> namespace LOFAR { @@ -35,39 +35,39 @@ namespace StationResponse ITRFDirection::ITRFDirection(const vector3r_t &position, const vector2r_t &direction) { - casa::MVPosition mvPosition(position[0], position[1], position[2]); - casa::MPosition mPosition(mvPosition, casa::MPosition::ITRF); - itsFrame = casa::MeasFrame(casa::MEpoch(), mPosition); + casacore::MVPosition mvPosition(position[0], position[1], position[2]); + casacore::MPosition mPosition(mvPosition, casacore::MPosition::ITRF); + itsFrame = casacore::MeasFrame(casacore::MEpoch(), mPosition); // Order of angles seems to be longitude 
(along the equator), lattitude // (towards the pole). - casa::MVDirection mvDirection(direction[0], direction[1]); - casa::MDirection mDirection(mvDirection, casa::MDirection::J2000); - itsConverter = casa::MDirection::Convert(mDirection, - casa::MDirection::Ref(casa::MDirection::ITRF, itsFrame)); + casacore::MVDirection mvDirection(direction[0], direction[1]); + casacore::MDirection mDirection(mvDirection, casacore::MDirection::J2000); + itsConverter = casacore::MDirection::Convert(mDirection, + casacore::MDirection::Ref(casacore::MDirection::ITRF, itsFrame)); } ITRFDirection::ITRFDirection(const vector3r_t &position, const vector3r_t &direction) { - casa::MVPosition mvPosition(position[0], position[1], position[2]); - casa::MPosition mPosition(mvPosition, casa::MPosition::ITRF); - itsFrame = casa::MeasFrame(casa::MEpoch(), mPosition); + casacore::MVPosition mvPosition(position[0], position[1], position[2]); + casacore::MPosition mPosition(mvPosition, casacore::MPosition::ITRF); + itsFrame = casacore::MeasFrame(casacore::MEpoch(), mPosition); - casa::MVDirection mvDirection(direction[0], direction[1], direction[2]); - casa::MDirection mDirection(mvDirection, casa::MDirection::J2000); - itsConverter = casa::MDirection::Convert(mDirection, - casa::MDirection::Ref(casa::MDirection::ITRF, itsFrame)); + casacore::MVDirection mvDirection(direction[0], direction[1], direction[2]); + casacore::MDirection mDirection(mvDirection, casacore::MDirection::J2000); + itsConverter = casacore::MDirection::Convert(mDirection, + casacore::MDirection::Ref(casacore::MDirection::ITRF, itsFrame)); } vector3r_t ITRFDirection::at(real_t time) const { // Cannot use MeasFrame::resetEpoch(Double), because that assumes the // argument is UTC in (fractional) days (MJD). 
- itsFrame.resetEpoch(casa::Quantity(time, "s")); + itsFrame.resetEpoch(casacore::Quantity(time, "s")); - const casa::MDirection &mITRF = itsConverter(); - const casa::MVDirection &mvITRF = mITRF.getValue(); + const casacore::MDirection &mITRF = itsConverter(); + const casacore::MVDirection &mvITRF = mITRF.getValue(); vector3r_t itrf = {{mvITRF(0), mvITRF(1), mvITRF(2)}}; return itrf; diff --git a/CEP/Calibration/StationResponse/src/LofarMetaDataUtil.cc b/CEP/Calibration/StationResponse/src/LofarMetaDataUtil.cc index c01e5cf0c2361126af16f88b3f77305cfdd45055..4d96af8dfe4f3f0c1b7b945d625e66bc5d977282 100644 --- a/CEP/Calibration/StationResponse/src/LofarMetaDataUtil.cc +++ b/CEP/Calibration/StationResponse/src/LofarMetaDataUtil.cc @@ -29,40 +29,37 @@ #include <StationResponse/TileAntenna.h> #include <StationResponse/DualDipoleAntenna.h> -#include <measures/Measures/MDirection.h> -#include <measures/Measures/MPosition.h> -#include <measures/Measures/MCDirection.h> -#include <measures/Measures/MCPosition.h> -#include <measures/Measures/MeasTable.h> -#include <measures/Measures/MeasConvert.h> -#include <measures/TableMeasures/ScalarMeasColumn.h> - -#include <ms/MeasurementSets/MSAntenna.h> -#if defined(casacore) -#include <ms/MSSel/MSSelection.h> -#include <ms/MSSel/MSAntennaParse.h> -#else -#include <ms/MeasurementSets/MSSelection.h> -#include <ms/MeasurementSets/MSAntennaParse.h> -#endif -#include <ms/MeasurementSets/MSAntennaColumns.h> -#include <ms/MeasurementSets/MSDataDescription.h> -#include <ms/MeasurementSets/MSDataDescColumns.h> -#include <ms/MeasurementSets/MSField.h> -#include <ms/MeasurementSets/MSFieldColumns.h> -#include <ms/MeasurementSets/MSObservation.h> -#include <ms/MeasurementSets/MSObsColumns.h> -#include <ms/MeasurementSets/MSPolarization.h> -#include <ms/MeasurementSets/MSPolColumns.h> -#include <ms/MeasurementSets/MSSpectralWindow.h> -#include <ms/MeasurementSets/MSSpWindowColumns.h> +#include <casacore/measures/Measures/MDirection.h> +#include 
<casacore/measures/Measures/MPosition.h> +#include <casacore/measures/Measures/MCDirection.h> +#include <casacore/measures/Measures/MCPosition.h> +#include <casacore/measures/Measures/MeasTable.h> +#include <casacore/measures/Measures/MeasConvert.h> +#include <casacore/measures/TableMeasures/ScalarMeasColumn.h> + +#include <stdexcept> + +#include <casacore/ms/MeasurementSets/MSAntenna.h> +#include <casacore/ms/MSSel/MSSelection.h> +#include <casacore/ms/MSSel/MSAntennaParse.h> +#include <casacore/ms/MeasurementSets/MSAntennaColumns.h> +#include <casacore/ms/MeasurementSets/MSDataDescription.h> +#include <casacore/ms/MeasurementSets/MSDataDescColumns.h> +#include <casacore/ms/MeasurementSets/MSField.h> +#include <casacore/ms/MeasurementSets/MSFieldColumns.h> +#include <casacore/ms/MeasurementSets/MSObservation.h> +#include <casacore/ms/MeasurementSets/MSObsColumns.h> +#include <casacore/ms/MeasurementSets/MSPolarization.h> +#include <casacore/ms/MeasurementSets/MSPolColumns.h> +#include <casacore/ms/MeasurementSets/MSSpectralWindow.h> +#include <casacore/ms/MeasurementSets/MSSpWindowColumns.h> namespace LOFAR { namespace StationResponse { -using namespace casa; +using namespace casacore; bool hasColumn(const Table &table, const string &column) { @@ -109,6 +106,32 @@ void transformToFieldCoordinates(TileAntenna::TileConfig &config, } } +AntennaField::CoordinateSystem readCoordinateSystemAartfaac( + const Table &table, unsigned int id) +{ + ROArrayQuantColumn<Double> c_position(table, "POSITION", "m"); + + // Read antenna field center (ITRF). 
+ Vector<Quantity> aips_position = c_position(id); + assert(aips_position.size() == 3); + + vector3r_t position = {{aips_position(0).getValue(), + aips_position(1).getValue(), aips_position(2).getValue()}}; + + TableRecord keywordset = table.keywordSet(); + Matrix<double> aips_axes; + keywordset.get("AARTFAAC_COORDINATE_AXES", aips_axes); + assert(aips_axes.shape().isEqual(IPosition(2, 3, 3))); + + vector3r_t p = {{aips_axes(0, 0), aips_axes(1, 0), aips_axes(2, 0)}}; + vector3r_t q = {{aips_axes(0, 1), aips_axes(1, 1), aips_axes(2, 1)}}; + vector3r_t r = {{aips_axes(0, 2), aips_axes(1, 2), aips_axes(2, 2)}}; + + AntennaField::CoordinateSystem system = {position, {p, q, r}}; + + return system; +} + AntennaField::CoordinateSystem readCoordinateSystem(const Table &table, unsigned int id) { @@ -175,7 +198,7 @@ AntennaField::Ptr readAntennaField(const Table &table, unsigned int id) DualDipoleAntenna::Ptr model(new DualDipoleAntenna()); field = AntennaField::Ptr(new AntennaFieldLBA(name, system, model)); } - else + else // HBA, HBA0, HBA1 { TileAntenna::TileConfig config = readTileConfig(table, id); transformToFieldCoordinates(config, system.axes); @@ -188,6 +211,36 @@ AntennaField::Ptr readAntennaField(const Table &table, unsigned int id) return field; } +AntennaField::Ptr readAntennaFieldAartfaac(const Table &table, const string &ant_type, + unsigned int id) +{ + AntennaField::Ptr field; + AntennaField::CoordinateSystem system = readCoordinateSystemAartfaac(table, id); + + if (ant_type == "LBA") + { + DualDipoleAntenna::Ptr model(new DualDipoleAntenna()); + field = AntennaField::Ptr(new AntennaFieldLBA(ant_type, system, model)); + } + else // HBA + { + // TODO: implement this + throw std::runtime_error("HBA for Aartfaac is not implemented yet."); + } + + // Add only one antenna to the field (no offset, always enabled) + AntennaField::Antenna antenna; + antenna.position[0] = 0.; + antenna.position[1] = 0.; + antenna.position[2] = 0.; + antenna.enabled[0] = true; + 
antenna.enabled[1] = true; + + field->addAntenna(antenna); + + return field; +} + void readStationPhaseReference(const Table &table, unsigned int id, const Station::Ptr &station) { @@ -226,16 +279,57 @@ Station::Ptr readStation(const MeasurementSet &ms, unsigned int id) readStationPhaseReference(ms.antenna(), id, station); // Read antenna field information. - Table tab_field = getSubTable(ms, "LOFAR_ANTENNA_FIELD"); - tab_field = tab_field(tab_field.col("ANTENNA_ID") == static_cast<Int>(id)); + ROScalarColumn<String> telescope_name_col(getSubTable(ms, "OBSERVATION"), + "TELESCOPE_NAME"); + string telescope_name = telescope_name_col(0); + + if (telescope_name == "LOFAR") + { + Table tab_field = getSubTable(ms, "LOFAR_ANTENNA_FIELD"); + tab_field = tab_field(tab_field.col("ANTENNA_ID") == static_cast<Int>(id)); - for(size_t i = 0; i < tab_field.nrow(); ++i) + for(size_t i = 0; i < tab_field.nrow(); ++i) + { + station->addField(readAntennaField(tab_field, i)); + } + } + else if (telescope_name == "AARTFAAC") { - station->addField(readAntennaField(tab_field, i)); + ROScalarColumn<String> ant_type_col(getSubTable(ms, "OBSERVATION"), + "AARTFAAC_ANTENNA_TYPE"); + string ant_type = ant_type_col(0); + + Table tab_field = getSubTable(ms, "ANTENNA"); + station -> addField(readAntennaFieldAartfaac(tab_field, ant_type, id)); } return station; } +MDirection readTileBeamDirection(const casacore::MeasurementSet &ms) { + MDirection tileBeamDir; + + Table fieldTable = getSubTable(ms, "FIELD"); + + if (fieldTable.nrow() != 1) { + throw std::runtime_error("MS has multiple fields, this does not work with the LOFAR beam library."); + } + + if (hasColumn(fieldTable, "LOFAR_TILE_BEAM_DIR")) + { + ROArrayMeasColumn<MDirection> tileBeamCol(fieldTable, + "LOFAR_TILE_BEAM_DIR"); + tileBeamDir = *(tileBeamCol(0).data()); + } + else + { + ROArrayMeasColumn<MDirection> tileBeamCol(fieldTable, + "DELAY_DIR"); + tileBeamDir = *(tileBeamCol(0).data()); + } + + return tileBeamDir; +} + } //# 
namespace StationResponse } //# namespace LOFAR diff --git a/CEP/Calibration/StationResponse/src/makeresponseimage.cc b/CEP/Calibration/StationResponse/src/makeresponseimage.cc index 91cf2e4789eb58ffe524628a1faba2fa364a4081..aa545ed0ca194e826fd40a80e320758ca6855a62 100644 --- a/CEP/Calibration/StationResponse/src/makeresponseimage.cc +++ b/CEP/Calibration/StationResponse/src/makeresponseimage.cc @@ -30,33 +30,33 @@ #include <Common/LofarLogger.h> #include <Common/SystemUtil.h> #include <Common/Version.h> -#include <coordinates/Coordinates/CoordinateSystem.h> -#include <coordinates/Coordinates/SpectralCoordinate.h> -#include <coordinates/Coordinates/StokesCoordinate.h> -#include <coordinates/Coordinates/DirectionCoordinate.h> -#include <images/Images/PagedImage.h> -#include <measures/Measures/MCDirection.h> -#include <measures/Measures/MCPosition.h> -#include <measures/Measures/MDirection.h> -#include <measures/Measures/MeasConvert.h> -#include <measures/Measures/MeasTable.h> -#include <measures/Measures/MEpoch.h> -#include <measures/Measures/MPosition.h> -#include <ms/MeasurementSets/MeasurementSet.h> -#include <ms/MeasurementSets/MSDataDescription.h> -#include <ms/MeasurementSets/MSDataDescColumns.h> -#include <ms/MeasurementSets/MSField.h> -#include <ms/MeasurementSets/MSFieldColumns.h> -#include <ms/MeasurementSets/MSObservation.h> -#include <ms/MeasurementSets/MSObsColumns.h> -#include <ms/MeasurementSets/MSSpectralWindow.h> -#include <ms/MeasurementSets/MSSpWindowColumns.h> -#include <tables/Tables/ExprNode.h> +#include <casacore/coordinates/Coordinates/CoordinateSystem.h> +#include <casacore/coordinates/Coordinates/SpectralCoordinate.h> +#include <casacore/coordinates/Coordinates/StokesCoordinate.h> +#include <casacore/coordinates/Coordinates/DirectionCoordinate.h> +#include <casacore/images/Images/PagedImage.h> +#include <casacore/measures/Measures/MCDirection.h> +#include <casacore/measures/Measures/MCPosition.h> +#include 
<casacore/measures/Measures/MDirection.h> +#include <casacore/measures/Measures/MeasConvert.h> +#include <casacore/measures/Measures/MeasTable.h> +#include <casacore/measures/Measures/MEpoch.h> +#include <casacore/measures/Measures/MPosition.h> +#include <casacore/ms/MeasurementSets/MeasurementSet.h> +#include <casacore/ms/MeasurementSets/MSDataDescription.h> +#include <casacore/ms/MeasurementSets/MSDataDescColumns.h> +#include <casacore/ms/MeasurementSets/MSField.h> +#include <casacore/ms/MeasurementSets/MSFieldColumns.h> +#include <casacore/ms/MeasurementSets/MSObservation.h> +#include <casacore/ms/MeasurementSets/MSObsColumns.h> +#include <casacore/ms/MeasurementSets/MSSpectralWindow.h> +#include <casacore/ms/MeasurementSets/MSSpWindowColumns.h> +#include <casacore/tables/Tables/ExprNode.h> // There is no wrapped include file lofar_iterator.h. #include <iterator> -using namespace casa; +using namespace casacore; using namespace LOFAR; using namespace LOFAR::StationResponse; using LOFAR::operator<<; @@ -131,18 +131,18 @@ namespace /*! * \brief Convert an ITRF position given as a StationResponse::vector3r_t - * instance to a casa::MPosition. + * instance to a casacore::MPosition. */ MPosition toMPositionITRF(const vector3r_t &position); /*! - * \brief Convert a casa::MPosition instance to a + * \brief Convert a casacore::MPosition instance to a * StationResponse::vector3r_t instance. */ vector3r_t fromMPosition(const MPosition &position); /*! - * \brief Convert a casa::MDirection instance to a + * \brief Convert a casacore::MDirection instance to a * StationResponse::vector3r_t instance. 
*/ vector3r_t fromMDirection(const MDirection &direction); diff --git a/CEP/Calibration/pystationresponse/CMakeLists.txt b/CEP/Calibration/pystationresponse/CMakeLists.txt index c2f387f96dd82c332a6a834325bca78aa27e9726..3549dded7b503aeb5ff73b08f5512d2fe64ec7e1 100644 --- a/CEP/Calibration/pystationresponse/CMakeLists.txt +++ b/CEP/Calibration/pystationresponse/CMakeLists.txt @@ -3,9 +3,9 @@ lofar_package(pystationresponse 1.0 DEPENDS StationResponse) include(LofarFindPackage) -lofar_find_package(Boost REQUIRED COMPONENTS python) lofar_find_package(Python 2.6 REQUIRED) -lofar_find_package(Pyrap REQUIRED) +lofar_find_package(Boost REQUIRED COMPONENTS python) +lofar_find_package(Casacore REQUIRED COMPONENTS python) add_subdirectory(src) add_subdirectory(test) diff --git a/CEP/Calibration/pystationresponse/src/__init__.py b/CEP/Calibration/pystationresponse/src/__init__.py index a3e5a59bbd6a680a804bc8102892ca96e6f8495b..87961234ba1774f50361024fa1c918deebf67484 100755 --- a/CEP/Calibration/pystationresponse/src/__init__.py +++ b/CEP/Calibration/pystationresponse/src/__init__.py @@ -19,7 +19,7 @@ # # $Id$ -import _stationresponse +from ._stationresponse import StationResponse class stationresponse(object): """ @@ -64,7 +64,7 @@ class stationresponse(object): time = subtable.getcell("TIME", 0) print time, response.evaluateChannel(time, 0, 0) """ - self._response = _stationresponse.StationResponse(msname, inverse, + self._response = StationResponse(msname, inverse, useElementResponse, useArrayFactor, useChanFreq) def version (self, type='other'): diff --git a/CEP/Calibration/pystationresponse/test/CMakeLists.txt b/CEP/Calibration/pystationresponse/test/CMakeLists.txt index d6da7e50cd0f2f61fc170023180b5a89addb6dad..c902b87fa880df69c9ea5972a30fba43bde5d70f 100644 --- a/CEP/Calibration/pystationresponse/test/CMakeLists.txt +++ b/CEP/Calibration/pystationresponse/test/CMakeLists.txt @@ -2,4 +2,14 @@ include(LofarCTest) -#lofar_add_test(tStationBeamNCP) # test not enabled 
because pyrap is not found +include(FindPythonModule) + +find_python_module(pyrap) +if(PYTHON_PYRAP_FOUND) + #This test is disabled due to boost-python linking problems on CEP3 + #lofar_add_test(tStationBeamNCP) +else(PYTHON_PYRAP_FOUND) + message(WARNING "Python-casacore was not found, disabling tStationBeamNCP") +endif(PYTHON_PYRAP_FOUND) + +lofar_add_test(tpystationresponse) diff --git a/CEP/Calibration/pystationresponse/test/tStationBeamNCP.py b/CEP/Calibration/pystationresponse/test/tStationBeamNCP.py index f16fbf80fa5475e7fe7a54f47a9c6f0129dbc2d4..81e76b0c26afb8496e68b6941d9fb057104a8aef 100644 --- a/CEP/Calibration/pystationresponse/test/tStationBeamNCP.py +++ b/CEP/Calibration/pystationresponse/test/tStationBeamNCP.py @@ -1,4 +1,5 @@ "Test the Station Beam at the NCP. Rationale: when pointing at the NCP all stations should have (almost) the same beam" +from __future__ import print_function import sys @@ -17,8 +18,8 @@ a=[mys.evaluateStation(time=times[0],station=st) for st in range(20)] for a1 in a: for a2 in a: - if np.linalg.norm(a1-a2)>1.e-3: - print "a1=",a1,"\na2=",a2,"\nnorm=",np.linalg.norm(a1-a2) - sys.exit(1) + if np.linalg.norm(a1-a2)>1.e-3: + print("a1=",a1,"\na2=",a2,"\nnorm=",np.linalg.norm(a1-a2)) + sys.exit(1) sys.exit(0) diff --git a/CEP/Calibration/pystationresponse/test/tpystationresponse.py b/CEP/Calibration/pystationresponse/test/tpystationresponse.py new file mode 100644 index 0000000000000000000000000000000000000000..124019cde47864809c455e288cfb7cd8fb60bf4f --- /dev/null +++ b/CEP/Calibration/pystationresponse/test/tpystationresponse.py @@ -0,0 +1,5 @@ +from __future__ import print_function + +import lofar.stationresponse + + diff --git a/CEP/Calibration/pystationresponse/test/tpystationresponse.sh b/CEP/Calibration/pystationresponse/test/tpystationresponse.sh new file mode 100755 index 0000000000000000000000000000000000000000..f967776d2c39cf2a894a13444bb6bd9a53d5fc6c --- /dev/null +++ 
b/CEP/Calibration/pystationresponse/test/tpystationresponse.sh @@ -0,0 +1,2 @@ +#!/bin/sh +./runctest.sh tpystationresponse diff --git a/CEP/DP3/CMakeLists.txt b/CEP/DP3/CMakeLists.txt index e71ce7ee44ab4c75ee271dae06657761caaf6e9c..d66c282e71e7b17be773fe0fad36d470eae3866b 100644 --- a/CEP/DP3/CMakeLists.txt +++ b/CEP/DP3/CMakeLists.txt @@ -10,8 +10,9 @@ lofar_find_package(Armadillo) if(${ARMADILLO_FOUND}) if(CMAKE_CXX_FLAGS MATCHES ".*\\+\\+11.*") lofar_add_package(DPPP_DDECal) +lofar_add_package(DPPP_Interpolate) else() - message(WARNING "DPPP_DDECal will not build if you have no C++11 support, please build in directory gnucxx11_opt") + message(WARNING "DPPP_DDECal and DPPP_Interpolate will not build if you have no C++11 support, please build in directory gnucxx11_opt") endif() else() message(WARNING "Armadillo was not found, NOT building DPPP_DDECal") diff --git a/CEP/DP3/DPPP/include/DPPP/CMakeLists.txt b/CEP/DP3/DPPP/include/DPPP/CMakeLists.txt index e8751997627009bb40da412a5e228e7b7015d97f..d760372fb01e50e01a557235c2dc92b76cd088dc 100644 --- a/CEP/DP3/DPPP/include/DPPP/CMakeLists.txt +++ b/CEP/DP3/DPPP/include/DPPP/CMakeLists.txt @@ -17,8 +17,8 @@ set(inst_HEADERS ApplyBeam.h ApplyBeam.tcc Predict.h OneApplyCal.h GainCal.h StefCal.h PhaseFitter.h - StManParsetKeys.h H5Parm.h DummyStep.h H5ParmPredict.h - Upsample.h + StManParsetKeys.h H5Parm.h DummyStep.h H5ParmPredict.h GridInterpolate.h + Upsample.h Split.h ) # Create symbolic link to include directory. diff --git a/CEP/DP3/DPPP/include/DPPP/DPInput.h b/CEP/DP3/DPPP/include/DPPP/DPInput.h index 2a439714ad397e07cd4c8baf8d7fbbdb05ee9a33..4f1370ca9d62fa35a43081abf96253da958cc344 100644 --- a/CEP/DP3/DPPP/include/DPPP/DPInput.h +++ b/CEP/DP3/DPPP/include/DPPP/DPInput.h @@ -63,6 +63,9 @@ namespace LOFAR { class DPInput: public DPStep { public: + // Define the shared pointer for this type. + typedef shared_ptr<DPInput> ShPtr; + virtual ~DPInput(); // Read the UVW at the given row numbers into the buffer. 
diff --git a/CEP/DP3/DPPP/include/DPPP/DPRun.h b/CEP/DP3/DPPP/include/DPPP/DPRun.h index 06121de8e4becf3470a3b7f9c7516d5e4f7d410c..53a85043bbd3058c99c176ae45eee1823000a939 100644 --- a/CEP/DP3/DPPP/include/DPPP/DPRun.h +++ b/CEP/DP3/DPPP/include/DPPP/DPRun.h @@ -64,10 +64,12 @@ namespace LOFAR { static void execute (const std::string& parsetName, int argc=0, char* argv[] = 0); - private: // Create the step objects. - static DPStep::ShPtr makeSteps (const ParameterSet& parset); + static DPStep::ShPtr makeSteps (const ParameterSet& parset, + const std::string& prefix, + DPInput* reader); + private: // Create an output step, either an MSWriter or an MSUpdater // If no data are modified (for example if only count was done), // still an MSUpdater is created, but it will not write anything. @@ -79,7 +81,7 @@ namespace LOFAR { // If there is a writer, the reader needs to read the visibility data. // reader should be the original reader static DPStep::ShPtr makeOutputStep(MSReader* reader, - const ParameterSet& parset, const string& prefix, bool multipleInputs, + const ParameterSet& parset, const string& prefix, casacore::String& currentMSName); // The map to create a step object from its type name. diff --git a/CEP/DP3/DPPP/include/DPPP/GridInterpolate.h b/CEP/DP3/DPPP/include/DPPP/GridInterpolate.h new file mode 100644 index 0000000000000000000000000000000000000000..cfd05379232468c37b20e9281adee20df4f75d69 --- /dev/null +++ b/CEP/DP3/DPPP/include/DPPP/GridInterpolate.h @@ -0,0 +1,67 @@ +//# GridInterpolate.h: Interpolate data from regular 2d grid to another +//# Copyright (C) 2018 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. 
+//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id: GridInterpolate.h 37169 2017-04-19 12:41:21Z dijkema $ +//# +//# @author Tammo Jan Dijkema + +#ifndef DPPP_GRIDINTERPOLATE_H +#define DPPP_GRIDINTERPOLATE_H + +// @file +// @brief Interpolate data from regular 2d grid to another + +#include <vector> +#include <cassert> +#include <stdexcept> + +namespace LOFAR { + //! Get the nearest-neighbor indices + ///*! \param ax_src[in] Vector with points where the data is defined. + // Should be increasing. + // * \param ax_tgt[in] Vector with the points at which the values are + // needed. Should be increasing. + // * \param[out] indices Vector (same length as ax_tgt) with for each number + // in ax_src, the index of the nearest point in ax_src. + // * \param[in] nearest Get the nearest point. If false, gets the largest + // point that is smaller. + // */ + void getAxisIndices(const std::vector<double>& ax_src, + const std::vector<double>& ax_tgt, + std::vector<size_t>& indices, + bool nearest = true); + + //! Regrid 2d-gridded data onto another 2d grid + /*! 
\param[in] x_src x-axis on which the data is defined + * \param[in] y_src y-axis on which the data is defined + * \param[in] x_tgt x-axis on which the data will be evaluated + * \param[in] y_tgt y-axis on which the data will be evaluated + * \param[in] vals_src original data, y-axis varies fastest + * \param[out] vals_tgt regridded data, y-axis varies fastest + */ + void gridNearestNeighbor(const std::vector<double>& x_src, + const std::vector<double>& y_src, + const std::vector<double>& x_tgt, + const std::vector<double>& y_tgt, + const double* vals_src, + double* vals_tgt, + bool nearest = true); +} + +#endif diff --git a/CEP/DP3/DPPP/include/DPPP/H5Parm.h b/CEP/DP3/DPPP/include/DPPP/H5Parm.h index 5a28d1f13d672039a42d34d51c161c2a77f1118c..d2a3a3930886394b5538e0d475d65bc64c07259f 100644 --- a/CEP/DP3/DPPP/include/DPPP/H5Parm.h +++ b/CEP/DP3/DPPP/include/DPPP/H5Parm.h @@ -162,6 +162,12 @@ namespace LOFAR { pol, dir); } + std::vector<double> getValuesOrWeights( + const std::string& valOrWeight, + const std::string& antName, + const std::vector<double>& times, + const std::vector<double>& freqs, + uint pol, uint dir); private: // Get the values or weights of this SolTab for a given antenna. 
std::vector<double> getValuesOrWeights( diff --git a/CEP/DP3/DPPP/include/DPPP/MSWriter.h b/CEP/DP3/DPPP/include/DPPP/MSWriter.h index 49004f9d7795c3946c177a88935d6742c7016a5c..08500b34fe78e484c435d1e85a3e2edb3a35f32f 100644 --- a/CEP/DP3/DPPP/include/DPPP/MSWriter.h +++ b/CEP/DP3/DPPP/include/DPPP/MSWriter.h @@ -170,7 +170,7 @@ namespace LOFAR { string itsOutName; DPBuffer itsBuffer; casacore::Table itsMS; - const ParameterSet& itsParset; //# parset for writing history + ParameterSet itsParset; //# parset for writing history casacore::String itsDataColName; casacore::String itsWeightColName; double itsInterval; diff --git a/CEP/DP3/DPPP/include/DPPP/OneApplyCal.h b/CEP/DP3/DPPP/include/DPPP/OneApplyCal.h index 77d90978e3aa66eac729a5b75cd84cf566e493c7..2170a7f8630fa818ce556b0978f38f2be7e461e5 100644 --- a/CEP/DP3/DPPP/include/DPPP/OneApplyCal.h +++ b/CEP/DP3/DPPP/include/DPPP/OneApplyCal.h @@ -94,6 +94,10 @@ namespace LOFAR { // Check the number of polarizations in the parmdb or h5parm uint nPol(const std::string& parmName); + // Replace values by NaN on places where weight is zero + static void applyFlags(std::vector<double>& values, + const std::vector<double>& weights); + static std::string correctTypeToString(CorrectType); static CorrectType stringToCorrectType(const string&); @@ -103,10 +107,12 @@ namespace LOFAR { string itsName; string itsParmDBName; bool itsUseH5Parm; + string itsSolSetName; boost::shared_ptr<BBS::ParmFacade> itsParmDB; H5Parm itsH5Parm; string itsSolTabName; H5Parm::SolTab itsSolTab; + H5Parm::SolTab itsSolTab2; // in the case of full Jones, amp and phase table need to be open CorrectType itsCorrectType; bool itsInvert; uint itsTimeSlotsPerParmUpdate; diff --git a/CEP/DP3/DPPP/include/DPPP/PhaseFitter.h b/CEP/DP3/DPPP/include/DPPP/PhaseFitter.h index 3a70a964e9e5556d98bcb8697d6805be1f1a3281..93caa23c16b3e948654de3aed0f5f952027b16c3 100644 --- a/CEP/DP3/DPPP/include/DPPP/PhaseFitter.h +++ b/CEP/DP3/DPPP/include/DPPP/PhaseFitter.h @@ -193,7 
+193,9 @@ class PhaseFitter * * @returns Array of @ref Size() doubles with the weights. */ - double* WeightData() { return _weights.data(); } + double* WeightData() { + return _weights.data(); + } /** * Constant array of weights, as described above. diff --git a/CEP/DP3/DPPP/include/DPPP/Split.h b/CEP/DP3/DPPP/include/DPPP/Split.h new file mode 100644 index 0000000000000000000000000000000000000000..f35b956e1ffb30cc6c6cf81cbc38728b5545e77c --- /dev/null +++ b/CEP/DP3/DPPP/include/DPPP/Split.h @@ -0,0 +1,81 @@ +//# Split.h: DPPP step class to Split visibilities from a source model +//# Copyright (C) 2013 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id: +//# +//# @author Tammo Jan Dijkema + +#ifndef DPPP_Split_H +#define DPPP_Split_H + +// @file +// @brief DPPP step class to Split visibilities from a source model + +#include <DPPP/DPInput.h> +#include <DPPP/DPBuffer.h> + +#include <utility> + +namespace LOFAR { + + class ParameterSet; + + namespace DPPP { + // @ingroup NDPPP + + // This class is an empty DPStep subclass to use as implementation template + + class Split: public DPStep + { + public: + // Construct the object. 
+ // Parameters are obtained from the parset using the given prefix. + Split (DPInput*, const ParameterSet&, const string& prefix); + + virtual ~Split(); + + // Process the data. + // It keeps the data. + // When processed, it invokes the process function of the next step. + virtual bool process (const DPBuffer&); + + // Finish the processing of this step and subsequent steps. + virtual void finish(); + + // Update the general info. + virtual void updateInfo (const DPInfo&); + + // Show the step parameters. + virtual void show (std::ostream&) const; + + // Show the timings. + virtual void showTimings (std::ostream&, double duration) const; + + private: + //# Data members. + string itsName; + + std::vector<std::string> itsReplaceParms; // The names of the parameters that differ along the substeps + std::vector<DPStep::ShPtr> itsSubsteps; + }; + + } //# end namespace +} + +#endif diff --git a/CEP/DP3/DPPP/include/DPPP/StManParsetKeys.h b/CEP/DP3/DPPP/include/DPPP/StManParsetKeys.h index b9849826eb89cc814bf793752aec5d4bd5d78db5..158b0e022ca712f9b62288dede17372f375ae792 100644 --- a/CEP/DP3/DPPP/include/DPPP/StManParsetKeys.h +++ b/CEP/DP3/DPPP/include/DPPP/StManParsetKeys.h @@ -20,7 +20,9 @@ namespace LOFAR { void Set(const ParameterSet& parset, const std::string& prefix) { - stManName = toLower(parset.getString(prefix+"storagemanager", string())); + stManName = toLower(parset.getString(prefix+"storagemanager", + parset.getString(prefix+"storagemanager.name", + string()))); if(stManName == "dysco") { dyscoDataBitRate = parset.getInt( prefix+"storagemanager.databitrate", 10); diff --git a/CEP/DP3/DPPP/src/CMakeLists.txt b/CEP/DP3/DPPP/src/CMakeLists.txt index d250a3d1045cb5286e0d77c8e485ae51948625f8..cb4eb74985e8350cae09c0645126edb1334f73fc 100644 --- a/CEP/DP3/DPPP/src/CMakeLists.txt +++ b/CEP/DP3/DPPP/src/CMakeLists.txt @@ -20,8 +20,8 @@ lofar_add_library(dppp DemixerNew.cc DemixInfo.cc DemixWorker.cc Predict.cc OneApplyCal.cc ApplyBeam.cc - PhaseFitter.cc H5Parm.cc 
SolTab.cc DummyStep.cc H5ParmPredict.cc - Upsample.cc + PhaseFitter.cc H5Parm.cc SolTab.cc DummyStep.cc H5ParmPredict.cc GridInterpolate.cc + Upsample.cc Split.cc ) lofar_add_bin_program(NDPPP NDPPP.cc) diff --git a/CEP/DP3/DPPP/src/DPRun.cc b/CEP/DP3/DPPP/src/DPRun.cc index f25bb67283b854d5654a0ca3c1fe9f90d29a4a6e..809e2b8220f56388d316d5332b42071dd167e73b 100644 --- a/CEP/DP3/DPPP/src/DPRun.cc +++ b/CEP/DP3/DPPP/src/DPRun.cc @@ -43,6 +43,7 @@ #include <DPPP/Predict.h> #include <DPPP/H5ParmPredict.h> #include <DPPP/GainCal.h> +#include <DPPP/Split.h> #include <DPPP/Upsample.h> #include <DPPP/Filter.h> #include <DPPP/Counter.h> @@ -129,8 +130,12 @@ namespace LOFAR { uint numThreads = parset.getInt("numthreads", OpenMP::maxThreads()); OpenMP::setNumThreads(numThreads); - // Create the steps and fill their DPInfo objects. - DPStep::ShPtr firstStep = makeSteps (parset); + // Create the steps, link them toggether + DPStep::ShPtr firstStep = makeSteps (parset, "", 0); + + // Let all steps fill their DPInfo object using the info from the previous step. + DPInfo lastInfo = firstStep->setInfo (DPInfo()); + // Show the steps. DPStep::ShPtr step = firstStep; DPStep::ShPtr lastStep; @@ -226,58 +231,62 @@ namespace LOFAR { // The destructors are called automatically at this point. } - DPStep::ShPtr DPRun::makeSteps (const ParameterSet& parset) + DPStep::ShPtr DPRun::makeSteps (const ParameterSet& parset, + const string& prefix, + DPInput* reader) { DPStep::ShPtr firstStep; DPStep::ShPtr lastStep; - // Get input and output MS name. - // Those parameters were always called msin and msout. - // However, SAS/MAC cannot handle a parameter and a group with the same - // name, hence one can also use msin.name and msout.name. 
- vector<string> inNames = parset.getStringVector ("msin.name", - vector<string>()); - if (inNames.empty()) { - inNames = parset.getStringVector ("msin"); - } - ASSERTSTR (inNames.size() > 0, "No input MeasurementSets given"); - // Find all file names matching a possibly wildcarded input name. - // This is only possible if a single name is given. - if (inNames.size() == 1) { - if (inNames[0].find_first_of ("*?{['") != string::npos) { - vector<string> names; - names.reserve (80); - casacore::Path path(inNames[0]); - casacore::String dirName(path.dirName()); - casacore::Directory dir(dirName); - // Use the basename as the file name pattern. - casacore::DirectoryIterator dirIter (dir, - casacore::Regex::fromPattern(path.baseName())); - while (!dirIter.pastEnd()) { - names.push_back (dirName + '/' + dirIter.name()); - dirIter++; + if (!reader) { + // Get input and output MS name. + // Those parameters were always called msin and msout. + // However, SAS/MAC cannot handle a parameter and a group with the same + // name, hence one can also use msin.name and msout.name. + vector<string> inNames = parset.getStringVector ("msin.name", + vector<string>()); + if (inNames.empty()) { + inNames = parset.getStringVector ("msin"); + } + ASSERTSTR (inNames.size() > 0, "No input MeasurementSets given"); + // Find all file names matching a possibly wildcarded input name. + // This is only possible if a single name is given. + if (inNames.size() == 1) { + if (inNames[0].find_first_of ("*?{['") != string::npos) { + vector<string> names; + names.reserve (80); + casacore::Path path(inNames[0]); + casacore::String dirName(path.dirName()); + casacore::Directory dir(dirName); + // Use the basename as the file name pattern. 
+ casacore::DirectoryIterator dirIter (dir, + casacore::Regex::fromPattern(path.baseName())); + while (!dirIter.pastEnd()) { + names.push_back (dirName + '/' + dirIter.name()); + dirIter++; + } + ASSERTSTR (!names.empty(), "No datasets found matching msin " + << inNames[0]); + inNames = names; } - ASSERTSTR (!names.empty(), "No datasets found matching msin " - << inNames[0]); - inNames = names; } - } - // Get the steps. - vector<string> steps = parset.getStringVector ("steps"); - // Currently the input MS must be given. - // In the future it might be possible to have a simulation step instead. - // Create MSReader step if input ms given. - MSReader* reader = 0; - if (inNames.size() == 1) { - reader = new MSReader (inNames[0], parset, "msin."); - } else { - reader = new MultiMSReader (inNames, parset, "msin."); + // Get the steps. + // Currently the input MS must be given. + // In the future it might be possible to have a simulation step instead. + // Create MSReader step if input ms given. + if (inNames.size() == 1) { + reader = new MSReader (inNames[0], parset, "msin."); + } else { + reader = new MultiMSReader (inNames, parset, "msin."); + } + firstStep = DPStep::ShPtr (reader); } + casacore::Path pathIn (reader->msName()); casacore::String currentMSName (pathIn.absoluteName()); // Create the other steps. 
- firstStep = DPStep::ShPtr (reader); + vector<string> steps = parset.getStringVector (prefix + "steps"); lastStep = firstStep; DPStep::ShPtr step; for (vector<string>::const_iterator iter = steps.begin(); @@ -329,27 +338,33 @@ namespace LOFAR { step = DPStep::ShPtr(new GainCal (reader, parset, prefix)); } else if (type == "upsample") { step = DPStep::ShPtr(new Upsample (reader, parset, prefix)); - } else if (type == "out" || type=="output") { - step = makeOutputStep(reader, parset, prefix, - inNames.size()>1, currentMSName); + } else if (type == "split" || type == "explode") { + step = DPStep::ShPtr(new Split (reader, parset, prefix)); + } else if (type == "out" || type=="output" || type=="msout") { + step = makeOutputStep(dynamic_cast<MSReader*>(reader), parset, prefix, currentMSName); } else { // Maybe the step is defined in a dynamic library. step = findStepCtor(type) (reader, parset, prefix); } - lastStep->setNextStep (step); + if (lastStep) { + lastStep->setNextStep (step); + } lastStep = step; // Define as first step if not defined yet. if (!firstStep) { firstStep = step; } } - step = makeOutputStep(reader, parset, "msout.", - inNames.size()>1, currentMSName); - lastStep->setNextStep (step); - lastStep = step; - - // Let all steps fill their info using the info from the previous step. - DPInfo lastInfo = firstStep->setInfo (DPInfo()); + // Add an output step if not explicitly added in steps (unless last step is a 'split' step) + if (steps.size()==0 || ( + steps[steps.size()-1] != "out" && + steps[steps.size()-1] != "output" && + steps[steps.size()-1] != "msout" && + steps[steps.size()-1] != "split")) { + step = makeOutputStep(dynamic_cast<MSReader*>(reader), parset, "msout.", currentMSName); + lastStep->setNextStep (step); + lastStep = step; + } // Add a null step, so the last step can use getNextStep->process(). 
DPStep::ShPtr nullStep(new NullStep()); @@ -364,7 +379,6 @@ namespace LOFAR { DPStep::ShPtr DPRun::makeOutputStep (MSReader* reader, const ParameterSet& parset, const string& prefix, - bool multipleInputs, casacore::String& currentMSName) { DPStep::ShPtr step; @@ -396,9 +410,8 @@ namespace LOFAR { // Create MSUpdater. // Take care the history is not written twice. // Note that if there is nothing to write, the updater won't do anything. - ASSERTSTR (! multipleInputs, - "No update can be done if multiple input MSs are used"); - step = DPStep::ShPtr(new MSUpdater(reader, outName, parset, prefix, + step = DPStep::ShPtr(new MSUpdater(dynamic_cast<MSReader*>(reader), + outName, parset, prefix, outName!=currentMSName)); } else { step = DPStep::ShPtr(new MSWriter (reader, outName, parset, prefix)); diff --git a/CEP/DP3/DPPP/src/GainCal.cc b/CEP/DP3/DPPP/src/GainCal.cc index 0b367d6ee5efb3de713ed1f6d0f48175dd22a679..fa382903f8176a1e4e836173732d93b1ae2cf981 100644 --- a/CEP/DP3/DPPP/src/GainCal.cc +++ b/CEP/DP3/DPPP/src/GainCal.cc @@ -943,21 +943,21 @@ namespace LOFAR { vector<H5Parm::AxisInfo> axes; axes.push_back(H5Parm::AxisInfo("time", itsSols.size())); axes.push_back(H5Parm::AxisInfo("freq", nSolFreqs)); - axes.push_back(H5Parm::AxisInfo("ant", info().nantenna())); + axes.push_back(H5Parm::AxisInfo("ant", info().antennaUsed().size())); if (nPol>1) { axes.push_back(H5Parm::AxisInfo("pol", nPol)); } vector<H5Parm::SolTab> soltabs = makeSolTab(h5parm, itsMode, axes); - std::vector<std::string> antennaNames; - for (uint st = 0; st<info().antennaNames().size(); ++st) { - antennaNames.push_back(info().antennaNames()[st]); + std::vector<std::string> antennaUsedNames; + for (uint st = 0; st<info().antennaUsed().size(); ++st) { + antennaUsedNames.push_back(info().antennaNames()[info().antennaUsed()[st]]); } vector<H5Parm::SolTab>::iterator soltabiter = soltabs.begin(); for (; soltabiter != soltabs.end(); ++soltabiter) { - (*soltabiter).setAntennas(antennaNames); + 
(*soltabiter).setAntennas(antennaUsedNames); if (nPol>1) { (*soltabiter).setPolarizations(polarizations); } @@ -979,11 +979,11 @@ namespace LOFAR { "step " + itsName + " in parset: \n" + itsParsetString; if (itsMode==TEC || itsMode==TECANDPHASE) { - vector<double> tecsols(nSolFreqs*antennaNames.size()*nSolTimes*nPol); - vector<double> weights(nSolFreqs*antennaNames.size()*nSolTimes*nPol, 1.); + vector<double> tecsols(nSolFreqs*antennaUsedNames.size()*nSolTimes*nPol); + vector<double> weights(nSolFreqs*antennaUsedNames.size()*nSolTimes*nPol, 1.); vector<double> phasesols; if (itsMode==TECANDPHASE) { - phasesols.resize(nSolFreqs*antennaNames.size()*nSolTimes*nPol); + phasesols.resize(nSolFreqs*antennaUsedNames.size()*nSolTimes*nPol); } size_t i=0; for (uint time=0; time<nSolTimes; ++time) { @@ -1008,8 +1008,8 @@ namespace LOFAR { soltabs[1].setValues(phasesols, weights, historyString); } } else { - vector<DComplex> sols(nSolFreqs*antennaNames.size()*nSolTimes*nPol); - vector<double> weights(nSolFreqs*antennaNames.size()*nSolTimes*nPol, 1.); + vector<DComplex> sols(nSolFreqs*antennaUsedNames.size()*nSolTimes*nPol); + vector<double> weights(nSolFreqs*antennaUsedNames.size()*nSolTimes*nPol, 1.); size_t i=0; for (uint time=0; time<nSolTimes; ++time) { for (uint freqCell=0; freqCell<nSolFreqs; ++freqCell) { diff --git a/CEP/DP3/DPPP/src/GridInterpolate.cc b/CEP/DP3/DPPP/src/GridInterpolate.cc new file mode 100644 index 0000000000000000000000000000000000000000..158451aad65db5f9ce2a068ea3fd4d7cbe67d5c3 --- /dev/null +++ b/CEP/DP3/DPPP/src/GridInterpolate.cc @@ -0,0 +1,102 @@ +//# GridInterpolate.cc: Interpolate data from regular 2d grid to another +//# Copyright (C) 2018 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. 
+//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id: GridInterpolate.cc 37169 2017-04-19 12:41:21Z dijkema $ +//# + +#include <DPPP/GridInterpolate.h> + +#include <iostream> +#include <vector> +#include <cassert> +#include <stdexcept> + +using namespace std; + +namespace LOFAR { + void getAxisIndices(const vector<double>& ax_src, + const vector<double>& ax_tgt, + vector<size_t>& indices, + bool nearest) { + indices.resize(ax_tgt.size()); + if (ax_tgt.empty()) { + return; + } + assert(!ax_src.empty()); + + double lowmatch, highmatch; + + vector<double>::const_iterator src_val = ax_src.begin(); + vector<double>::const_iterator tgt_val = ax_tgt.begin(); + vector<size_t>::iterator index_val = indices.begin(); + + while (tgt_val != ax_tgt.end()) { + while (*src_val < *tgt_val && src_val != ax_src.end()) { + src_val++; + } + if (src_val == ax_src.begin()) { + *index_val = src_val - ax_src.begin(); + } else if (src_val == ax_src.end()) { + *index_val = src_val - ax_src.begin() - 1; + } else { + if (nearest) { + lowmatch = *(src_val-1); + highmatch = *src_val; + + if (highmatch - *tgt_val < *tgt_val - lowmatch) { + *index_val = src_val - ax_src.begin(); + } else { + *index_val = src_val - ax_src.begin() - 1; + } + } else { + *index_val = src_val - ax_src.begin() - 1; + } + } + tgt_val++; index_val++; + } + } + + void 
gridNearestNeighbor(const vector<double>& x_src, + const vector<double>& y_src, + const vector<double>& x_tgt, + const vector<double>& y_tgt, + const double* vals_src, + double* vals_tgt, + bool nearest) { + vector<size_t> x_indices; + vector<size_t> y_indices; + getAxisIndices(x_src, x_tgt, x_indices, nearest); + getAxisIndices(y_src, y_tgt, y_indices, nearest); + + size_t nx = x_tgt.size(); + size_t ny = y_tgt.size(); + size_t ny_src = y_src.size(); + // y varies fastest + + if (nearest) { + for (size_t i=0; i<nx; ++i) { + for (size_t j=0; j<ny; ++j) { + vals_tgt[i*ny+j] = vals_src[x_indices[i]*ny_src + y_indices[j]]; + } + } + } else { + throw std::logic_error("Not implemented"); + } + } +} diff --git a/CEP/DP3/DPPP/src/H5ParmPredict.cc b/CEP/DP3/DPPP/src/H5ParmPredict.cc index 81ff212b202e709a635f3f7c43463826ab8d70ec..215b95bf367d54b0e970f2abc9242c371e8dd0b6 100644 --- a/CEP/DP3/DPPP/src/H5ParmPredict.cc +++ b/CEP/DP3/DPPP/src/H5ParmPredict.cc @@ -56,7 +56,7 @@ namespace LOFAR { vector<string> h5directions = soltab.getStringAxis("dir"); - string operation = parset.getString("operation", "replace"); + string operation = parset.getString(prefix+"operation", "replace"); if (itsDirections.empty()) { itsDirections = h5directions; @@ -83,6 +83,8 @@ namespace LOFAR { if (operation=="replace" && i>0) { predictStep->setOperation("add"); + } else { + predictStep->setOperation(operation); } itsPredictSteps.push_back(Predict::ShPtr(predictStep)); @@ -117,7 +119,9 @@ namespace LOFAR { os << "H5ParmPredict " << itsName << endl; os << " H5Parm: " << itsH5ParmName << endl; os << " directions: " << itsDirections << endl; - itsPredictSteps[0]->show(os); + for (uint dir=0; dir<itsPredictSteps.size(); ++dir) { + itsPredictSteps[dir]->show(os); + } } void H5ParmPredict::showTimings (std::ostream& os, double duration) const diff --git a/CEP/DP3/DPPP/src/MSReader.cc b/CEP/DP3/DPPP/src/MSReader.cc index a3d774878cf7ab246b47712566775f1646a128e2..4cd4785a0103884ed46df1977350fc4b7bf588d4 
100644 --- a/CEP/DP3/DPPP/src/MSReader.cc +++ b/CEP/DP3/DPPP/src/MSReader.cc @@ -558,12 +558,9 @@ namespace LOFAR { ROArrayMeasColumn<MDirection> fldcol2 (fldtab, "DELAY_DIR"); phaseCenter = *(fldcol1(0).data()); delayCenter = *(fldcol2(0).data()); - if (fldtab.tableDesc().isColumn ("LOFAR_TILE_BEAM_DIR")) { - ROArrayMeasColumn<MDirection> fldcol3 (fldtab, "LOFAR_TILE_BEAM_DIR"); - tileBeamDir = *(fldcol3(0).data()); - } else { - tileBeamDir = delayCenter; - } + + tileBeamDir = StationResponse::readTileBeamDirection(itsMS); + // Get the array position using the telescope name from the OBSERVATION // subtable. Table obstab (itsMS.keywordSet().asTable ("OBSERVATION")); diff --git a/CEP/DP3/DPPP/src/OneApplyCal.cc b/CEP/DP3/DPPP/src/OneApplyCal.cc index 3d131c935edcc085f285651762863676558750af..5c1a9d93e584e7540b9966f0a8496a6a4acb4672 100644 --- a/CEP/DP3/DPPP/src/OneApplyCal.cc +++ b/CEP/DP3/DPPP/src/OneApplyCal.cc @@ -59,10 +59,10 @@ namespace LOFAR { parset.getString(prefix + "parmdb") : parset.getString(defaultPrefix + "parmdb")), itsUseH5Parm (itsParmDBName.find(".h5") != string::npos), - itsTimeSlotsPerParmUpdate ( - parset.isDefined(prefix + "timeslotsperparmupdate") ? - parset.getInt (prefix + "timeslotsperparmupdate") : - parset.getInt (defaultPrefix + "timeslotsperparmupdate", 500)), + itsSolSetName ( + parset.isDefined(prefix + "solset") ? + parset.getString(prefix + "solset") : + parset.getString(defaultPrefix + "solset", "")), itsSigmaMMSE ( parset.isDefined(prefix + "MMSE.Sigma") ? parset.getDouble(prefix + "MMSE.Sigma") : @@ -90,19 +90,28 @@ namespace LOFAR { } if (itsUseH5Parm) { + itsTimeSlotsPerParmUpdate = 0; string directionStr; directionStr = (parset.isDefined(prefix + "direction") ? parset.getString(prefix + "direction") : parset.getString(defaultPrefix + "direction", predictDirection)); - itsH5Parm = H5Parm(itsParmDBName); + itsH5Parm = H5Parm(itsParmDBName, false, false, itsSolSetName); itsSolTabName = (parset.isDefined(prefix + "correction") ? 
parset.getString(prefix + "correction") : parset.getString(defaultPrefix + "correction")); - - itsSolTab = itsH5Parm.getSolTab(itsSolTabName); - itsCorrectType = stringToCorrectType(itsSolTab.getType()); + if(itsSolTabName == "fulljones") + { + itsSolTab = itsH5Parm.getSolTab("amplitude000"); + itsSolTab2 = itsH5Parm.getSolTab("phase000"); + itsSolTabName = "amplitude000, phase000"; // this is only so that show() shows these tables + itsCorrectType = FULLJONES; + } + else { + itsSolTab = itsH5Parm.getSolTab(itsSolTabName); + itsCorrectType = stringToCorrectType(itsSolTab.getType()); + } if (itsCorrectType==PHASE && nPol("")==1) { itsCorrectType = SCALARPHASE; } @@ -117,6 +126,10 @@ namespace LOFAR { itsDirection = itsSolTab.getDirIndex(directionStr); } } else { + itsTimeSlotsPerParmUpdate = + parset.isDefined(prefix + "timeslotsperparmupdate") ? + parset.getInt (prefix + "timeslotsperparmupdate") : + parset.getInt (defaultPrefix + "timeslotsperparmupdate", 500); string correctTypeStr = toLower( parset.isDefined(prefix + "correction") ? 
parset.getString(prefix + "correction") : @@ -176,7 +189,9 @@ namespace LOFAR { ASSERT(itsNCorr==4); - if (!itsUseH5Parm) { // Use ParmDB + if (itsUseH5Parm) { + itsTimeSlotsPerParmUpdate = info().ntime(); + } else { // Use ParmDB itsParmDB.reset(new BBS::ParmFacade(itsParmDBName)); } @@ -300,8 +315,11 @@ namespace LOFAR { bool regularChannels=allNearAbs(upFreq-lowFreq, freqstep0, 1.e3) && allNearAbs(info().chanWidths(), info().chanWidths()(0), 1.e3); - ASSERTSTR(regularChannels, - "ApplyCal requires evenly spaced channels."); + + if (!itsUseH5Parm) { + ASSERTSTR(regularChannels, + "ApplyCal with parmdb requires evenly spaced channels."); + } } } @@ -310,6 +328,7 @@ namespace LOFAR { os << "ApplyCal " << itsName << std::endl; if (itsUseH5Parm) { os << " H5Parm: " << itsParmDBName << endl; + os << " SolSet: " << itsH5Parm.getSolSetName() << endl; os << " SolTab: " << itsSolTabName << endl; } else { os << " parmdb: " << itsParmDBName << endl; @@ -323,9 +342,7 @@ namespace LOFAR { if (itsInvert) { os << " sigmaMMSE: " << itsSigmaMMSE << endl; } - if (!itsUseH5Parm) { - os << " timeSlotsPerParmUpdate: " << itsTimeSlotsPerParmUpdate <<endl; - } + os << " timeSlotsPerParmUpdate: " << itsTimeSlotsPerParmUpdate <<endl; } void OneApplyCal::showTimings (std::ostream& os, double duration) const @@ -398,6 +415,19 @@ namespace LOFAR { getNextStep()->finish(); } + void OneApplyCal::applyFlags(vector<double>& values, + const vector<double>& weights) { + ASSERT(values.size() == weights.size()); + vector<double>::iterator values_it = values.begin(); + vector<double>::const_iterator weights_it = weights.begin(); + + for (; values_it != values.end(); ++values_it) { + if (*weights_it == 0.) { + *values_it = std::numeric_limits<float>::quiet_NaN(); + } + weights_it++; + } + } void OneApplyCal::updateParms (const double bufStartTime) { @@ -439,31 +469,7 @@ namespace LOFAR { #pragma omp critical(updateH5ParmValues) { // TODO: understand polarization etc. 
- ASSERT(itsParmExprs.size()==1 || itsParmExprs.size()==2); - hsize_t startTime = 0; - if (itsSolTab.hasAxis("time")) { - startTime = itsSolTab.getTimeIndex(bufStartTime); - } - hsize_t startFreq = 0; - if (itsSolTab.hasAxis("freq")) { - startFreq = itsSolTab.getFreqIndex(info().chanFreqs()[0]); - } - uint freqUpsampleFactor = numFreqs; - - double h5freqinterval = 0.; - if (itsSolTab.hasAxis("freq") && itsSolTab.getAxis("freq").size > 1) { - h5freqinterval = itsSolTab.getFreqInterval(); - ASSERT(h5freqinterval>0); - freqUpsampleFactor = h5freqinterval/info().chanWidths()[0] + 0.5; // Round; - ASSERT(near(h5freqinterval, freqUpsampleFactor*info().chanWidths()[0],1.e-5)); - } - - uint timeUpsampleFactor = numTimes; - if (itsSolTab.hasAxis("time") && itsSolTab.getAxis("time").size > 1) { - double h5timeInterval = itsSolTab.getTimeInterval(); - timeUpsampleFactor = h5timeInterval/itsTimeInterval+0.5; // Round - ASSERT(near(h5timeInterval,timeUpsampleFactor*itsTimeInterval,1.e-5)); - } + // ASSERT(itsParmExprs.size()==1 || itsParmExprs.size()==2); // Figure out whether time or frequency is first axis bool freqvariesfastest = true; @@ -473,51 +479,49 @@ namespace LOFAR { } ASSERT(freqvariesfastest); - // Take the ceiling of numTimes/timeUpsampleFactor, same for freq - uint numTimesInH5Parm = (numTimes+timeUpsampleFactor-1)/timeUpsampleFactor; - uint numFreqsInH5Parm = (numFreqs+freqUpsampleFactor-1)/freqUpsampleFactor; - - // Check that frequencies match - if (itsSolTab.hasAxis("freq") && itsSolTab.getAxis("freq").size > 1) { - vector<double> h5parmfreqs = itsSolTab.getRealAxis("freq"); - for (uint f=0; f<info().nchan(); ++f) { - ASSERT(nearAbs(info().chanFreqs()[f], - h5parmfreqs[startFreq + f/freqUpsampleFactor], - h5freqinterval*0.501)); - } + vector<double> times(info().ntime()); + for (uint t=0; t<times.size(); ++t) { + // time centroids + times[t] = info().startTime() + (t+0.5) * info().timeInterval(); + } + vector<double> freqs(info().chanFreqs().size()); + for 
(uint ch=0; ch<info().chanFreqs().size(); ++ch) { + freqs[ch] = info().chanFreqs()[ch]; } + vector<double> weights; for (uint ant = 0; ant < numAnts; ++ant) { - for (uint pol=0; pol<itsParmExprs.size(); ++pol) { - vector<double> rawsols, rawweights; - rawsols = itsSolTab.getValues(info().antennaNames()[ant], - startTime, numTimesInH5Parm, 1, - startFreq, numFreqsInH5Parm, 1, pol, itsDirection); - - rawweights = itsSolTab.getWeights(info().antennaNames()[ant], - startTime, numTimesInH5Parm, 1, - startFreq, numFreqsInH5Parm, 1, pol, itsDirection); - - parmvalues[pol][ant].resize(tfDomainSize); - - size_t tf=0; - for (uint t=0; t<numTimesInH5Parm; ++t) { - for (uint ti=0; ti<timeUpsampleFactor; ++ti) { - for (uint f=0; f<numFreqsInH5Parm; ++f) { - for (uint fi=0; fi<freqUpsampleFactor; ++fi) { - if (tf<tfDomainSize) { - if (rawweights[t*numFreqsInH5Parm + f]>0) { - parmvalues[pol][ant][tf++] = rawsols[t*numFreqsInH5Parm + f]; - } else { - parmvalues[pol][ant][tf++] = std::numeric_limits<double>::quiet_NaN(); - } - } - } - } - } - } - ASSERT(tf==tfDomainSize); - } + if(itsCorrectType == FULLJONES) + { + for (uint pol=0; pol<4; ++pol) { + // Place amplitude in even and phase in odd elements + parmvalues[pol*2][ant] = itsSolTab.getValuesOrWeights("val", + info().antennaNames()[ant], + times, freqs, + pol, itsDirection); + weights = itsSolTab.getValuesOrWeights("weight", + info().antennaNames()[ant], times, freqs, pol, itsDirection); + applyFlags(parmvalues[pol*2][ant], weights); + parmvalues[pol*2+1][ant] = itsSolTab2.getValuesOrWeights("val", + info().antennaNames()[ant], + times, freqs, + pol, itsDirection); + weights = itsSolTab2.getValuesOrWeights("weight", + info().antennaNames()[ant], times, freqs, pol, itsDirection); + applyFlags(parmvalues[pol*2+1][ant], weights); + } + } + else { + for (uint pol=0; pol<itsParmExprs.size(); ++pol) { + parmvalues[pol][ant] = itsSolTab.getValuesOrWeights("val", + info().antennaNames()[ant], + times, freqs, + pol, itsDirection); + 
weights = itsSolTab.getValuesOrWeights("weight", + info().antennaNames()[ant], times, freqs, pol, itsDirection); + applyFlags(parmvalues[pol][ant], weights); + } + } } } // End pragma omp critical } else { // Use ParmDB diff --git a/CEP/DP3/DPPP/src/PhaseFitter.cc b/CEP/DP3/DPPP/src/PhaseFitter.cc index 59d9763871b7744ef3c9af33c748a88e1d99b278..c883ec004a6256b9a4b4a9d933ff2cc2a5b78a12 100644 --- a/CEP/DP3/DPPP/src/PhaseFitter.cc +++ b/CEP/DP3/DPPP/src/PhaseFitter.cc @@ -36,29 +36,34 @@ double PhaseFitter::TEC2ModelCost(double alpha, double beta) const for(size_t i=0; i!=Size(); ++i) { double estphase = TEC2ModelFuncWrapped(_frequencies[i], alpha, beta); double dCost = fmod(std::fabs(estphase - _phases[i]), 2.0*M_PI); - if(dCost > M_PI) dCost = 2.0*M_PI - dCost; - dCost *= _weights[i]; + if(dCost > M_PI) + dCost = 2.0*M_PI - dCost; + dCost *= _weights[i]; costVal += dCost; - weightSum += _weights[i]; + weightSum += _weights[i]; } - return costVal / weightSum; + if(weightSum == 0.0) + return 0.0; + else + return costVal / weightSum; } double PhaseFitter::fitTEC2ModelBeta(double alpha, double betaEstimate) const { - double weightSum = 0.0; + double weightSum = 0.0; for(size_t iter=0; iter!=3; ++iter) { double sum = 0.0; for(size_t i=0; i!=Size(); ++i) { double p = _phases[i], e = TEC2ModelFunc(_frequencies[i], alpha, betaEstimate); double dist = fmod(p - e, 2.0*M_PI); if(dist < -M_PI) - dist += 2.0*M_PI; + dist += 2.0*M_PI; else if(dist > M_PI) - dist -= 2.0*M_PI; + dist -= 2.0*M_PI; sum += dist * _weights[i]; - weightSum += _weights[i]; + weightSum += _weights[i]; } - betaEstimate = betaEstimate + sum / weightSum; + if(weightSum != 0.0) + betaEstimate = betaEstimate + sum / weightSum; } return fmod(betaEstimate, 2.0*M_PI); } @@ -168,10 +173,10 @@ void PhaseFitter::bruteForceSearchTEC1Model(double& lowerAlpha, double& upperAlp // make r between [0, 1] double r = double(i)/alphaOversampling; double alpha = lowerAlpha + r*dAlpha; - // We have to have some freedom in the 
fit to make sure - // we do rule out an area with an unwripping that is correct - // Hence we use the two-parameter model and allow beta to be fitted. - // The ternary search will fix alpha to accomodate a zero beta. + // We have to have some freedom in the fit to make sure + // we do rule out an area with an unwripping that is correct + // Hence we use the two-parameter model and allow beta to be fitted. + // The ternary search will fix alpha to accomodate a zero beta. double curBeta = fitTEC2ModelBeta(alpha, 0.0); double costVal = TEC2ModelCost(alpha, curBeta); if(costVal < minCost) { @@ -182,7 +187,7 @@ void PhaseFitter::bruteForceSearchTEC1Model(double& lowerAlpha, double& upperAlp double newLowerAlpha = double(alphaIndex-1)/alphaOversampling*dAlpha + lowerAlpha; upperAlpha = double(alphaIndex+1)/alphaOversampling*dAlpha + lowerAlpha; lowerAlpha = newLowerAlpha; - //std::cout << "alpha in " << lowerAlpha << "-" << upperAlpha << '\n'; + //std::cout << "alpha in " << lowerAlpha << "-" << upperAlpha << '\n'; } double PhaseFitter::TEC1ModelCost(double alpha) const @@ -191,12 +196,16 @@ double PhaseFitter::TEC1ModelCost(double alpha) const for(size_t i=0; i!=Size(); ++i) { double estphase = TEC1ModelFuncWrapped(_frequencies[i], alpha); double dCost = fmod(std::fabs(estphase - _phases[i]), 2.0*M_PI); - if(dCost > M_PI) dCost = 2.0*M_PI - dCost; - dCost *= _weights[i]; + if(dCost > M_PI) + dCost = 2.0*M_PI - dCost; + dCost *= _weights[i]; costVal += dCost; - weightSum += _weights[i]; + weightSum += _weights[i]; } - return costVal / weightSum; + if(weightSum == 0.0) + return 0.0; + else + return costVal / weightSum; } double PhaseFitter::ternarySearchTEC1ModelAlpha(double startAlpha, double endAlpha) const @@ -215,7 +224,7 @@ double PhaseFitter::ternarySearchTEC1ModelAlpha(double startAlpha, double endAlp } dCost = std::fabs(lCost - rCost); ++iter; - //std::cout << iter << '\t' << startAlpha << '\t' << endAlpha << '\n'; + //std::cout << iter << '\t' << startAlpha << 
'\t' << endAlpha << '\n'; } while(dCost > _fittingAccuracy && iter < 100); double finalAlpha = (lAlpha + rAlpha) * 0.5; return finalAlpha; diff --git a/CEP/DP3/DPPP/src/SolTab.cc b/CEP/DP3/DPPP/src/SolTab.cc index 54468913db0a69fedfdf9dc581066bc56603d9a7..b96b4361fb821a30c606b9487f584c52b020dd4d 100644 --- a/CEP/DP3/DPPP/src/SolTab.cc +++ b/CEP/DP3/DPPP/src/SolTab.cc @@ -1,5 +1,6 @@ #include <lofar_config.h> #include <DPPP/H5Parm.h> +#include <DPPP/GridInterpolate.h> #include <Common/Exception.h> #include <Common/StringUtil.h> #include <Common/LofarLogger.h> @@ -125,7 +126,15 @@ namespace LOFAR { } // Add weights - H5::DataSet weightset = createDataSet("weight", H5::PredType::IEEE_F64LE, + // Do not use half float data type because typical weights range can be 1.e-14 + /* + hid_t halffloat = H5Tcopy(H5T_IEEE_F32BE); + H5Tset_fields(halffloat, 15, 10, 5, 0, 10); + H5Tset_size(halffloat, 2); + H5Tset_ebias(halffloat, 15); + H5Tlock(halffloat); + */ + H5::DataSet weightset = createDataSet("weight", H5::PredType::IEEE_F32LE, dataspace); // If weights are empty, write ones everywhere @@ -200,6 +209,50 @@ namespace LOFAR { return buffer+1; } + vector<double> H5Parm::SolTab::getValuesOrWeights( + const string& valOrWeight, + const string& antName, + const vector<double>& times, + const vector<double>& freqs, + uint pol, uint dir) { + vector<double> res(times.size()*freqs.size()); + + uint startTimeSlot = 0; + uint ntimeH5 = 1; + + ASSERT(!freqs.empty()); + uint startFreq = 0; + uint nfreqH5 = 1; + + vector<double> interpolated(times.size()*freqs.size()); + + vector<double> freqAxisH5(1, 0.); + vector<double> timeAxisH5(1, 0.); + if (hasAxis("time")) { + timeAxisH5 = getRealAxis("time"); + ntimeH5 = timeAxisH5.size(); + } + if (hasAxis("freq")) { + vector<double> fullFreqAxisH5 = getRealAxis("freq"); + startFreq = getFreqIndex(freqs[0]); + nfreqH5 = getFreqIndex(freqs[freqs.size()-1])-startFreq+1; + freqAxisH5 = vector<double>(fullFreqAxisH5.begin()+startFreq, 
fullFreqAxisH5.begin()+startFreq+nfreqH5); + } + + vector<double> h5values = getValuesOrWeights(valOrWeight, + antName, + startTimeSlot, ntimeH5, 1, + startFreq, nfreqH5, 1, + pol, dir); + + gridNearestNeighbor(timeAxisH5, freqAxisH5, + times, freqs, + &(h5values[0]), + &(interpolated[0])); + + return interpolated; + } + vector<double> H5Parm::SolTab::getValuesOrWeights( const string& valOrWeight, const string& antName, @@ -286,13 +339,15 @@ namespace LOFAR { H5::StrType(H5::PredType::C_S1, nChar), dataspace); - // Prepare data - char srcArray[metaVals.size()][nChar]; - for (uint i=0; i<metaVals.size(); ++i) { - strncpy(srcArray[i], metaVals[i].c_str(), nChar); - } + if (metaVals.size()>0) { + // Prepare data + char srcArray[metaVals.size()][nChar]; + for (uint i=0; i<metaVals.size(); ++i) { + strncpy(srcArray[i], metaVals[i].c_str(), nChar); + } - dataset.write(srcArray, H5::StrType(H5::PredType::C_S1, nChar)); + dataset.write(srcArray, H5::StrType(H5::PredType::C_S1, nChar)); + } } void H5Parm::SolTab::setSources(const vector<string>& solSources) { @@ -322,7 +377,9 @@ namespace LOFAR { H5::DataSet dataset = createDataSet(metaName, H5::PredType::IEEE_F64LE, dataspace); - dataset.write(&(metaVals[0]), H5::PredType::IEEE_F64LE); + if (metaVals.size() > 0) { + dataset.write(&(metaVals[0]), H5::PredType::IEEE_F64LE); + } } hsize_t H5Parm::SolTab::getAntIndex(const string& antName) { diff --git a/CEP/DP3/DPPP/src/Split.cc b/CEP/DP3/DPPP/src/Split.cc new file mode 100644 index 0000000000000000000000000000000000000000..4f114295394cf92346fd22ce537d7f1eeae20f4e --- /dev/null +++ b/CEP/DP3/DPPP/src/Split.cc @@ -0,0 +1,140 @@ +//# Split.cc: DPPP step class to Split visibilities +//# Copyright (C) 2018 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. 
+//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id: GainCal.cc 21598 2012-07-16 08:07:34Z diepen $ +//# +//# @author Tammo Jan Dijkema + +#include <lofar_config.h> +#include <DPPP/Split.h> +#include <DPPP/DPRun.h> + +#include <iostream> +#include <Common/ParameterSet.h> +#include <Common/Timer.h> + +#include <stddef.h> +#include <string> +#include <sstream> +#include <utility> +#include <vector> + +using namespace casacore; + +namespace LOFAR { + namespace DPPP { + + Split::Split (DPInput* input, + const ParameterSet& parset, + const string& prefix) + { + itsReplaceParms = parset.getStringVector(prefix + "replaceparms"); + vector<vector<string> > replaceParmValues; // For each of the parameters, the values for each substep + replaceParmValues.resize(itsReplaceParms.size()); + + vector<vector<string> >::iterator replaceParmValueIt = replaceParmValues.begin(); + vector<string>::iterator replaceParmIt; + uint numSteps = 0; + for (replaceParmIt = itsReplaceParms.begin(); + replaceParmIt != itsReplaceParms.end(); ++replaceParmIt) { + vector<string> parmValues = parset.getStringVector(*replaceParmIt); + *(replaceParmValueIt++) = parmValues; + if (numSteps > 0) { + ASSERTSTR(parmValues.size() == numSteps, "Each parameter in replaceparms should have the same number of items (expected "<< + numSteps <<", got 
"<<parmValues.size() <<" for step "<<(*replaceParmIt)); + } else { + numSteps = parmValues.size(); + } + } + + // Make a shallow copy to work around constness of parset + ParameterSet parsetCopy(parset); + + // Create the substeps + uint numParameters = itsReplaceParms.size(); + for (uint i = 0; i<numSteps; ++i) { + for (uint j = 0; j<numParameters; ++j) { + parsetCopy.replace(itsReplaceParms[j], replaceParmValues[j][i]); + } + DPStep::ShPtr firstStep = DPRun::makeSteps (parsetCopy, prefix, input); + firstStep->setPrevStep(this); + itsSubsteps.push_back(firstStep); + } + ASSERT(itsSubsteps.size()>0); + } + + Split::~Split() + {} + + void Split::updateInfo (const DPInfo& infoIn) + { + info() = infoIn; + + vector<DPStep::ShPtr>::iterator it; + for (it=itsSubsteps.begin(); it!=itsSubsteps.end(); ++it) { + (*it)->setInfo(infoIn); + } + } + + void Split::show (std::ostream& os) const + { + os << "Split " << itsName << endl; + os << " replace parameters:" << itsReplaceParms << endl; + // Show the steps. + for (uint i=0; i<itsSubsteps.size(); ++i) { + os << "Split substep "<<(i+1)<<" of "<<itsSubsteps.size()<<endl; + DPStep::ShPtr step = itsSubsteps[0]; + DPStep::ShPtr lastStep; + while (step) { + step->show (os); + lastStep = step; + step = step->getNextStep(); + } + } + } + + void Split::showTimings (std::ostream& os, double duration) const + { + for (uint i=0; i<itsSubsteps.size(); ++i) { + DPStep::ShPtr step = itsSubsteps[i]; + while (step) { + step->showTimings(os, duration); + step = step->getNextStep(); + } + } + } + + bool Split::process (const DPBuffer& bufin) + { + for (uint i=0; i<itsSubsteps.size(); ++i) { + itsSubsteps[i]->process(bufin); + } + return false; + } + + + void Split::finish() + { + // Let the next steps finish. 
+ for (uint i=0; i<itsSubsteps.size(); ++i) { + itsSubsteps[i]->finish(); + } + } + } //# end namespace +} diff --git a/CEP/DP3/DPPP/src/UVWCalculator/UVWCalculator.cc b/CEP/DP3/DPPP/src/UVWCalculator/UVWCalculator.cc index 6f582c5a9bc58a30a5968b0a3fa7b0e52b88a098..05ad175e3cfe7c46f87e25c6b4ef107655df3242 100644 --- a/CEP/DP3/DPPP/src/UVWCalculator/UVWCalculator.cc +++ b/CEP/DP3/DPPP/src/UVWCalculator/UVWCalculator.cc @@ -23,6 +23,7 @@ // Note: this code is used by LOFAR and APERTIF software. +#include <lofar_config.h> #include <DPPP/UVWCalculator.h> #include <casacore/measures/Measures/MEpoch.h> #include <casacore/measures/Measures/Muvw.h> diff --git a/CEP/DP3/DPPP/test/CMakeLists.txt b/CEP/DP3/DPPP/test/CMakeLists.txt index 900f2e31f0b11bfeef372e2785a538da926f7022..3cb53f56cacd01291d3a9d22b31a5a40795f8057 100644 --- a/CEP/DP3/DPPP/test/CMakeLists.txt +++ b/CEP/DP3/DPPP/test/CMakeLists.txt @@ -17,6 +17,7 @@ lofar_add_test(tPhaseShift tPhaseShift.cc) lofar_add_test(tStationAdder tStationAdder.cc) lofar_add_test(tScaleData tScaleData.cc) lofar_add_test(tApplyCal tApplyCal.cc) +lofar_add_test(tApplyCalH5 tApplyCalH5.cc) lofar_add_test(tApplyCal2) lofar_add_test(tMultiApplyCal) lofar_add_test(tFilter tFilter.cc) @@ -31,6 +32,9 @@ lofar_add_test(tGainCal) lofar_add_test(tH5Parm tH5Parm) lofar_add_test(tGainCalH5Parm) lofar_add_test(tUpsample tUpsample.cc) +if(CMAKE_CXX_FLAGS MATCHES ".*\\+\\+11.*") + lofar_add_test(tGridInterpolate tGridInterpolate.cc) +endif() # lofar_add_test(tExpr tExpr.cc) # lofar_add_test(tmeqarray tmeqarray.cc) # lofar_add_test(test_flaggers test_flaggers.cc) diff --git a/CEP/DP3/DPPP/test/tApplyBeam.run b/CEP/DP3/DPPP/test/tApplyBeam.run index b9db12572f56f5223288369142d753cf94af5fb2..23c96214f0edebf8163fad21a1ffebf17c09e11f 100755 --- a/CEP/DP3/DPPP/test/tApplyBeam.run +++ b/CEP/DP3/DPPP/test/tApplyBeam.run @@ -24,43 +24,57 @@ tar zxf ${srcdir}/tApplyBeam.tab.tgz echo " select result of 0 rows" > taql.ref echo; echo "Test with invert=true and 
usechannelfreq=false"; echo -NDPPP msin=tNDPPP-generic.MS msout=outinv.ms steps=[applybeam] applybeam.usechannelfreq=false applybeam.invert=true +cmd='NDPPP msin=tNDPPP-generic.MS msout=outinv.ms steps=[applybeam] applybeam.usechannelfreq=false applybeam.invert=true' +echo $cmd +$cmd # Compare the DATA column of the output MS with the BBS reference output. -$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_noucf,1e-5) || (isnan(t1.DATA) && isnan(t2.DATA_noucf)))' > taql.out +$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_noucf,5e-5) || (isnan(t1.DATA) && isnan(t2.DATA_noucf)))' > taql.out diff taql.out taql.ref || exit 1 echo; echo "### Test with invert=false on the output of the previous step"; echo -NDPPP msin=outinv.ms msout=out.ms steps=[applybeam] applybeam.usechannelfreq=false applybeam.invert=false +cmd='NDPPP msin=outinv.ms msout=out.ms steps=[applybeam] applybeam.usechannelfreq=false applybeam.invert=false' +echo $cmd +$cmd # Compare the DATA column of the output MS with the original MS. -$taqlexe 'select from out.ms t1, tNDPPP-generic.MS t2 where not all(near(t1.DATA,t2.DATA,1e-5) || (isnan(t1.DATA) && isnan(t2.DATA)))' > taql.out +$taqlexe 'select from out.ms t1, tNDPPP-generic.MS t2 where not all(near(t1.DATA,t2.DATA,5e-5) || (isnan(t1.DATA) && isnan(t2.DATA)))' > taql.out diff taql.out taql.ref || exit 1 echo; echo "Test with invert=true and usechannelfreq=true"; echo -NDPPP msin=tNDPPP-generic.MS msout=outinv.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=true +cmd='NDPPP msin=tNDPPP-generic.MS msout=outinv.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=true' +echo $cmd +$cmd # Compare the DATA column of the output MS with the BBS reference output. 
-$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_ucf,1e-5) || (isnan(t1.DATA) && isnan(t2.DATA_ucf)))' > taql.out +$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_ucf,5e-5) || (isnan(t1.DATA) && isnan(t2.DATA_ucf)))' > taql.out diff taql.out taql.ref || exit 1 echo; echo "Test with invert=false on the output of the previous step"; echo -NDPPP msin=outinv.ms msout=out.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=false +cmd='NDPPP msin=outinv.ms msout=out.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=false' +echo $cmd +$cmd # Compare the DATA column of the output MS with the original MS. -$taqlexe 'select from out.ms t1, tNDPPP-generic.MS t2 where not all(near(t1.DATA,t2.DATA,1e-5) || (isnan(t1.DATA) && isnan(t2.DATA)))' > taql.out +$taqlexe 'select from out.ms t1, tNDPPP-generic.MS t2 where not all(near(t1.DATA,t2.DATA,5e-5) || (isnan(t1.DATA) && isnan(t2.DATA)))' > taql.out diff taql.out taql.ref || exit 1 echo; echo "Test with beammode=ARRAY_FACTOR"; echo -NDPPP msin=tNDPPP-generic.MS msout=outinv.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=true applybeam.beammode=ARRAY_FACTOR +cmd='NDPPP msin=tNDPPP-generic.MS msout=outinv.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=true applybeam.beammode=ARRAY_FACTOR' +echo $cmd +$cmd # Compare the DATA column of the output MS with the BBS reference output. 
-$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_ARRAY_FACTOR,1e-5) || (isnan(t1.DATA) && isnan(t2.DATA_ARRAY_FACTOR)))' > taql.out +$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_ARRAY_FACTOR,5e-5) || (isnan(t1.DATA) && isnan(t2.DATA_ARRAY_FACTOR)))' > taql.out diff taql.out taql.ref || exit 1 echo; echo "Test with beammode=ELEMENT"; echo -NDPPP msin=tNDPPP-generic.MS msout=outinv.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=true applybeam.beammode=ELEMENT +cmd='NDPPP msin=tNDPPP-generic.MS msout=outinv.ms msout.overwrite=true steps=[applybeam] applybeam.usechannelfreq=true applybeam.invert=true applybeam.beammode=ELEMENT' +echo $cmd +$cmd # Compare the DATA column of the output MS with the BBS reference output. -$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_ELEMENT,1e-5) || (isnan(t1.DATA) && isnan(t2.DATA_ELEMENT)))' > taql.out +$taqlexe 'select from outinv.ms t1, tApplyBeam.tab t2 where not all(near(t1.DATA,t2.DATA_ELEMENT,5e-5) || (isnan(t1.DATA) && isnan(t2.DATA_ELEMENT)))' > taql.out diff taql.out taql.ref || exit 1 echo; echo "Test with updateweights=true"; echo -NDPPP msin=tNDPPP-generic.MS msout=. steps=[applybeam] applybeam.updateweights=truue msout.weightcolumn=NEW_WEIGHT_SPECTRUM +cmd='NDPPP msin=tNDPPP-generic.MS msout=. 
steps=[applybeam] applybeam.updateweights=true msout.weightcolumn=NEW_WEIGHT_SPECTRUM' +echo $cmd +$cmd # Check that the weights have changed $taqlexe 'select from tNDPPP-generic.MS where all(near(WEIGHT_SPECTRUM, NEW_WEIGHT_SPECTRUM))' > taql.out diff taql.out taql.ref || exit 1 diff --git a/CEP/DP3/DPPP/test/tApplyCalH5.cc b/CEP/DP3/DPPP/test/tApplyCalH5.cc new file mode 100644 index 0000000000000000000000000000000000000000..fe081058ad93b6a7eebaf3af35a2808207f3b2e0 --- /dev/null +++ b/CEP/DP3/DPPP/test/tApplyCalH5.cc @@ -0,0 +1,382 @@ +//# tApplyCalH5.cc: Test program for class ApplyCal +//# Copyright (C) 2013 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+//# +//# $Id: tApplyCalH5.cc 24221 2013-08-02 12:24:48Z tammo $ +//# +//# @author Tammo Jan Dijkema + +#include <lofar_config.h> +#include <DPPP/ApplyCal.h> +#include <DPPP/DPInput.h> +#include <DPPP/DPBuffer.h> +#include <DPPP/H5Parm.h> +#include <DPPP/DPInfo.h> +#include <Common/ParameterSet.h> +#include <Common/StringUtil.h> +#include <Common/StreamUtil.h> +#include <casa/Arrays/ArrayMath.h> +#include <casa/Arrays/ArrayLogical.h> +#include <casa/Arrays/ArrayIO.h> +#include <iostream> + +using namespace LOFAR; +using namespace LOFAR::DPPP; +using namespace casa; +using namespace std; + +// Simple class to generate input arrays. +// 9 baselines, 3 antennas, 4 correlations +class TestInput: public DPInput +{ +public: + TestInput(uint ntime, uint nchan) + : itsCount(0), itsNTime(ntime), itsNChan(nchan), itsNBl(9), itsNCorr(4), + itsTimeInterval(5.), itsFirstTime(4472025740.0) + { + info().init (itsNCorr, nchan, ntime, itsFirstTime, itsTimeInterval, + string(), string()); + // Fill the baseline stations; use 3 stations. + // So they are called 00 01 02 10 11 12 20 21 22, etc. + + Vector<Int> ant1(itsNBl); + Vector<Int> ant2(itsNBl); + int st1 = 0; + int st2 = 0; + for (int i=0; i<itsNBl; ++i) { + ant1[i] = st1; + ant2[i] = st2; + if (++st2 == 3) { + st2 = 0; + if (++st1 == 3) { + st1 = 0; + } + } + } + Vector<String> antNames(3); + antNames[0] = "ant1"; + antNames[1] = "ant2"; + antNames[2] = "ant3"; + // Define their positions (more or less WSRT RT0-3). 
+ vector<MPosition> antPos(3); + Vector<double> vals(3); + vals[0] = 3828763; vals[1] = 442449; vals[2] = 5064923; + antPos[0] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + vals[0] = 3828746; vals[1] = 442592; vals[2] = 5064924; + antPos[1] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + vals[0] = 3828729; vals[1] = 442735; vals[2] = 5064925; + antPos[2] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + Vector<double> antDiam(3, 70.); + info().set (antNames, antDiam, antPos, ant1, ant2); + // Define the frequencies. + Vector<double> chanWidth(nchan, 100.e6); + Vector<double> chanFreqs(nchan); + for (uint ch=0; ch<nchan; ++ch) { + double freq = 100.e6 + ch*10.e6; + if (ch>2) { + // Make frequencies unevenly spaced + freq += 4.e6; + } + if (ch>4) { + freq += 1.e6; + } + if (ch>5) { + freq += 15.e6; + } + chanFreqs[ch] = freq; + } + if (nchan==2) { + chanFreqs[0] = 100.e6; + chanFreqs[1] = 101.e6; + } + info().set (chanFreqs, chanWidth); + } +private: + virtual bool process (const DPBuffer&) + { + // Stop when all times are done. 
+ if (itsCount == itsNTime) { + return false; + } + Cube<Complex> data(itsNCorr, itsNChan, itsNBl); + for (int i=0; i<int(data.size()); ++i) { + data.data()[i] = Complex(1,0); + } + Cube<Float> weights(itsNCorr, itsNChan, itsNBl); + weights=1.; + + Matrix<double> uvw(3, itsNBl); + for (int i=0; i<itsNBl; ++i) { + uvw(0,i) = 1 + itsCount + i; + uvw(1,i) = 2 + itsCount + i; + uvw(2,i) = 3 + itsCount + i; + } + DPBuffer buf; + buf.setTime (itsCount*itsTimeInterval + itsFirstTime); + buf.setData (data); + buf.setWeights (weights); + buf.setUVW (uvw); + Cube<bool> flags(data.shape()); + flags = false; + buf.setFlags (flags); + Cube<bool> fullResFlags(itsNChan, 1, itsNBl); + fullResFlags = false; + buf.setFullResFlags (fullResFlags); + getNextStep()->process (buf); + ++itsCount; + return true; + } + + virtual void finish() {getNextStep()->finish();} + virtual void show (std::ostream&) const {} + virtual void updateInfo (const DPInfo&) {} + + int itsCount, itsNTime, itsNChan, itsNBl, itsNCorr, itsTimeInterval; + double itsFirstTime; +}; + + + +// Class to check result of TestInput run by tests. 
+class TestOutput: public DPStep +{ +public: + enum tests {WeightsNotChanged=1, DataNotChanged=2, DataChanged=4, + DataEquals=8, WeightEquals=16}; + TestOutput(int ntime, int nchan, int doTest, bool solsHadFreqAxis=true, + bool solsHadTimeAxis=true) + : itsCount(0), itsTimeStep(0), itsNTime(ntime), itsNBl(9), itsNChan(nchan), + itsNCorr(4), itsTimeInterval(5.), itsDoTest(doTest), + itsSolsHadFreqAxis(solsHadFreqAxis), itsSolsHadTimeAxis(solsHadTimeAxis) + {} +private: + virtual bool process (const DPBuffer& buf) + { + Cube<Complex> data(itsNCorr, itsNChan, itsNBl); + for (int i=0; i<int(data.size()); ++i) { + data.data()[i] = Complex(1,0); + } + Cube<Float> weights(itsNCorr, itsNChan, itsNBl); + indgen (weights, 1.0f, 0.0f); + + vector<double> rightTimes(max(itsNTime, 5)); + rightTimes[0] = 0; + rightTimes[1] = 2; + rightTimes[2] = 3; + for (int t=3; t<itsNTime; ++t) { + rightTimes[t] = 4; + } + if (!itsSolsHadTimeAxis) { + rightTimes.assign(itsNTime, 0); + } + + vector<double> rightFreqs(max(itsNChan, 5)); + rightFreqs[0] = 1; + rightFreqs[1] = 1; + rightFreqs[2] = 2; + rightFreqs[3] = 2; + rightFreqs[4] = 2; + for (int f=5; f<itsNChan; ++f) { + rightFreqs[f] = 3; + } + if (!itsSolsHadFreqAxis) { + rightFreqs.assign(itsNChan, 1); + } + + if (itsDoTest) { + //cout<<endl; + for (uint bl=0; bl<info().nbaselines(); ++bl) { + for (int chan=0; chan<itsNChan; ++chan) { + uint ant1 = info().getAnt1()[bl]; + uint ant2 = info().getAnt2()[bl]; + // Square root of autocorrelation for first antenna + complex<float> val = sqrt(buf.getData().data()[bl*itsNCorr*itsNChan + chan*itsNCorr]); + + bool flag = buf.getFlags().data()[bl*itsNCorr*itsNChan + chan*itsNCorr]; + if ((ant1==1 || ant2==1) && rightTimes[itsTimeStep]==2 && rightFreqs[chan]==2) { + ASSERT(flag); + } else { + ASSERT(!flag); + ASSERT(near(rightTimes[itsTimeStep]*100 + rightFreqs[chan], val)); + } + } + } + } + + if (itsDoTest & DataEquals) { + ASSERT (allNear (buf.getData(), data, 1.e-7)); + } + + if (itsDoTest & 
DataNotChanged) { + ASSERT (allNear (buf.getData(), data, 1.e-7)); + } + if (itsDoTest & DataChanged) { + ASSERT (!(allNear (buf.getData(), data, 1.e-7))); + } + if (itsDoTest & WeightsNotChanged) { + ASSERT (allNear (buf.getWeights(), weights, 1.e-7)); + } + itsCount++; + itsTimeStep++; + return true; + } + + virtual void finish() {} + virtual void show (std::ostream&) const {} + virtual void updateInfo (const DPInfo& infoIn) + { + info() = infoIn; + ASSERT (int(infoIn.origNChan())==itsNChan); + ASSERT (int(infoIn.nchan())==itsNChan); + ASSERT (int(infoIn.ntime())==itsNTime); + ASSERT (infoIn.timeInterval()==itsTimeInterval); + ASSERT (int(infoIn.nbaselines())==itsNBl); + } + + int itsCount; + int itsTimeStep; + int itsNTime, itsNBl, itsNChan, itsNCorr, itsTimeInterval, itsDoTest; + bool itsSolsHadFreqAxis, itsSolsHadTimeAxis; +}; + + +// Execute steps. +void execute (const DPStep::ShPtr& step1) +{ + // Set DPInfo. + step1->setInfo (DPInfo()); + + const DPStep::ShPtr& step=step1->getNextStep(); + + // TODO: do line below for any step that is an ApplyCal + step->show (cout); + + // Execute the steps. + DPBuffer buf; + while (step1->process(buf)); + step1->finish(); +} + +// Test amplitude correction +void testampl(int ntime, int nchan, bool freqaxis, bool timeaxis) +{ + cout << "testampl: ntime=" << ntime << " nchan=" << nchan << endl; + // Create the steps. 
+ TestInput* in = new TestInput(ntime, nchan); + DPStep::ShPtr step1(in); + + ParameterSet parset1; + parset1.add ("correction", "myampl"); + parset1.add ("parmdb", "tApplyCalH5_tmp.h5"); + DPStep::ShPtr step2(new ApplyCal(in, parset1, "")); + + DPStep::ShPtr step3(new TestOutput(ntime, nchan, + TestOutput::WeightsNotChanged, freqaxis, timeaxis)); + + step1->setNextStep (step2); + step2->setNextStep (step3); + execute (step1); + cout<<endl; +} + + +// Write a temporary H5Parm +void createH5Parm(vector<double> times, vector<double> freqs) { + H5Parm h5parm("tApplyCalH5_tmp.h5", true); + + // Add antenna metadata + vector<string> antNames; + vector<vector<double> > antPositions; + vector<double> oneAntPos(3, 42.); + for (uint i=0; i<3; ++i) { + stringstream antNameStr; + antNameStr<<"ant"<<(i+1); + antNames.push_back(antNameStr.str()); + antPositions.push_back(oneAntPos); + } + h5parm.addAntennas(antNames, antPositions); + + vector<H5Parm::AxisInfo> axes; + axes.push_back(H5Parm::AxisInfo("ant",3)); + if (!times.empty()) { + axes.push_back(H5Parm::AxisInfo("time", times.size())); + } + if (!freqs.empty()) { + axes.push_back(H5Parm::AxisInfo("freq", freqs.size())); + } + + H5Parm::SolTab soltab = h5parm.createSolTab("myampl","amplitude",axes); + ASSERT(h5parm.nSolTabs() == 1); + ASSERT(h5parm.hasSolTab("myampl")); + soltab.setTimes(times); + soltab.setFreqs(freqs); + soltab.setAntennas(antNames); + + uint ntimes = max(times.size(), 1); + uint nfreqs = max(freqs.size(), 1); + vector<double> values(ntimes*nfreqs*3); + vector<double> weights(ntimes*nfreqs*3); + for (uint ant=0; ant<3; ++ant) { + for (uint t=0; t<ntimes; ++t) { + for (uint f=0; f<nfreqs; ++f) { + values[ant*ntimes*nfreqs+t*nfreqs + f] = 1./(100.*(t%100)+(1+f)); + weights[ant*ntimes*nfreqs+t*nfreqs + f] = 1.; + if (ant==1 && t==2 && f==1) { + weights[ant*ntimes*nfreqs+t*nfreqs + f] = 0.; + } + } + } + } + soltab.setValues(values, weights, "CREATE with DPPP tApplyCalH5"); +} + +int main() +{ + INIT_LOGGER 
("tApplyCalH5"); + + vector<double> times; + times.push_back(4472025742.0); + times.push_back(4472025745.0); + times.push_back(4472025747.5); + times.push_back(4472025748.0); + times.push_back(4472025762.0); + vector<double> freqs; + freqs.push_back(90.e6); + freqs.push_back(139.e6); + freqs.push_back(170.e6); + + try { + createH5Parm(times, freqs); + testampl(5, 7, true, true); + createH5Parm(times, freqs); + testampl(5, 2, true, true); + createH5Parm(times, vector<double>()); + testampl(8, 9, false, true); + createH5Parm(vector<double>(), freqs); + testampl(13, 3, true, false); + createH5Parm(vector<double>(), vector<double>()); + testampl(9, 2, false, false); + } catch (std::exception& x) { + cout << "Unexpected exception: " << x.what() << endl; + return 1; + } + return 0; +} diff --git a/CEP/DP3/DPPP/test/tApplyCalH5.run b/CEP/DP3/DPPP/test/tApplyCalH5.run new file mode 100755 index 0000000000000000000000000000000000000000..81ee2db8f59099c3b4bd1b29b84edbadf92a96aa --- /dev/null +++ b/CEP/DP3/DPPP/test/tApplyCalH5.run @@ -0,0 +1,3 @@ +#!/bin/sh + +./tApplyCalH5 diff --git a/CEP/DP3/DPPP/test/tApplyCalH5.sh b/CEP/DP3/DPPP/test/tApplyCalH5.sh new file mode 100755 index 0000000000000000000000000000000000000000..189bdc360b952a79420d8b65466ab5c5a930bd25 --- /dev/null +++ b/CEP/DP3/DPPP/test/tApplyCalH5.sh @@ -0,0 +1,2 @@ +#!/bin/sh +./runctest.sh tApplyCalH5 diff --git a/CEP/DP3/DPPP/test/tGridInterpolate.cc b/CEP/DP3/DPPP/test/tGridInterpolate.cc new file mode 100644 index 0000000000000000000000000000000000000000..02a5814d9f453d50fa3345848ab730a52c8ff129 --- /dev/null +++ b/CEP/DP3/DPPP/test/tGridInterpolate.cc @@ -0,0 +1,64 @@ +//# tGridInterpolate.cc: test program for GridInterpolate +//# Copyright (C) 2010 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. 
+//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id: tGridInterpolate.cc 31423 2015-04-03 14:06:21Z dijkema $ +//# +//# @author Tammo Jan Dijkema + +#include <lofar_config.h> +#include <DPPP/GridInterpolate.h> +#include <cassert> + +using namespace LOFAR; +using namespace std; + +int main() { + vector<double> ax_src = {1,3}; + vector<double> ax_tgt = {0.5, 1.5, 2.5, 3.5}; + + vector<size_t> indices; + getAxisIndices(ax_src, ax_tgt, indices); + assert(indices.size()==ax_tgt.size()); + assert(indices[0]==0 && indices[1]==0 && indices[2]==1 && indices[3]==1); + + vector<double> x_src = {2,4,8,10}; + vector<double> y_src = {3,6,12}; + vector<double> x_tgt = {1,3.5,9.5,10}; + vector<double> y_tgt = {4,10}; + vector<double> vals_src(x_src.size()*y_src.size()); + vector<double> vals_tgt(x_tgt.size()*y_tgt.size()); + for (size_t i=0; i<vals_src.size(); ++i) { + vals_src[i] = i; + } + + getAxisIndices(x_src, x_tgt, indices); + assert(indices.size() == x_tgt.size()); + assert(indices[0]==0 && indices[1]==1 && indices[2]==3 && indices[3]==3); + + gridNearestNeighbor(x_src, y_src, x_tgt, y_tgt, vals_src.data(), vals_tgt.data()); + + assert(vals_tgt[0] == vals_src[0]); + assert(vals_tgt[1] == vals_src[2]); + assert(vals_tgt[2] == vals_src[3]); + assert(vals_tgt[3] == vals_src[5]); + assert(vals_tgt[4] == vals_src[9]); + 
assert(vals_tgt[5] == vals_src[11]); + assert(vals_tgt[6] == vals_src[9]); + assert(vals_tgt[7] == vals_src[11]); +} diff --git a/CEP/DP3/DPPP/test/tH5Parm.cc b/CEP/DP3/DPPP/test/tH5Parm.cc index 4291800d1dbc595fa0d7ed8a33ef3d6c2d103440..544cca60261576050058219e06d72ce914cd678e 100644 --- a/CEP/DP3/DPPP/test/tH5Parm.cc +++ b/CEP/DP3/DPPP/test/tH5Parm.cc @@ -9,7 +9,7 @@ using namespace std; using namespace LOFAR; -void checkAxes(H5Parm::SolTab& soltab) { +void checkAxes(H5Parm::SolTab& soltab, size_t ntimes) { ASSERT(soltab.nAxes()==3); ASSERT(soltab.hasAxis("ant")); ASSERT(soltab.hasAxis("time")); @@ -18,12 +18,13 @@ void checkAxes(H5Parm::SolTab& soltab) { ASSERT(soltab.getAxis(1).name=="time"); ASSERT(soltab.getAxis(2).name=="bla"); ASSERT(soltab.getAxis(0).size==3); - ASSERT(soltab.getAxis(1).size==4); + ASSERT(soltab.getAxis(1).size==ntimes); ASSERT(soltab.getAxis(2).size==1); } int main(int, char**) { { + size_t ntimes=7; { // Create a new H5Parm cout<<"Create tH5Parm_tmp.h5"<<endl; @@ -49,7 +50,7 @@ int main(int, char**) { vector<H5Parm::AxisInfo> axes; axes.push_back(H5Parm::AxisInfo("ant",3)); - axes.push_back(H5Parm::AxisInfo("time",4)); + axes.push_back(H5Parm::AxisInfo("time",ntimes)); axes.push_back(H5Parm::AxisInfo("bla",1)); cout<<"Create new SolTab"<<endl; @@ -62,14 +63,19 @@ int main(int, char**) { // Check the axes H5Parm::SolTab soltab = h5parm.getSolTab("mysol"); ASSERT(soltab.getType()=="mytype"); - checkAxes(soltab); + checkAxes(soltab, ntimes); // Add some data - vector<double> vals(3*4); - for (size_t ant=0; ant<3; ++ant) - for (size_t time=0; time<4; ++time) - vals[ant*4+time]=10*ant+time; - soltab.setValues(vals, vector<double>(), "CREATE with DPPP"); + vector<double> vals(3*ntimes); + vector<double> weights(3*ntimes); + for (size_t ant=0; ant<3; ++ant) { + for (size_t time=0; time<ntimes; ++time) { + vals[ant*ntimes+time]=10*ant+time; + weights[ant*ntimes+time]=0.4; + } + } + + soltab.setValues(vals, weights, "CREATE with DPPP"); // Add 
metadata for stations vector<string> someAntNames; @@ -80,9 +86,9 @@ int main(int, char**) { // Add metadata for times vector<double> times; - times.push_back(57878.5); - times.push_back(57880.5); - times.push_back(57882.5); + for (size_t time=0; time<ntimes; ++time) { + times.push_back(57878.5+2.0*time); + } soltab.setTimes(times); // Add metadata for freqs; @@ -117,13 +123,13 @@ int main(int, char**) { // Check the axes H5Parm::SolTab soltab = h5parm.getSolTab("mysol"); ASSERT(soltab.getType()=="mytype"); - checkAxes(soltab); + checkAxes(soltab, ntimes); cout<<"read some data"<<endl; double starttime = 57878.49999; hsize_t starttimeindex = soltab.getTimeIndex(starttime); cout<<"starttimeindex="<<starttimeindex<<endl; - vector<double> val = soltab.getValues("Antenna2", starttimeindex, 4); + vector<double> val = soltab.getValues("Antenna2", starttimeindex, ntimes); ASSERT(casa::near(val[0],10.)); ASSERT(casa::near(val[1],11.)); ASSERT(casa::near(val[2],12.)); @@ -147,8 +153,43 @@ int main(int, char**) { ASSERT(casa::near(soltab.getFreqInterval(0),1e6)); ASSERT(casa::near(soltab.getFreqInterval(1),4e6)); ASSERT(casa::near(soltab.getFreqInterval(2),2e6)); - } + cout<<"Checking interpolation (on input time axis)"<<endl; + vector<double> freqs; + freqs.push_back(130e6); + freqs.push_back(131e6); + + vector<double> times; + for (size_t time=0; time<ntimes; ++time) { + times.push_back(57878.5+2.0*time); + } + + vector<double> newgridvals = soltab.getValuesOrWeights("val", "Antenna1", + times, freqs, 0, 0); + ASSERT(newgridvals.size() == times.size() * freqs.size()); + size_t idx=0; + for (size_t time=0; time<times.size(); ++time) { + for (size_t freq=0; freq<freqs.size(); ++freq) { + ASSERT(casa::near(newgridvals[idx++], double(time))); + } + } + + times.clear(); + cout<<"Checking interpolation, upsampled 3 times, add 2 time slots at end"<<endl; + for (size_t time=0; time<3*ntimes+2; ++time) { + times.push_back(57878.5+2.0*time/3.); + } + newgridvals = 
soltab.getValuesOrWeights("val", "Antenna1", + times, freqs, 0, 0); + ASSERT(newgridvals.size() == times.size() * freqs.size()); + idx=0; + for (int time=0; time<int(times.size()); ++time) { + for (size_t freq=0; freq<freqs.size(); ++freq) { + ASSERT(casa::near(newgridvals[idx++], min(double((time+1)/3),double(ntimes-1)))); + } + } + + } // Remove the file // remove("tH5Parm_tmp.h5"); } diff --git a/CEP/DP3/DPPP_AOFlag/CMakeLists.txt b/CEP/DP3/DPPP_AOFlag/CMakeLists.txt index 6f881f3861d0174d653a2b5c15bec929b01fac14..01852610f85a91ec44bf6ffcbf6e5dae6dd6e2a1 100644 --- a/CEP/DP3/DPPP_AOFlag/CMakeLists.txt +++ b/CEP/DP3/DPPP_AOFlag/CMakeLists.txt @@ -5,7 +5,9 @@ lofar_package(DPPP_AOFlag 1.0 DEPENDS DPPP) include(LofarFindPackage) lofar_find_package(AOFlagger REQUIRED) lofar_find_package(Casacore COMPONENTS casa ms tables REQUIRED) -lofar_find_package(Boost REQUIRED COMPONENTS date_time thread filesystem system) +lofar_find_package(Boost REQUIRED COMPONENTS date_time thread filesystem system python) +# AOFlagger depends on Python 2.7, see aoflagger CMake +lofar_find_package(Python 2.7 REQUIRED) #lofar_find_package(GSL) lofar_find_package(LibXml2 REQUIRED) lofar_find_package(PNG REQUIRED) @@ -16,6 +18,16 @@ lofar_find_package(CFITSIO REQUIRED) # list(APPEND LOFAR_EXTRA_LIBRARIES ${RT_LIBRARY}) #endif(CMAKE_SYSTEM_NAME MATCHES "Linux") +# Copied from AOFlagger CMakeLists to link in GTKMM if AOFlagger was built with it +find_package(PkgConfig) +pkg_check_modules(GTKMM gtkmm-3.0>=3.0.0) +pkg_check_modules(SIGCXX sigc++-2.0) + +if(GTKMM_FOUND) + set(LOFAR_EXTRA_LIBRARIES ${LOFAR_EXTRA_LIBRARIES} ${GTKMM_LIBRARIES} ${GLIBMM_LIBRARIES}) +endif(GTKMM_FOUND) +# End check for GTKMM + add_subdirectory(include/DPPP_AOFlag) add_subdirectory(src) add_subdirectory(test) diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/CMakeLists.txt b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/CMakeLists.txt index 0a9a4816a5ad5bc70118c5d4690cd7d79269c68e..37345b4307e429f8677085302e23da9476c02738 
100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/CMakeLists.txt +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/CMakeLists.txt @@ -1,7 +1,12 @@ # $Id: CMakeLists.txt 30990 2015-02-12 12:27:47Z diepen $ # List of header files that will be installed. -set(inst_HEADERS Register.h DDECal.h MultiDirSolver.h H5Parm.h Constraint.h ScreenConstraint.h TECConstraint.h PiercePoint.h Matrix2x2.h PieceWisePhaseFitter.h RotationConstraint.h RotationAndDiagonalConstraint.h) +set(inst_HEADERS + Register.h DDECal.h MultiDirSolver.h H5Parm.h + Constraint.h KernelSmoother.h ScreenConstraint.h TECConstraint.h + PiercePoint.h Matrix2x2.h PieceWisePhaseFitter.h + RotationConstraint.h RotationAndDiagonalConstraint.h + SmoothnessConstraint.h ) # Create symbolic link to include directory. execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Constraint.h b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Constraint.h index 7f215b28d98a0870fc15b470f1fff31e3dd2caa1..b9af08e49f4d92dcdd1b208468070e28ffdd687f 100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Constraint.h +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/Constraint.h @@ -5,6 +5,7 @@ #include <memory> #include <set> #include <vector> +#include <ostream> /** * This class is the base class for classes that implement a constraint on @@ -21,6 +22,7 @@ public: { public: std::vector<double> vals; + std::vector<double> weights; std::string axes; // Comma-separated string with axis names, fastest varying last std::vector<size_t> dims; std::string name; @@ -54,18 +56,40 @@ public: virtual bool Satisfied() const { return true; } /** - * This method applies the constraints to the solutions. It should be implemented in - * a thread safe manner, allowing multiple Apply() calls to run in parallel. + * This method applies the constraints to the solutions. * @param solutions is an array of array, such that: - * - solutions[ch] is a pointer for channelblock ch to antenna x directions solutions. 
- * - directions is the dimension with the fastest changing index. + * - solutions[ch] is a pointer for channelblock ch to antenna x directions x pol solutions. + * - pol is the dimension with the fastest changing index. * @param time Central time of interval. */ virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time) = 0; + double time, std::ostream* statStream) = 0; + + /** + * Initialize the dimensions for the constraint. Should be overridden when + * something more than assigning dimensions is needed (e.g. resizing vectors). + * Weights are initialized to 1. here. + */ + virtual void InitializeDimensions(size_t nAntennas, + size_t nDirections, + size_t nChannelBlocks) + { + _nAntennas = nAntennas; + _nDirections = nDirections; + _nChannelBlocks = nChannelBlocks; + } + + /** + * Set weights. The vector should contain an array of size nAntennas * nChannelBlocks, + * where the channel index varies fastest. + */ + virtual void SetWeights(const std::vector<double> &) {} virtual void showTimings (std::ostream&, double) const {} + +protected: + size_t _nAntennas, _nDirections, _nChannelBlocks; }; /** @@ -79,7 +103,8 @@ public: virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, + std::ostream* statStream); }; /** @@ -93,7 +118,8 @@ public: virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, + std::ostream* statStream); }; class DiagonalConstraint : public Constraint @@ -103,7 +129,8 @@ public: virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, + std::ostream* statStream); private: const size_t _polsPerSolution; }; @@ -123,20 +150,17 @@ class CoreConstraint : public Constraint public: CoreConstraint() { } - void initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks, const std::set<size_t>& coreAntennas) + void initialize(const 
std::set<size_t>& coreAntennas) { - _nAntennas = nAntennas; - _nDirections = nDirections; - _nChannelBlocks = nChannelBlocks; _coreAntennas = coreAntennas; } virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, + std::ostream* statStream); private: - size_t _nAntennas, _nDirections, _nChannelBlocks; std::set<size_t> _coreAntennas; }; diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/DDECal.h b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/DDECal.h index f4ec891d3eefb3ca5025d77aa4d599a3d6cdd16a..b6185b0cc0f0a0331d8b64e4eac064639491a14d 100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/DDECal.h +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/DDECal.h @@ -114,6 +114,9 @@ namespace LOFAR { std::string itsName; vector<DPBuffer> itsBufs; + bool itsUseModelColumn; + std::vector<casacore::Cube<casacore::Complex>> itsModelData; + // The time of the current buffer (in case of solint, average time) double itsAvgTime; std::vector<casacore::Complex*> itsDataPtrs; @@ -130,7 +133,6 @@ namespace LOFAR { // For each time, for each constraint, a vector of results (e.g. 
#ifndef KERNEL_SMOOTHER_H
#define KERNEL_SMOOTHER_H

#include <algorithm>
#include <cmath>
#include <stdexcept>
#include <vector>

/**
 * Smooths an array of data values, sampled at given grid positions
 * ("frequencies"), with a weighted kernel of fixed bandwidth.
 * The grid positions are assumed to be sorted in ascending order
 * (required for the sliding-window search in Smooth()).
 */
template<typename DataType, typename NumType>
class KernelSmoother
{
public:
  enum KernelType {
    RectangularKernel,
    TriangularKernel,
    /** Gaussian, trimmed off at 3 sigma */
    GaussianKernel,
    /** The Epanechnikov kernel is a quadratic kernel, given by 3/4 (1 - x^2) */
    EpanechnikovKernel
  };

  /**
   * @param frequencies Grid positions of the n samples, ascending order.
   * @param n Number of samples.
   * @param kernelType Shape of the smoothing kernel.
   * @param kernelBandwidth Kernel width, in the same units as @p frequencies.
   */
  KernelSmoother(const NumType* frequencies, size_t n, KernelType kernelType, NumType kernelBandwidth) :
    _frequencies(frequencies, frequencies+n),
    _scratch(n),
    _kernelType(kernelType),
    _bandwidth(kernelBandwidth)
  {
  }

  /**
   * Evaluate the kernel at @p distance from its centre; zero outside
   * |distance| > bandwidth. All kernels are symmetric around zero.
   * NOTE(review): Smooth() only gathers samples within half the bandwidth
   * on each side, so the kernels are effectively truncated at |x| = 0.5
   * rather than reaching zero at the window edge — confirm intended.
   */
  NumType Kernel(NumType distance) const
  {
    NumType x = distance / _bandwidth;
    if(x < NumType(-1.0) || x > NumType(1.0))
      return NumType(0.0);
    switch(_kernelType)
    {
      case RectangularKernel:
      default:
        return NumType(0.5);
      case TriangularKernel:
        return x >= NumType(0.0) ? (NumType(1.0) - x) : (NumType(1.0) + x);
      case GaussianKernel:
        // e^(-x^2 / sigma^2), sigma = bandwidth / 3.
        return std::exp(-x*x*NumType(9.0));
      case EpanechnikovKernel:
        // 3/4 * (1 - x^2).
        // Fixed: previous code computed 3/4 * (1-x)^2, which is neither
        // the Epanechnikov kernel nor symmetric in x.
        return (NumType(3.0) / NumType(4.0)) * (NumType(1.0) - x*x);
    }
  }

  /**
   * Replace @p data in place by its kernel-smoothed version.
   * @param data Array of n samples, overwritten with the smoothed values.
   * @param weight One non-negative weight per sample; a sample whose whole
   *        window has zero total weight produces a zero output value.
   */
  void Smooth(DataType* data, const NumType* weight)
  {
    size_t n = _frequencies.size();

    size_t bandLeft = 0;
    // Find the right-hand window boundary for the first element.
    // lower_bound may return end(), making the "+1" exceed n, so clamp to n
    // to keep the indexing below in bounds (was a potential overread).
    size_t bandRight = std::min<size_t>(n,
        std::lower_bound(_frequencies.begin(), _frequencies.end(),
                         _frequencies[0] + _bandwidth * NumType(0.5))
        - _frequencies.begin() + 1);

    for(size_t i=0; i!=n; ++i)
    {
      // If a boundary is further than half the bandwidth away, move boundary
      while(_frequencies[bandLeft] < _frequencies[i] - _bandwidth * NumType(0.5))
        ++bandLeft;
      while(bandRight < n && _frequencies[bandRight] < _frequencies[i] + _bandwidth * NumType(0.5))
        ++bandRight;

      // A value of 1 is added to make sure we are not skipping a value because
      // of rounding errors (kernel will be zero past boundaries, so including
      // an unnecessary value has no effect)
      size_t start = bandLeft > 0 ? bandLeft-1 : 0;
      size_t end = bandRight < n ? bandRight+1 : n;

      DataType sum(0.0);
      NumType weightSum(0.0);
      for(size_t j=start; j!=end; ++j)
      {
        NumType distance = _frequencies[i] - _frequencies[j];
        NumType w = Kernel(distance) * weight[j];
        sum += data[j] * w;
        weightSum += w;
      }
      if(weightSum == NumType(0.0))
        _scratch[i] = DataType(0.0);
      else
        _scratch[i] = sum / weightSum;
    }
    std::copy(_scratch.begin(), _scratch.end(), data);
  }

private:
  std::vector<NumType> _frequencies;
  std::vector<DataType> _scratch;
  enum KernelType _kernelType;
  NumType _bandwidth;
};

#endif
std::complex<ValType>& e2) { - std::complex<double> tr = matrix[0] + matrix[3]; - std::complex<double> d = matrix[0]*matrix[3] - matrix[1]*matrix[2]; - std::complex<double> term = sqrt(tr*tr*0.25-d); - std::complex<double> trHalf = tr*0.5; + std::complex<ValType> tr = matrix[0] + matrix[3]; + std::complex<ValType> d = matrix[0]*matrix[3] - matrix[1]*matrix[2]; + std::complex<ValType> term = sqrt(tr*tr*ValType(0.25)-d); + std::complex<ValType> trHalf = tr*ValType(0.5); e1 = trHalf + term; e2 = trHalf - term; } @@ -278,88 +289,101 @@ public: } }; -class MC2x2 +template<typename ValType> +class MC2x2Base { public: - MC2x2() { } - MC2x2(const MC2x2& source) { Matrix2x2::Assign(_values, source._values); } + MC2x2Base() { } + MC2x2Base(const MC2x2Base<ValType>& source) { Matrix2x2::Assign(_values, source._values); } template<typename T> - explicit MC2x2(const T source[4]) { Matrix2x2::Assign(_values, source); } - MC2x2(double m00, double m01, double m10, double m11) { + explicit MC2x2Base(const T source[4]) { Matrix2x2::Assign(_values, source); } + MC2x2Base(ValType m00, ValType m01, ValType m10, ValType m11) { _values[0] = m00; _values[1] = m01; _values[2] = m10; _values[3] = m11; } - MC2x2(std::complex<double> m00, std::complex<double> m01, std::complex<double> m10, std::complex<double> m11) { + MC2x2Base(std::complex<ValType> m00, std::complex<ValType> m01, std::complex<ValType> m10, std::complex<ValType> m11) { _values[0] = m00; _values[1] = m01; _values[2] = m10; _values[3] = m11; } - MC2x2& operator=(const MC2x2& source) + MC2x2Base<ValType>& operator=(const MC2x2Base<ValType>& source) { Matrix2x2::Assign(_values, source._values); return *this; } - MC2x2& operator+=(const MC2x2& rhs) + MC2x2Base<ValType>& operator+=(const MC2x2Base<ValType>& rhs) { Matrix2x2::Add(_values, rhs._values); return *this; } - MC2x2& operator*=(double rhs) + MC2x2Base<ValType>& operator-=(const MC2x2Base<ValType>& rhs) + { + Matrix2x2::Subtract(_values, rhs._values); + return *this; + } + 
MC2x2Base<ValType>& operator*=(const MC2x2Base<ValType>& rhs) + { + MC2x2Base<ValType> dest; + Matrix2x2::ATimesB(dest._values, _values, rhs._values); + *this = dest; + return *this; + } + MC2x2Base<ValType>& operator*=(ValType rhs) { Matrix2x2::ScalarMultiply(_values, rhs); return *this; } - MC2x2& operator/=(double rhs) + MC2x2Base<ValType>& operator/=(ValType rhs) { Matrix2x2::ScalarMultiply(_values, 1.0/rhs); return *this; } - const std::complex<double>& operator[](size_t index) const { return _values[index]; } - std::complex<double>& operator[](size_t index) { return _values[index]; } - const double& IndexReal(size_t index) const { return reinterpret_cast<const double(&)[2]>(_values[index/2])[index%2]; } - double& IndexReal(size_t index) { return reinterpret_cast<double(&)[2]>(_values[index/2])[index%2]; } - static MC2x2 Zero() + const std::complex<ValType>& operator[](size_t index) const { return _values[index]; } + std::complex<ValType>& operator[](size_t index) { return _values[index]; } + const ValType& IndexReal(size_t index) const { return reinterpret_cast<const ValType(&)[2]>(_values[index/2])[index%2]; } + ValType& IndexReal(size_t index) { return reinterpret_cast<ValType(&)[2]>(_values[index/2])[index%2]; } + static MC2x2Base<ValType> Zero() { - return MC2x2(0.0, 0.0, 0.0, 0.0); + return MC2x2Base<ValType>(0.0, 0.0, 0.0, 0.0); } - static MC2x2 Unity() + static MC2x2Base<ValType> Unity() { - return MC2x2(1.0, 0.0, 0.0, 1.0); + return MC2x2Base(1.0, 0.0, 0.0, 1.0); } - static MC2x2 NaN() + static MC2x2Base<ValType> NaN() { - return MC2x2( - std::complex<double>(std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN()), - std::complex<double>(std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN()), - std::complex<double>(std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN()), - std::complex<double>(std::numeric_limits<double>::quiet_NaN(), 
std::numeric_limits<double>::quiet_NaN())); + return MC2x2Base<ValType>( + std::complex<ValType>(std::numeric_limits<ValType>::quiet_NaN(), std::numeric_limits<ValType>::quiet_NaN()), + std::complex<ValType>(std::numeric_limits<ValType>::quiet_NaN(), std::numeric_limits<ValType>::quiet_NaN()), + std::complex<ValType>(std::numeric_limits<ValType>::quiet_NaN(), std::numeric_limits<ValType>::quiet_NaN()), + std::complex<ValType>(std::numeric_limits<ValType>::quiet_NaN(), std::numeric_limits<ValType>::quiet_NaN())); } - std::complex<double>* Data() { return _values; } - const std::complex<double>* Data() const { return _values; } - MC2x2 Multiply(const MC2x2& rhs) const + std::complex<ValType>* Data() { return _values; } + const std::complex<ValType>* Data() const { return _values; } + MC2x2Base<ValType> Multiply(const MC2x2Base<ValType>& rhs) const { - MC2x2 dest; + MC2x2Base<ValType> dest; Matrix2x2::ATimesB(dest._values, _values, rhs._values); return dest; } - MC2x2 MultiplyHerm(const MC2x2& rhs) const + MC2x2Base<ValType> MultiplyHerm(const MC2x2Base<ValType>& rhs) const { - MC2x2 dest; + MC2x2Base dest; Matrix2x2::ATimesHermB(dest._values, _values, rhs._values); return dest; } - MC2x2 HermThenMultiply(const MC2x2& rhs) const + MC2x2Base<ValType> HermThenMultiply(const MC2x2Base<ValType>& rhs) const { - MC2x2 dest; + MC2x2Base<ValType> dest; Matrix2x2::HermATimesB(dest._values, _values, rhs._values); return dest; } - MC2x2 HermThenMultiplyHerm(const MC2x2& rhs) const + MC2x2Base<ValType> HermThenMultiplyHerm(const MC2x2Base<ValType>& rhs) const { - MC2x2 dest; + MC2x2Base<ValType> dest; Matrix2x2::HermATimesHermB(dest._values, _values, rhs._values); return dest; } - void AddWithFactorAndAssign(const MC2x2& rhs, double factor) + void AddWithFactorAndAssign(const MC2x2Base<ValType>& rhs, ValType factor) { Matrix2x2::MultiplyAdd(_values, rhs._values, factor); } @@ -367,19 +391,23 @@ public: { return Matrix2x2::Invert(_values); } - static void ATimesB(MC2x2& dest, 
const MC2x2& lhs, const MC2x2& rhs) + static void ATimesB(MC2x2Base<ValType>& dest, const MC2x2Base<ValType>& lhs, const MC2x2Base<ValType>& rhs) { Matrix2x2::ATimesB(dest._values, lhs._values, rhs._values); } - static void ATimesHermB(MC2x2& dest, const MC2x2& lhs, const MC2x2& rhs) + static void ATimesB(std::complex<ValType>* dest, const MC2x2Base<ValType>& lhs, const MC2x2Base<ValType>& rhs) + { + Matrix2x2::ATimesB(dest, lhs._values, rhs._values); + } + static void ATimesHermB(MC2x2Base<ValType>& dest, const MC2x2Base<ValType>& lhs, const MC2x2Base<ValType>& rhs) { Matrix2x2::ATimesHermB(dest._values, lhs._values, rhs._values); } - static void HermATimesB(MC2x2& dest, const MC2x2& lhs, const MC2x2& rhs) + static void HermATimesB(MC2x2Base<ValType>& dest, const MC2x2Base<ValType>& lhs, const MC2x2Base<ValType>& rhs) { Matrix2x2::HermATimesB(dest._values, lhs._values, rhs._values); } - static void HermATimesHermB(MC2x2& dest, const MC2x2& lhs, const MC2x2& rhs) + static void HermATimesHermB(MC2x2Base<ValType>& dest, const MC2x2Base<ValType>& lhs, const MC2x2Base<ValType>& rhs) { Matrix2x2::HermATimesHermB(dest._values, lhs._values, rhs._values); } @@ -390,11 +418,11 @@ public: << _values[2] << ", " << _values[3]; return str.str(); } - void CopyValues(std::complex<double>* values) const + void CopyValues(std::complex<ValType>* values) const { Matrix2x2::Assign(values, _values); } - void EigenValues(std::complex<double> &e1, std::complex<double> &e2) const + void EigenValues(std::complex<ValType> &e1, std::complex<ValType> &e2) const { Matrix2x2::EigenValues(_values, e1, e2); } @@ -408,7 +436,10 @@ public: ); } private: - std::complex<double> _values[4]; + std::complex<ValType> _values[4]; }; +using MC2x2 = MC2x2Base<double>; +using MC2x2F = MC2x2Base<float>; + #endif diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/MultiDirSolver.h b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/MultiDirSolver.h index 
a18de7c186ebbd7b66824c2022f1f45bcd33eb49..ddb5b8f397f8b581790a38777a89df98a2570630 100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/MultiDirSolver.h +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/MultiDirSolver.h @@ -82,10 +82,16 @@ public: void set_accuracy(double accuracy) { _accuracy = accuracy; } + double get_accuracy() const { return _accuracy; } void set_constraint_accuracy(double constraintAccuracy) { _constraintAccuracy = constraintAccuracy; } void set_step_size(double stepSize) { _stepSize = stepSize; } + double get_step_size() const { return _stepSize; } + + void set_detect_stalling(bool detectStalling) { _detectStalling = detectStalling; } + + bool get_detect_stalling() const { return _detectStalling; } void add_constraint(Constraint* constraint) { _constraints.push_back(constraint); } @@ -110,18 +116,22 @@ private: void makeStep(const std::vector<std::vector<DComplex> >& solutions, std::vector<std::vector<DComplex> >& nextSolutions) const; + + bool detectStall(size_t iteration, const std::vector<double>& stepMagnitudes) const; void makeSolutionsFinite(std::vector<std::vector<DComplex> >& solutions, size_t perPol) const; /** * Assign the solutions in nextSolutions to the solutions. - * @returns whether the solutions have been converged. + * @returns whether the solutions have converged. 
Appends the current step magnitude to step_magnitudes */ + template<size_t NPol> bool assignSolutions( std::vector<std::vector<DComplex> >& solutions, std::vector<std::vector<DComplex> >& nextSolutions, bool useConstraintAccuracy, - double& sum, double& normSum + double& sum, + std::vector<double>& step_magnitudes ) const; size_t _nAntennas, _nDirections, _nChannels, _nChannelBlocks; @@ -131,6 +141,7 @@ private: size_t _maxIterations; double _accuracy, _constraintAccuracy; double _stepSize; + bool _detectStalling; bool _phaseOnly; std::vector<Constraint*> _constraints; diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationAndDiagonalConstraint.h b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationAndDiagonalConstraint.h index 996f95130c2860eded0717cdeb908fcceff87f1d..cd4934fdf09ed61b077a5a57791987a5e1da12d2 100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationAndDiagonalConstraint.h +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationAndDiagonalConstraint.h @@ -8,23 +8,27 @@ #endif #include <vector> +#include <ostream> namespace LOFAR { class RotationAndDiagonalConstraint : public Constraint { public: - RotationAndDiagonalConstraint(); + RotationAndDiagonalConstraint() {}; virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, std::ostream* statStream); - void initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks); + virtual void InitializeDimensions(size_t nAntennas, + size_t nDirections, + size_t nChannelBlocks); + + virtual void SetWeights(const std::vector<double>& weights); private: - size_t _nAntennas, _nDirections, _nChannelBlocks; - std::vector<Constraint::Result> _resTemplate; + std::vector<Constraint::Result> _res; }; } // namespace LOFAR diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationConstraint.h b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationConstraint.h index ddc25f71a59268d4c19c19654d5d3879d8cefaf3..c097b31b8e7e184985da279e4f192184ed5aea9b 
100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationConstraint.h +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/RotationConstraint.h @@ -8,26 +8,30 @@ #endif #include <vector> +#include <ostream> namespace LOFAR { class RotationConstraint : public Constraint { public: - RotationConstraint(); + RotationConstraint() {}; virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, std::ostream* statStream); - void initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks); + virtual void InitializeDimensions(size_t nAntennas, + size_t nDirections, + size_t nChannelBlocks); + + virtual void SetWeights(const std::vector<double>& weights); /* Compute the rotation from a 2x2 full jones solution */ static double get_rotation(std::complex<double>* data); private: - size_t _nAntennas, _nDirections, _nChannelBlocks; - std::vector<Constraint::Result> _resTemplate; + std::vector<Constraint::Result> _res; }; } // namespace LOFAR diff --git a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/ScreenConstraint.h b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/ScreenConstraint.h index d22abd5353344c902953764d754304f68586207d..bc4250fc4919c8662b8fe712da37ab3b514fc1bc 100644 --- a/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/ScreenConstraint.h +++ b/CEP/DP3/DPPP_DDECal/include/DPPP_DDECal/ScreenConstraint.h @@ -10,6 +10,8 @@ #include <cmath> #include <vector> #include <memory> +#include <ostream> + namespace LOFAR { class ParameterSet; class ScreenConstraint : public Constraint @@ -21,8 +23,13 @@ class ScreenConstraint : public Constraint public: ScreenConstraint(const ParameterSet& parset, const string& prefix); - void initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks, const double* frequencies); - virtual std::vector<Constraint::Result> Apply(std::vector<std::vector<MultiDirSolver::DComplex> >& solutions,double time); + + /** Initialize metadata with frequencies, resize some members. 
// NOTE(review): these #includes precede the include guard. Harmless as long
// as both headers carry their own guards, but convention puts the guard first.
#include "Constraint.h"
#include "KernelSmoother.h"

#ifndef SMOOTHNESS_CONSTRAINT_H
#define SMOOTHNESS_CONSTRAINT_H

/**
 * Constraint that enforces spectral smoothness of the solutions: each
 * antenna/direction/polarization series is smoothed over the channel-block
 * frequencies with a fixed-bandwidth kernel (see KernelSmoother).
 * Apply() and the remaining member functions are defined in the
 * accompanying .cc file.
 */
class SmoothnessConstraint : public Constraint
{
public:
  typedef std::complex<double> dcomplex;
  typedef KernelSmoother<dcomplex, double> Smoother;

  /** @param bandwidthHz Kernel bandwidth in Hz over which solutions are smoothed. */
  SmoothnessConstraint(double bandwidthHz);

  /** Smooth the given solutions in place; defined out of line. */
  std::vector<Constraint::Result> Apply(
    std::vector<std::vector<dcomplex> >& solutions, double, std::ostream* statStream) final override;

  // Store per antenna x channel-block weights; consumed during Apply().
  void SetWeights(const std::vector<double> &weights) final override {
    _weights = weights;
  }

  /** Supply the channel-block frequencies; call after InitializeDimensions(). */
  void Initialize(const double* frequencies);

  virtual void InitializeDimensions(size_t nAntennas,
                                    size_t nDirections,
                                    size_t nChannelBlocks) final override;

  /** Per-thread scratch: one smoother plus data/weight buffers of n channel blocks. */
  struct FitData
  {
    FitData(const double* frequencies, size_t n, Smoother::KernelType kernelType, double kernelBandwidth)
    : smoother(frequencies, n, kernelType, kernelBandwidth),
      data(n), weight(n)
    { }

    Smoother smoother;
    std::vector<dcomplex> data;
    std::vector<double> weight;
  };
  // NOTE(review): the members below are public (no access specifier follows
  // the struct) — presumably for test access; confirm before tightening.
  std::vector<FitData> _fitData;
  std::vector<double> _frequencies, _weights;
  Smoother::KernelType _kernelType;
  double _bandwidth;
};

#endif
+ */ + void initialize(const double* frequencies); + /** Propagate weights to the phase fitters */ + virtual void SetWeights(const std::vector<double>& weights) final override; + protected: virtual void initializeChild() { } void applyReferenceAntenna(std::vector<std::vector<dcomplex> >& solutions) const; Mode _mode; - size_t _nAntennas, _nDirections, _nChannelBlocks; std::vector<PhaseFitter> _phaseFitters; + std::vector<double> _weights; }; class TECConstraint : public TECConstraintBase @@ -45,7 +53,8 @@ public: virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, + std::ostream* statStream) override; }; class ApproximateTECConstraint : public TECConstraint @@ -58,15 +67,21 @@ public: _maxApproxIters(50) { } - virtual void PrepareIteration(bool hasReachedPrecision, size_t iteration, bool finalIter) { + virtual void PrepareIteration(bool hasReachedPrecision, size_t iteration, bool finalIter) final override { _finishedApproximateStage = hasReachedPrecision || finalIter || iteration >= _maxApproxIters; + for(size_t thread=0; thread!=_phaseFitters.size(); ++thread) { + std::fill(_phaseFitters[thread].WeightData(), + _phaseFitters[thread].WeightData()+_phaseFitters[thread].Size(), + 1.0); + } } - virtual bool Satisfied() const { return _finishedApproximateStage; } + virtual bool Satisfied() const final override { return _finishedApproximateStage; } virtual std::vector<Result> Apply( std::vector<std::vector<dcomplex> >& solutions, - double time); + double time, + std::ostream* statStream) final override; void SetFittingChunkSize(size_t fittingChunkSize) { _fittingChunkSize = fittingChunkSize; } @@ -74,7 +89,7 @@ public: void SetMaxApproximatingIterations(size_t maxApproxIters) { _maxApproxIters = maxApproxIters; } protected: - virtual void initializeChild(); + virtual void initializeChild() final override; private: bool _finishedApproximateStage; diff --git a/CEP/DP3/DPPP_DDECal/src/CMakeLists.txt 
b/CEP/DP3/DPPP_DDECal/src/CMakeLists.txt index aa6d71ffd6ce4e2e6b13cb0f53368881b0b41544..480c5c11c2446020cec86c5437db3bf86bd21b72 100644 --- a/CEP/DP3/DPPP_DDECal/src/CMakeLists.txt +++ b/CEP/DP3/DPPP_DDECal/src/CMakeLists.txt @@ -6,7 +6,8 @@ lofar_add_library(dppp_ddecal Package__Version.cc DDECal.cc Register.cc Stopwatch.cc KLFitter.cc DDECal.cc MultiDirSolver.cc Constraint.cc PiercePoint.cc - ScreenConstraint.cc TECConstraint.cc RotationConstraint.cc + ScreenConstraint.cc SmoothnessConstraint.cc + TECConstraint.cc RotationConstraint.cc RotationAndDiagonalConstraint.cc ) diff --git a/CEP/DP3/DPPP_DDECal/src/Constraint.cc b/CEP/DP3/DPPP_DDECal/src/Constraint.cc index 09ac961e0778eef207c8c682620500d30eed4920..b60f4068bc2eb51a63c5f5085a65b723af3494aa 100644 --- a/CEP/DP3/DPPP_DDECal/src/Constraint.cc +++ b/CEP/DP3/DPPP_DDECal/src/Constraint.cc @@ -5,7 +5,8 @@ #endif std::vector<Constraint::Result> PhaseOnlyConstraint::Apply( - std::vector<std::vector<dcomplex> >& solutions, double) + std::vector<std::vector<dcomplex> >& solutions, double, + std::ostream* statStream) { for (size_t ch=0; ch<solutions.size(); ++ch) { for (size_t solIndex=0; solIndex<solutions[ch].size(); ++solIndex) { @@ -17,7 +18,8 @@ std::vector<Constraint::Result> PhaseOnlyConstraint::Apply( } std::vector<Constraint::Result> AmplitudeOnlyConstraint::Apply( - std::vector<std::vector<dcomplex> >& solutions, double) + std::vector<std::vector<dcomplex> >& solutions, double, + std::ostream* statStream) { for (size_t ch=0; ch<solutions.size(); ++ch) { for (size_t solIndex=0; solIndex<solutions[ch].size(); ++solIndex) { @@ -29,7 +31,8 @@ std::vector<Constraint::Result> AmplitudeOnlyConstraint::Apply( } std::vector<Constraint::Result> DiagonalConstraint::Apply( - std::vector<std::vector<dcomplex> >& solutions, double) + std::vector<std::vector<dcomplex> >& solutions, double, + std::ostream* statStream) { if(_polsPerSolution == 4) { @@ -45,7 +48,8 @@ std::vector<Constraint::Result> DiagonalConstraint::Apply( 
} std::vector<Constraint::Result> CoreConstraint::Apply( - std::vector<std::vector<dcomplex> >& solutions, double) + std::vector<std::vector<dcomplex> >& solutions, double, + std::ostream* statStream) { for (uint ch=0; ch<solutions.size(); ++ch) { std::vector<dcomplex> coreSolutions(_nDirections, 0.0); diff --git a/CEP/DP3/DPPP_DDECal/src/DDECal.cc b/CEP/DP3/DPPP_DDECal/src/DDECal.cc index 522049c290d8d498a67a387029ad26c4d970f956..a6b1fc64f0d34fddff65ac18248a596b5347188b 100644 --- a/CEP/DP3/DPPP_DDECal/src/DDECal.cc +++ b/CEP/DP3/DPPP_DDECal/src/DDECal.cc @@ -37,6 +37,7 @@ #include <DPPP_DDECal/TECConstraint.h> #include <DPPP_DDECal/RotationConstraint.h> #include <DPPP_DDECal/RotationAndDiagonalConstraint.h> +#include <DPPP_DDECal/SmoothnessConstraint.h> #include <ParmDB/ParmDB.h> #include <ParmDB/ParmValue.h> @@ -79,6 +80,7 @@ namespace LOFAR { const string& prefix) : itsInput (input), itsName (prefix), + itsUseModelColumn(parset.getBool (prefix + "usemodelcolumn", false)), itsAvgTime (0), itsSols (), itsH5ParmName (parset.getString (prefix + "h5parm", @@ -93,6 +95,7 @@ namespace LOFAR { itsNChan (parset.getInt (prefix + "nchan", 1)), itsUVWFlagStep (input, parset, prefix), itsCoreConstraint(parset.getDouble (prefix + "coreconstraint", 0.0)), + itsSmoothnessConstraint(parset.getDouble (prefix + "smoothnessconstraint", 0.0)), itsScreenCoreConstraint(parset.getDouble (prefix + "tecscreen.coreconstraint", 0.0)), itsFullMatrixMinimalization(false), itsApproximateTEC(false), @@ -102,33 +105,39 @@ namespace LOFAR { ss << parset; itsParsetString = ss.str(); - vector<string> strDirections = - parset.getStringVector (prefix + "directions", - vector<string> ()); - itsMultiDirSolver.set_max_iterations(parset.getInt(prefix + "maxiter", 50)); - double tolerance = parset.getDouble(prefix + "tolerance", 1.e-5); + double tolerance = parset.getDouble(prefix + "tolerance", 1.e-4); itsMultiDirSolver.set_accuracy(tolerance); 
itsMultiDirSolver.set_constraint_accuracy(parset.getDouble(prefix + "approxtolerance", tolerance*10.0)); itsMultiDirSolver.set_step_size(parset.getDouble(prefix + "stepsize", 0.2)); + itsMultiDirSolver.set_detect_stalling(parset.getBool(prefix + "detectstalling", true)); if(!itsStatFilename.empty()) itsStatStream.reset(new std::ofstream(itsStatFilename)); - // Default directions are all patches - if (strDirections.empty()) { - string sourceDBName = parset.getString(prefix+"sourcedb"); - BBS::SourceDB sourceDB(BBS::ParmDBMeta("", sourceDBName), false); - vector<string> patchNames = makePatchList(sourceDB, vector<string>()); - itsDirections.resize(patchNames.size()); - for (uint i=0; i<patchNames.size(); ++i) { - itsDirections[i] = vector<string>(1, patchNames[i]); - } + vector<string> strDirections; + if (itsUseModelColumn) { + itsModelData.resize(itsSolInt); + strDirections.push_back("pointing"); + itsDirections.push_back(vector<string>()); } else { - itsDirections.resize(strDirections.size()); - for (uint i=0; i<strDirections.size(); ++i) { - ParameterValue dirStr(strDirections[i]); - itsDirections[i] = dirStr.getStringVector(); + strDirections = parset.getStringVector (prefix + "directions", + vector<string> ()); + // Default directions are all patches + if (strDirections.empty()) { + string sourceDBName = parset.getString(prefix+"sourcedb"); + BBS::SourceDB sourceDB(BBS::ParmDBMeta("", sourceDBName), false); + vector<string> patchNames = makePatchList(sourceDB, vector<string>()); + itsDirections.resize(patchNames.size()); + for (uint i=0; i<patchNames.size(); ++i) { + itsDirections[i] = vector<string>(1, patchNames[i]); + } + } else { + itsDirections.resize(strDirections.size()); + for (uint i=0; i<strDirections.size(); ++i) { + ParameterValue dirStr(strDirections[i]); + itsDirections[i] = dirStr.getStringVector(); + } } } @@ -139,6 +148,10 @@ namespace LOFAR { itsConstraints.push_back(casacore::CountedPtr<Constraint>( new CoreConstraint())); } + 
if(itsSmoothnessConstraint != 0.0) { + itsConstraints.push_back(casacore::CountedPtr<Constraint>( + new SmoothnessConstraint(itsSmoothnessConstraint))); + } switch(itsMode) { case GainCal::COMPLEXGAIN: itsConstraints.push_back(casacore::CountedPtr<Constraint>( @@ -234,9 +247,13 @@ namespace LOFAR { } const size_t nDir = itsDirections.size(); - itsPredictSteps.resize(nDir); - for (size_t dir=0; dir<nDir; ++dir) { - itsPredictSteps[dir]=Predict(input, parset, prefix, itsDirections[dir]); + if (itsUseModelColumn) { + ASSERT(nDir == 1); + } else { + itsPredictSteps.resize(nDir); + for (size_t dir=0; dir<nDir; ++dir) { + itsPredictSteps[dir]=Predict(input, parset, prefix, itsDirections[dir]); + } } } @@ -255,10 +272,10 @@ namespace LOFAR { info() = infoIn; info().setNeedVisData(); - const size_t nDir=itsDirections.size(); + const size_t nDir = itsDirections.size(); itsUVWFlagStep.updateInfo(infoIn); - for (size_t dir=0; dir<nDir; ++dir) { + for (size_t dir=0; dir<itsPredictSteps.size(); ++dir) { itsPredictSteps[dir].updateInfo(infoIn); } @@ -281,8 +298,8 @@ namespace LOFAR { itsDataResultStep = ResultStep::ShPtr(new ResultStep()); itsUVWFlagStep.setNextStep(itsDataResultStep); - itsResultSteps.resize(nDir); - for (size_t dir=0; dir<nDir; ++dir) { + itsResultSteps.resize(itsPredictSteps.size()); + for (size_t dir=0; dir<itsPredictSteps.size(); ++dir) { itsResultSteps[dir] = MultiResultStep::ShPtr(new MultiResultStep(itsSolInt)); itsPredictSteps[dir].setNextStep(itsResultSteps[dir]); } @@ -314,8 +331,18 @@ namespace LOFAR { itsH5Parm.addAntennas(antennaNames, antennaPos); std::vector<std::pair<double, double> > sourcePositions(itsDirections.size()); - for (uint i=0; i<itsDirections.size(); ++i) { - sourcePositions[i] = itsPredictSteps[i].getFirstDirection(); + if (itsUseModelColumn) { + MDirection dirJ2000(MDirection::Convert(infoIn.phaseCenter(), + MDirection::J2000)()); + Quantum<Vector<Double> > angles = dirJ2000.getAngle(); + sourcePositions[0] = std::pair<double, 
double> ( + angles.getBaseValue()[0], + angles.getBaseValue()[1]); + + } else { + for (uint i=0; i<itsDirections.size(); ++i) { + sourcePositions[i] = itsPredictSteps[i].getFirstDirection(); + } } itsH5Parm.addSources(getDirectionNames(), sourcePositions); @@ -326,6 +353,7 @@ namespace LOFAR { itsConstraintSols.resize(nSolTimes); size_t nChannelBlocks = info().nchan()/itsNChan; + itsChanBlockStart.resize(nChannelBlocks+1); itsChanBlockFreqs.resize(nChannelBlocks); for(size_t chBlock=0; chBlock!=nChannelBlocks; ++chBlock) { const size_t @@ -336,19 +364,29 @@ namespace LOFAR { info().chanFreqs().data()+channelIndexStart, info().chanFreqs().data()+channelIndexEnd, 0.0) / curChannelBlockSize; + itsChanBlockStart[chBlock] = channelIndexStart; itsChanBlockFreqs[chBlock] = meanfreq; } + itsChanBlockStart[itsChanBlockStart.size()-1] = info().nchan(); + + itsWeights.assign(itsChanBlockFreqs.size()*info().nantenna(), 0.0); for (uint i=0; i<itsConstraints.size();++i) { + // Initialize the constraint with some common metadata + itsConstraints[i]->InitializeDimensions(info().antennaNames().size(), + itsDirections.size(), + nChannelBlocks); + // Different constraints need different information. Determine if the constraint is // of a type that needs more information, and if so initialize the constraint. 
CoreConstraint* coreConstraint = dynamic_cast<CoreConstraint*>(itsConstraints[i].get()); if(coreConstraint != 0) { + // Take antenna with index 0 as reference station double - refX = antennaPos[i][0], - refY = antennaPos[i][1], - refZ = antennaPos[i][2]; + refX = antennaPos[0][0], + refY = antennaPos[0][1], + refZ = antennaPos[0][2]; std::set<size_t> coreAntennaIndices; const double coreDistSq = itsCoreConstraint*itsCoreConstraint; for(size_t ant=0; ant!=antennaPos.size(); ++ant) @@ -361,21 +399,13 @@ namespace LOFAR { if(distSq <= coreDistSq) coreAntennaIndices.insert(ant); } - coreConstraint->initialize(info().antennaNames().size(), - itsDirections.size(), - info().nchan(), - coreAntennaIndices); + coreConstraint->initialize(coreAntennaIndices); } ScreenConstraint* screenConstraint = dynamic_cast<ScreenConstraint*>(itsConstraints[i].get()); if(screenConstraint != 0) { - screenConstraint->initialize( - info().antennaNames().size(), - itsDirections.size(), - nChannelBlocks, - &(itsChanBlockFreqs[0]) - ); + screenConstraint->initialize(&(itsChanBlockFreqs[0])); screenConstraint->setAntennaPositions(antennaPos); screenConstraint->setDirections(sourcePositions); screenConstraint->initPiercePoints(); @@ -403,24 +433,15 @@ namespace LOFAR { } TECConstraintBase* tecConstraint = dynamic_cast<TECConstraintBase*>(itsConstraints[i].get()); - if(tecConstraint != 0) + if(tecConstraint != nullptr) { - tecConstraint->initialize(info().antennaNames().size(), - itsDirections.size(), - nChannelBlocks, - &(itsChanBlockFreqs[0])); - } - - RotationAndDiagonalConstraint* rotationAndDiagonalConstraint = dynamic_cast<RotationAndDiagonalConstraint*>(itsConstraints[i].get()); - if(rotationAndDiagonalConstraint != 0) - { - rotationAndDiagonalConstraint->initialize(info().antennaNames().size(), itsDirections.size(), nChannelBlocks); - } - RotationConstraint* rotationConstraint = dynamic_cast<RotationConstraint*>(itsConstraints[i].get()); - if(rotationConstraint != 0) - { - 
rotationConstraint->initialize(info().antennaNames().size(), itsDirections.size(), nChannelBlocks); + tecConstraint->initialize(&itsChanBlockFreqs[0]); } + SmoothnessConstraint* sConstraint = dynamic_cast<SmoothnessConstraint*>(itsConstraints[i].get()); + if(sConstraint != nullptr) + { + sConstraint->Initialize(&itsChanBlockFreqs[0]); + } } uint nSt = info().antennaNames().size(); @@ -434,15 +455,21 @@ namespace LOFAR { void DDECal::show (std::ostream& os) const { - os << "DDECal " << itsName << endl; - os << " H5Parm: " << itsH5ParmName <<endl; - os << " solint: " << itsSolInt <<endl; - os << " nchan: " << itsNChan <<endl; - os << " directions: " << itsDirections << endl; - os << " mode (constraints): " << GainCal::calTypeToString(itsMode) - << endl; - os << " coreconstraint: " << itsCoreConstraint << endl; - os << " approximate fitter: " << itsApproximateTEC << endl; + os + << "DDECal " << itsName << '\n' + << " H5Parm: " << itsH5ParmName << '\n' + << " solint: " << itsSolInt << '\n' + << " nchan: " << itsNChan << '\n' + << " directions: " << itsDirections << '\n' + << " use model column: " << boolalpha << itsUseModelColumn << '\n' + << " tolerance: " << itsMultiDirSolver.get_accuracy() << '\n' + << " max iter: " << itsMultiDirSolver.max_iterations() << '\n' + << " detect stalling: " << std::boolalpha << itsMultiDirSolver.get_detect_stalling() << '\n' + << " step size: " << itsMultiDirSolver.get_step_size() << '\n' + << " mode (constraints): " << GainCal::calTypeToString(itsMode) << '\n' + << " coreconstraint: " << itsCoreConstraint << '\n' + << " smoothnessconstraint:" << itsSmoothnessConstraint << '\n' + << " approximate fitter: " << itsApproximateTEC << '\n'; for (uint i=0; i<itsPredictSteps.size(); ++i) { itsPredictSteps[i].show(os); } @@ -586,11 +613,17 @@ namespace LOFAR { // if(itsPredictSteps.size() < LOFAR::OpenMP::maxThreads()) // LOFAR::OpenMP::setNested(true); + if (itsUseModelColumn) { + itsInput->getModelData (itsBufs[itsStepInSolInt].getRowNrs(), + 
itsModelData[itsStepInSolInt]); + itsModelDataPtrs[itsStepInSolInt][0] = itsModelData[itsStepInSolInt].data(); + } else { #pragma omp parallel for schedule(dynamic) if(itsPredictSteps.size()>1) - for (size_t dir=0; dir<itsPredictSteps.size(); ++dir) { - itsPredictSteps[dir].process(itsBufs[itsStepInSolInt]); - itsModelDataPtrs[itsStepInSolInt][dir] = - itsResultSteps[dir]->get()[itsStepInSolInt].getData().data(); + for (size_t dir=0; dir<itsPredictSteps.size(); ++dir) { + itsPredictSteps[dir].process(itsBufs[itsStepInSolInt]); + itsModelDataPtrs[itsStepInSolInt][dir] = + itsResultSteps[dir]->get()[itsStepInSolInt].getData().data(); + } } // Handle weights and flags @@ -598,35 +631,56 @@ namespace LOFAR { const size_t nCh = info().nchan(); const size_t nCr = 4; + size_t nchanblocks = itsChanBlockFreqs.size(); + size_t chanblock = 0; + + double weightFactor = 1./(nCh*(info().nantenna()-1)*nCr*itsSolInt); + for (size_t ch=0; ch<nCh; ++ch) { + if (ch == itsChanBlockStart[chanblock+1]) { + chanblock++; + } for (size_t bl=0; bl<nBl; ++bl) { for (size_t cr=0; cr<nCr; ++cr) { if (itsBufs[itsStepInSolInt].getFlags().data()[bl*nCr*nCh+ch*nCr+cr]) { // Flagged points: set data and model to 0 itsDataPtrs[itsStepInSolInt][bl*nCr*nCh+ch*nCr+cr] = 0; - for (size_t dir=0; dir<itsPredictSteps.size(); ++dir) { + for (size_t dir=0; dir<itsModelDataPtrs[0].size(); ++dir) { itsModelDataPtrs[itsStepInSolInt][dir][bl*nCr*nCh+ch*nCr+cr] = 0; } } else { // Premultiply non-flagged data with sqrt(weight) - double weight = sqrt(itsBufs[itsStepInSolInt].getWeights().data()[bl*nCr*nCh+ch*nCr+cr]); - itsDataPtrs[itsStepInSolInt][bl*nCr*nCh+ch*nCr+cr] *= weight; - for (size_t dir=0; dir<itsPredictSteps.size(); ++dir) { - itsModelDataPtrs[itsStepInSolInt][dir][bl*nCr*nCh+ch*nCr+cr] *= weight; + double weight = itsBufs[itsStepInSolInt].getWeights().data()[bl*nCr*nCh+ch*nCr+cr]; + itsDataPtrs[itsStepInSolInt][bl*nCr*nCh+ch*nCr+cr] *= sqrt(weight); + itsWeights[info().getAnt1()[bl]*nchanblocks + 
chanblock] += weight; + itsWeights[info().getAnt2()[bl]*nchanblocks + chanblock] += weight; + for (size_t dir=0; dir<itsModelDataPtrs[0].size(); ++dir) { + itsModelDataPtrs[itsStepInSolInt][dir][bl*nCr*nCh+ch*nCr+cr] *= sqrt(weight); } } } } } + for (auto& weight: itsWeights) { + weight *= weightFactor; + } + itsTimerPredict.stop(); itsAvgTime += itsAvgTime + bufin.getTime(); if (itsStepInSolInt==itsSolInt-1) { + for (uint constraint_num = 0; constraint_num < itsConstraints.size(); ++constraint_num) { + itsConstraints[constraint_num]->SetWeights(itsWeights); + } + doSolve(); - itsStepInSolInt=0; + + // Clean up, prepare for next iteration + itsStepInSolInt=0; itsAvgTime=0; + itsWeights.assign(itsWeights.size(), 0.); for (size_t dir=0; dir<itsResultSteps.size(); ++dir) { itsResultSteps[dir]->clear(); } @@ -813,15 +867,29 @@ namespace LOFAR { vector<double> sols(numSols); vector<double>::iterator nextpos = sols.begin(); for (uint time=0; time<itsSols.size(); ++time) { + ASSERTSTR(itsConstraintSols[time].size()==itsConstraintSols[0].size(), "Constraints did not produce enough output at time step "<<time); nextpos = std::copy( itsConstraintSols[time][constraintNum][solNameNum].vals.begin(), itsConstraintSols[time][constraintNum][solNameNum].vals.end(), nextpos); } + // Put solution weights in a contiguous piece of memory + vector<double> weights; + if (!itsConstraintSols[0][constraintNum][solNameNum].weights.empty()) { + weights.resize(numSols); + vector<double>::iterator nextpos = weights.begin(); + for (uint time=0; time<itsSols.size(); ++time) { + nextpos = std::copy( + itsConstraintSols[time][constraintNum][solNameNum].weights.begin(), + itsConstraintSols[time][constraintNum][solNameNum].weights.end(), + nextpos); + } + } + string solTabName = firstResult.name+"000"; H5Parm::SolTab soltab = itsH5Parm.createSolTab(solTabName, firstResult.name, axes); - soltab.setValues(sols, vector<double>(), + soltab.setValues(sols, weights, "CREATE by DPPP\n" + 
Version::getInfo<DPPPVersion>("DPPP", "top") + "\n" + "step " + itsName + " in parset: \n" + @@ -878,7 +946,6 @@ namespace LOFAR { soltab.setFreqs(chanBlockFreqs); soltab.setTimes(solTimes); - // End TODO } } } diff --git a/CEP/DP3/DPPP_DDECal/src/MultiDirSolver.cc b/CEP/DP3/DPPP_DDECal/src/MultiDirSolver.cc index ed7510194ddd86e00e1a0171820017a0fa3cacc1..69019b9f2810fd9b90924b1f9466b32274b03988 100644 --- a/CEP/DP3/DPPP_DDECal/src/MultiDirSolver.cc +++ b/CEP/DP3/DPPP_DDECal/src/MultiDirSolver.cc @@ -21,9 +21,9 @@ MultiDirSolver::MultiDirSolver() : _accuracy(1e-5), _constraintAccuracy(1e-4), _stepSize(0.2), + _detectStalling(true), _phaseOnly(false) -{ -} +{ } void MultiDirSolver::init(size_t nAntennas, size_t nDirections, @@ -75,42 +75,106 @@ void MultiDirSolver::makeSolutionsFinite(std::vector<std::vector<DComplex> >& so std::vector<DComplex>::iterator iter = solVector.begin(); for(size_t i=0; i!=n; ++i) { + bool hasNonFinite = false; for(size_t p=0; p!=perPol; ++p) { - if(!std::isfinite(iter->real()) || !std::isfinite(iter->imag())) - *iter = DComplex(1.0, 0.0); - ++iter; + hasNonFinite = hasNonFinite || !std::isfinite(iter->real()) || !std::isfinite(iter->imag()); } + if(hasNonFinite) + { + if(perPol == 4) + { + iter[0] = DComplex(1.0, 0.0); + iter[1] = DComplex(0.0, 0.0); + iter[2] = DComplex(0.0, 0.0); + iter[3] = DComplex(1.0, 0.0); + } + else { + for(size_t p=0; p!=perPol; ++p) + { + iter[p] = DComplex(1.0, 0.0); + } + } + } + iter += perPol; } } } +template<size_t NPol> bool MultiDirSolver::assignSolutions(std::vector<std::vector<DComplex> >& solutions, std::vector<std::vector<DComplex> >& nextSolutions, bool useConstraintAccuracy, - double& sum, double& normSum) const + double& avgAbsDiff, std::vector<double>& stepMagnitudes) const { - sum = 0.0; - normSum = 0.0; + avgAbsDiff = 0.0; // Calculate the norm of the difference between the old and new solutions size_t n = 0; for(size_t chBlock=0; chBlock<_nChannelBlocks; ++chBlock) { - n += 
solutions[chBlock].size(); - for(size_t i=0; i!=solutions[chBlock].size(); ++i) + for(size_t i=0; i!=solutions[chBlock].size(); i += NPol) { - double e = std::norm(nextSolutions[chBlock][i] - solutions[chBlock][i]); - normSum += e; - sum += std::norm(solutions[chBlock][i]); - - solutions[chBlock][i] = nextSolutions[chBlock][i]; + // A normalized squared difference is calculated between the solutions of this + // and the previous step: + // sqrt { 1/n sum over | (t1 - t0) t0^(-1) |_2 } + // This criterion is scale independent: all solutions can be scaled without + // affecting the number of iterations. Also, when the polarized version is given + // scalar matrices, it will use the same number of iterations as the scalar + // version. + if(NPol == 1) + { + if(solutions[chBlock][i] != 0.0) + { + double a = std::abs((nextSolutions[chBlock][i] - solutions[chBlock][i]) / solutions[chBlock][i]); + if(std::isfinite(a)) + { + avgAbsDiff += a; + ++n; + } + } + solutions[chBlock][i] = nextSolutions[chBlock][i]; + } + else { + MC2x2 s(&solutions[chBlock][i]), sInv(s); + if(sInv.Invert()) + { + MC2x2 ns(&nextSolutions[chBlock][i]); + ns -= s; + ns *= sInv; + double sumabs = 0.0; + for(size_t p=0; p!=NPol; ++p) + { + sumabs += std::abs(ns[p]); + } + if(std::isfinite(sumabs)) + { + avgAbsDiff += sumabs; + n += 4; + } + } + for(size_t p=0; p!=NPol; ++p) + { + solutions[chBlock][i+p] = nextSolutions[chBlock][i+p]; + } + } } } - sum /= n; - normSum /= n; + // The polarized version needs a factor of two normalization to make it work + // like the scalar version would and when given only scalar matrices. + //if(NPol == 4) + // avgSquaredDiff = sqrt(avgSquaredDiff*0.5/n) ; + //else + // avgSquaredDiff = sqrt(avgSquaredDiff/n); + + // The stepsize is taken out, so that a small stepsize won't cause + // a premature stopping criterion. + double stepMagnitude = (n==0 ? 
0 : avgAbsDiff/_stepSize/n); + stepMagnitudes.emplace_back(stepMagnitude); + if(useConstraintAccuracy) - return normSum*_stepSize/sum <= _constraintAccuracy; - else - return normSum*_stepSize/sum <= _accuracy; + return stepMagnitude <= _constraintAccuracy; + else { + return stepMagnitude <= _accuracy; + } } MultiDirSolver::SolveResult MultiDirSolver::processScalar(std::vector<Complex *>& data, @@ -163,9 +227,6 @@ MultiDirSolver::SolveResult MultiDirSolver::processScalar(std::vector<Complex *> } } - // TODO the data and model data needs to be preweighted. - // Maybe we can get a non-const pointer from DPPP, that saves copying/allocating - /// /// Start iterating /// @@ -173,7 +234,12 @@ MultiDirSolver::SolveResult MultiDirSolver::processScalar(std::vector<Complex *> bool hasConverged = false, hasPreviouslyConverged = false, - constraintsSatisfied = false; + constraintsSatisfied = false, + hasStalled = false; + + std::vector<double> stepMagnitudes; + stepMagnitudes.reserve(_maxIterations); + do { makeSolutionsFinite(solutions, 1); @@ -189,6 +255,12 @@ MultiDirSolver::SolveResult MultiDirSolver::processScalar(std::vector<Complex *> constraintsSatisfied = true; _timerConstrain.Start(); + + if(statStream) + { + (*statStream) << iteration << '\t'; + } + for(size_t i=0; i!=_constraints.size(); ++i) { // PrepareIteration() might change Satisfied(), and since we always want to @@ -196,29 +268,34 @@ MultiDirSolver::SolveResult MultiDirSolver::processScalar(std::vector<Complex *> // evaluate Satisfied() before preparing. 
constraintsSatisfied = _constraints[i]->Satisfied() && constraintsSatisfied; _constraints[i]->PrepareIteration(hasPreviouslyConverged, iteration, iteration+1 >= _maxIterations); - result._results[i] = _constraints[i]->Apply(nextSolutions, time); + result._results[i] = _constraints[i]->Apply(nextSolutions, time, statStream); } _timerConstrain.Pause(); if(!constraintsSatisfied) constrainedIterations = iteration+1; - double sum, normSum; - hasConverged = assignSolutions(solutions, nextSolutions, !constraintsSatisfied, sum, normSum); - if(statStream != nullptr) + double avgSquaredDiff; + hasConverged = assignSolutions<1>(solutions, nextSolutions, !constraintsSatisfied, avgSquaredDiff, stepMagnitudes); + if(statStream) { - (*statStream) << iteration << '\t' << normSum*_stepSize/sum << '\t' << normSum << '\n'; + (*statStream) << stepMagnitudes.back() << '\t' << avgSquaredDiff << '\n'; } iteration++; hasPreviouslyConverged = hasConverged || hasPreviouslyConverged; - - } while(iteration < _maxIterations && (!hasConverged || !constraintsSatisfied)); + + if (_detectStalling && constraintsSatisfied) + hasStalled = detectStall(iteration, stepMagnitudes); + + } while(iteration < _maxIterations && (!hasConverged || !constraintsSatisfied) && !hasStalled); - if(hasConverged) - result.iterations = iteration; + // When we have not converged yet, we set the nr of iterations to the max+1, so that + // non-converged iterations can be distinguished from converged ones. + if((!hasConverged || !constraintsSatisfied) && !hasStalled) + result.iterations = iteration+1; else - result.iterations = _maxIterations+1; + result.iterations = iteration; result.constraintIterations = constrainedIterations; return result; } @@ -298,7 +375,7 @@ void MultiDirSolver::performScalarIteration(size_t channelBlockIndex, // solve x^H in [g C] x^H = v bool success = solver.Solve(gTimesCs[ant].data(), vs[ant].data()); Matrix& x = vs[ant]; - if(success) + if(success && x(0, 0) != 0.) 
{ for(size_t d=0; d!=_nDirections; ++d) nextSolutions[ant*_nDirections + d] = x(d, 0); @@ -393,7 +470,14 @@ MultiDirSolver::SolveResult MultiDirSolver::processFullMatrix(std::vector<Comple /// Start iterating /// size_t iteration = 0, constrainedIterations = 0; - bool hasConverged = false, hasPreviouslyConverged = false, constraintsSatisfied = false; + bool hasConverged = false, + hasPreviouslyConverged = false, + constraintsSatisfied = false, + hasStalled = false; + + std::vector<double> step_magnitudes; + step_magnitudes.reserve(_maxIterations); + do { makeSolutionsFinite(solutions, 4); @@ -406,37 +490,58 @@ MultiDirSolver::SolveResult MultiDirSolver::processFullMatrix(std::vector<Comple } makeStep(solutions, nextSolutions); + + if(statStream) + { + (*statStream) << iteration << '\t'; + } constraintsSatisfied = true; for(size_t i=0; i!=_constraints.size(); ++i) { constraintsSatisfied = _constraints[i]->Satisfied() && constraintsSatisfied; _constraints[i]->PrepareIteration(hasPreviouslyConverged, iteration, iteration+1 >= _maxIterations); - result._results[i] = _constraints[i]->Apply(nextSolutions, time); + result._results[i] = _constraints[i]->Apply(nextSolutions, time, statStream); } if(!constraintsSatisfied) constrainedIterations = iteration+1; - double sum, normSum; - hasConverged = assignSolutions(solutions, nextSolutions, !constraintsSatisfied, sum, normSum); - if(statStream != nullptr) + double avgSquaredDiff; + hasConverged = assignSolutions<4>(solutions, nextSolutions, !constraintsSatisfied, avgSquaredDiff, step_magnitudes); + if(statStream) { - (*statStream) << iteration << '\t' << normSum*_stepSize/sum << '\t' << normSum << '\n'; + (*statStream) << step_magnitudes.back() << '\t' << avgSquaredDiff << '\n'; } iteration++; hasPreviouslyConverged = hasConverged || hasPreviouslyConverged; - } while(iteration < _maxIterations && (!hasConverged || !constraintsSatisfied)); - - if(hasConverged) - result.iterations = iteration; + + if (_detectStalling && 
constraintsSatisfied) + hasStalled = detectStall(iteration, step_magnitudes); + + } while(iteration < _maxIterations && (!hasConverged || !constraintsSatisfied) && !hasStalled); + + // When we have not converged yet, we set the nr of iterations to the max+1, so that + // non-converged iterations can be distinguished from converged ones. + if((!hasConverged || !constraintsSatisfied) && !hasStalled) + result.iterations = iteration+1; else - result.iterations = _maxIterations+1; + result.iterations = iteration; result.constraintIterations = constrainedIterations; return result; } +bool MultiDirSolver::detectStall(size_t iteration, const std::vector<double>& step_magnitudes) const +{ + if (iteration<30) { + return false; + } else { + return std::abs(step_magnitudes[iteration-1]/step_magnitudes[iteration-2]-1) < 1.e-4 && + std::abs(step_magnitudes[iteration-2]/step_magnitudes[iteration-3]-1) < 1.e-4; + } +} + void MultiDirSolver::performFullMatrixIteration(size_t channelBlockIndex, std::vector<Matrix>& gTimesCs, std::vector<Matrix>& vs, @@ -534,7 +639,7 @@ void MultiDirSolver::performFullMatrixIteration(size_t channelBlockIndex, // solve x^H in [g C] x^H = v bool success = solver.Solve(gTimesCs[ant].data(), vs[ant].data()); Matrix& x = vs[ant]; - if(success) + if(success && x(0, 0) != 0.) 
{ for(size_t d=0; d!=_nDirections; ++d) { @@ -545,8 +650,9 @@ void MultiDirSolver::performFullMatrixIteration(size_t channelBlockIndex, } } else { - for(size_t i=0; i!=_nDirections*4; ++i) - nextSolutions[ant*_nDirections + i] = std::numeric_limits<double>::quiet_NaN(); + for(size_t i=0; i!=_nDirections*4; ++i) { + nextSolutions[ant*_nDirections*4 + i] = std::numeric_limits<double>::quiet_NaN(); + } } } _timerSolve.Pause(); diff --git a/CEP/DP3/DPPP_DDECal/src/RotationAndDiagonalConstraint.cc b/CEP/DP3/DPPP_DDECal/src/RotationAndDiagonalConstraint.cc index b42267e6ae7725d506f116fcaf942a27efdd1095..47368347c677e7bf6081c479f4aef8b5395ac8f3 100644 --- a/CEP/DP3/DPPP_DDECal/src/RotationAndDiagonalConstraint.cc +++ b/CEP/DP3/DPPP_DDECal/src/RotationAndDiagonalConstraint.cc @@ -10,75 +10,103 @@ using namespace std; namespace LOFAR { -RotationAndDiagonalConstraint::RotationAndDiagonalConstraint(): - _nAntennas(0), _nDirections(0) -{ +void RotationAndDiagonalConstraint::InitializeDimensions(size_t nAntennas, + size_t nDirections, + size_t nChannelBlocks) { + Constraint::InitializeDimensions(nAntennas, nDirections, nChannelBlocks); + + assert(_nDirections == 1); + + _res.resize(3); + _res[0].vals.resize(_nAntennas*_nChannelBlocks); + _res[0].weights.resize(_nAntennas*_nChannelBlocks); + _res[0].axes="ant,freq"; + _res[0].dims.resize(2); + _res[0].dims[0]=_nAntennas; + _res[0].dims[1]=_nChannelBlocks; + _res[0].name="rotation"; + + _res[1].vals.resize(_nAntennas*_nChannelBlocks*2); + _res[1].weights.resize(_nAntennas*_nChannelBlocks*2); + _res[1].axes="ant,freq,pol"; + _res[1].dims.resize(3); + _res[1].dims[0]=_nAntennas; + _res[1].dims[1]=_nChannelBlocks; + _res[1].dims[2]=2; + _res[1].name="amplitude"; + + _res[2] = _res[1]; + _res[2].name="phase"; } -void RotationAndDiagonalConstraint::initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks) { - _nAntennas = nAntennas; - _nDirections = nDirections; - assert(nDirections == 1); - _nChannelBlocks = 
nChannelBlocks; - - _resTemplate.resize(3); - _resTemplate[0].vals.resize(_nAntennas*_nChannelBlocks); - _resTemplate[0].axes="ant,freq"; - _resTemplate[0].dims.resize(2); - _resTemplate[0].dims[0]=_nAntennas; - _resTemplate[0].dims[1]=_nChannelBlocks; - _resTemplate[0].name="rotation"; - - _resTemplate[1].vals.resize(_nAntennas*_nChannelBlocks*2); - _resTemplate[1].axes="ant,freq,pol"; - _resTemplate[1].dims.resize(3); - _resTemplate[1].dims[0]=_nAntennas; - _resTemplate[1].dims[1]=_nChannelBlocks; - _resTemplate[1].dims[2]=2; - _resTemplate[1].name="amplitude"; - - _resTemplate[2] = _resTemplate[1]; - _resTemplate[2].name="phase"; +void RotationAndDiagonalConstraint::SetWeights(const vector<double>& weights) { + _res[0].weights = weights; + + // Duplicate weights for two polarizations + _res[1].weights.resize(_nAntennas*_nChannelBlocks*2); + size_t indexInWeights = 0; + for (auto weight: weights) { + _res[1].weights[indexInWeights++] = weight; + _res[1].weights[indexInWeights++] = weight; + } + + _res[2].weights = _res[1].weights; } vector<Constraint::Result> RotationAndDiagonalConstraint::Apply( - vector<vector<dcomplex> >& solutions, double) { - // Convert to circular - complex<double> ll, rr; - complex<double> i(0,1.); - - // Find angle + vector<vector<dcomplex> >& solutions, double, + std::ostream* statStream) { + if (statStream) *statStream<<"["; // begin channel + double angle0; for (uint ch=0; ch<_nChannelBlocks; ++ch) { + if (statStream) *statStream<<"["; // begin antenna for (uint ant=0; ant<_nAntennas; ++ant) { // Compute rotation complex<double> *data = &(solutions[ch][4*ant]); double angle = RotationConstraint::get_rotation(data); // Restrict angle between -pi/2 and pi/2 - // Add 2pi to make sure that fmod doesn't see negative numbers + // Add 2pi to make sure that fmod doesn't see negative numbers angle = fmod(angle + 3.5*M_PI, M_PI) - 0.5*M_PI; - _resTemplate[0].vals[ant*_nChannelBlocks + ch] = angle; - + // Right multiply solution with inverse 
rotation, // save only the diagonal // Use sin(-phi) == -sin(phi) complex<double> a, b; a = data[0]*cos(angle) - data[1]*sin(angle); b = data[3]*cos(angle) + data[2]*sin(angle); - _resTemplate[1].vals[ant*_nChannelBlocks*2 + 2*ch ] = abs(a); - _resTemplate[1].vals[ant*_nChannelBlocks*2 + 2*ch + 1] = abs(b); - _resTemplate[2].vals[ant*_nChannelBlocks*2 + 2*ch ] = arg(a); - _resTemplate[2].vals[ant*_nChannelBlocks*2 + 2*ch +1] = arg(b); + + // Use station 0 as reference station (for every chanblock), to work + // around unitary ambiguity + if (ant==0) { + angle0 = angle; + angle = 0.; + } else { + angle -= angle0; + angle = fmod(angle + 3.5*M_PI, M_PI) - 0.5*M_PI; + } + _res[0].vals[ant*_nChannelBlocks + ch] = angle; + + _res[1].vals[ant*_nChannelBlocks*2 + 2*ch ] = abs(a); + _res[1].vals[ant*_nChannelBlocks*2 + 2*ch + 1] = abs(b); + _res[2].vals[ant*_nChannelBlocks*2 + 2*ch ] = arg(a); + _res[2].vals[ant*_nChannelBlocks*2 + 2*ch +1] = arg(b); // Do the actual constraining data[0] = a * cos(angle); data[1] = -a * sin(angle); data[2] = b * sin(angle); data[3] = b * cos(angle); + if (statStream) *statStream<<"["<<a.real()<<"+"<<a.imag()<<"j,"<<b.real()<<"+"<<b.imag()<<"j,"<<angle<<"]"; + //if (pd) cout<<angle; + if (statStream && ant<_nAntennas-1) *statStream<<","; } + if (statStream) *statStream<<"]"; // end antenna + if (statStream && ch<_nChannelBlocks-1) *statStream<<","; } + if (statStream) *statStream<<"]\t"; //end channel - return _resTemplate; + return _res; } } //namespace LOFAR diff --git a/CEP/DP3/DPPP_DDECal/src/RotationConstraint.cc b/CEP/DP3/DPPP_DDECal/src/RotationConstraint.cc index 2b14c6578e4d05fe80efa9e8a9fb4b24ae51ef35..f71a075e991ae61daa7e84d68e3aca0bb189247b 100644 --- a/CEP/DP3/DPPP_DDECal/src/RotationConstraint.cc +++ b/CEP/DP3/DPPP_DDECal/src/RotationConstraint.cc @@ -9,24 +9,24 @@ using namespace std; namespace LOFAR { -RotationConstraint::RotationConstraint(): - _nAntennas(0), _nDirections(0) -{ -} + void 
RotationConstraint::InitializeDimensions(size_t nAntennas, + size_t nDirections, + size_t nChannelBlocks) { + Constraint::InitializeDimensions(nAntennas, nDirections, nChannelBlocks); + + assert(_nDirections == 1); -void RotationConstraint::initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks) { - _nAntennas = nAntennas; - _nDirections = nDirections; - assert(nDirections == 1); - _nChannelBlocks = nChannelBlocks; + _res.resize(1); + _res[0].vals.resize(_nAntennas*_nChannelBlocks); + _res[0].axes="ant,freq"; + _res[0].dims.resize(2); + _res[0].dims[0]=_nAntennas; + _res[0].dims[1]=_nChannelBlocks; + _res[0].name="rotation"; +} - _resTemplate.resize(1); - _resTemplate[0].vals.resize(_nAntennas*_nChannelBlocks); - _resTemplate[0].axes="ant,freq"; - _resTemplate[0].dims.resize(2); - _resTemplate[0].dims[0]=_nAntennas; - _resTemplate[0].dims[1]=_nChannelBlocks; - _resTemplate[0].name="rotation"; +void RotationConstraint::SetWeights(const vector<double>& weights) { + _res[0].weights = weights; } double RotationConstraint::get_rotation(std::complex<double>* data) { @@ -42,7 +42,8 @@ double RotationConstraint::get_rotation(std::complex<double>* data) { } vector<Constraint::Result> RotationConstraint::Apply( - vector<vector<dcomplex> >& solutions, double) { + vector<vector<dcomplex> >& solutions, double, + std::ostream* statStream) { // Convert to circular complex<double> ll, rr; complex<double> i(0,1.); @@ -52,7 +53,7 @@ vector<Constraint::Result> RotationConstraint::Apply( // Compute rotation complex<double> *data= &(solutions[ch][4*ant]); double angle = get_rotation(data); - _resTemplate[0].vals[ant*_nChannelBlocks+ch] = angle; + _res[0].vals[ant*_nChannelBlocks+ch] = angle; // Constrain the data data[0] = cos(angle); @@ -62,7 +63,7 @@ vector<Constraint::Result> RotationConstraint::Apply( } } - return _resTemplate; + return _res; } } //namespace LOFAR diff --git a/CEP/DP3/DPPP_DDECal/src/ScreenConstraint.cc b/CEP/DP3/DPPP_DDECal/src/ScreenConstraint.cc 
index 5e60d3bcbbce289ddd376ed598549996b972f579..3e16c70f4669ec4f41c089d771336187dc282428 100644 --- a/CEP/DP3/DPPP_DDECal/src/ScreenConstraint.cc +++ b/CEP/DP3/DPPP_DDECal/src/ScreenConstraint.cc @@ -10,10 +10,8 @@ const size_t ScreenConstraint::maxIter=30; ScreenConstraint::ScreenConstraint(const ParameterSet& parset, const string& prefix) : - _nAntennas(0), - _nDirections(0), - _nChannelBlocks(0), - itsCurrentTime(0) + itsCurrentTime(0), + itsIter(0) { cout<<"=========="<<(prefix + "order")<<"========\n"; itsBeta=parset.getDouble (prefix + "beta", 5./3.); @@ -25,10 +23,7 @@ ScreenConstraint::ScreenConstraint(const ParameterSet& parset, itsDebugMode=parset.getInt(prefix + "debug", 0); } -void ScreenConstraint::initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks, const double* frequencies) { - _nAntennas = nAntennas; - _nDirections = nDirections; - _nChannelBlocks = nChannelBlocks; +void ScreenConstraint::initialize(const double* frequencies) { itsFrequencies.resize(_nChannelBlocks); itsprevsol.assign(_nDirections*_nAntennas,-999.); std::memcpy( itsFrequencies.data(),frequencies, sizeof(double) * _nChannelBlocks); @@ -185,7 +180,8 @@ void ScreenConstraint::CalculatePiercepoints(){ } -std::vector<Constraint::Result> ScreenConstraint::Apply(std::vector<std::vector<MultiDirSolver::DComplex> >& solutions,double time) { +std::vector<Constraint::Result> ScreenConstraint::Apply(std::vector<std::vector<MultiDirSolver::DComplex> >& solutions, + double time, std::ostream* statStream) { //check if we need to reinitialize piercepoints setTime(time); size_t nrresults=4; diff --git a/CEP/DP3/DPPP_DDECal/src/SmoothnessConstraint.cc b/CEP/DP3/DPPP_DDECal/src/SmoothnessConstraint.cc new file mode 100644 index 0000000000000000000000000000000000000000..a37f52305e129e834a162caa0d44e104447585fa --- /dev/null +++ b/CEP/DP3/DPPP_DDECal/src/SmoothnessConstraint.cc @@ -0,0 +1,77 @@ +#ifdef AOPROJECT +#include "KernelSmoother.h" +#include "SmoothnessConstraint.h" 
+#include "omptools.h" +#else +#include <DPPP_DDECal/KernelSmoother.h> +#include <DPPP_DDECal/SmoothnessConstraint.h> +#include <Common/OpenMP.h> +#endif + +SmoothnessConstraint::SmoothnessConstraint(double bandwidthHz) : + _kernelType(Smoother::GaussianKernel), + _bandwidth(bandwidthHz) +{ } + +void SmoothnessConstraint::Initialize(const double* frequencies) +{ + _frequencies.assign(frequencies, frequencies+_nChannelBlocks); + size_t nthreads = +#ifdef AOPROJECT + omp_get_max_threads(); +#else + LOFAR::OpenMP::maxThreads(); +#endif + for(size_t i=0; i!=nthreads; ++i) + _fitData.emplace_back(_frequencies.data(), _frequencies.size(), _kernelType, _bandwidth); +} + +void SmoothnessConstraint::InitializeDimensions(size_t nAntennas, + size_t nDirections, + size_t nChannelBlocks) +{ + Constraint::InitializeDimensions(nAntennas, nDirections, nChannelBlocks); +} + +std::vector<Constraint::Result> SmoothnessConstraint::Apply( + std::vector<std::vector<dcomplex> >& solutions, double, std::ostream*) +{ + const size_t nPol = solutions.front().size() / (_nAntennas*_nDirections); +#pragma omp parallel for + for(size_t antDirIndex = 0; antDirIndex<_nAntennas*_nDirections; ++antDirIndex) + { +#ifdef AOPROJECT + const size_t thread = omp_get_thread_num(); +#else + const size_t thread = LOFAR::OpenMP::threadNum(); +#endif + size_t antIndex = antDirIndex / _nDirections; + for(size_t pol = 0; pol!=nPol; ++pol) + { + size_t solutionIndex = antDirIndex*nPol + pol; + for(size_t ch=0; ch!=_nChannelBlocks; ++ch) + { + // Flag channels where calibration yielded inf or nan + if(std::isfinite(solutions[ch][solutionIndex].real()) && + std::isfinite(solutions[ch][solutionIndex].imag())) + { + _fitData[thread].data[ch] = solutions[ch][solutionIndex]; + _fitData[thread].weight[ch] = _weights[antIndex*_nChannelBlocks + ch]; + } + else { + _fitData[thread].data[ch] = 0.0; + _fitData[thread].weight[ch] = 0.0; + } + } + + _fitData[thread].smoother.Smooth(_fitData[thread].data.data(), 
_fitData[thread].weight.data()); + + for(size_t ch=0; ch!=_nChannelBlocks; ++ch) + { + solutions[ch][solutionIndex] = _fitData[thread].data[ch]; + } + } + } + + return std::vector<Constraint::Result>(); +} diff --git a/CEP/DP3/DPPP_DDECal/src/TECConstraint.cc b/CEP/DP3/DPPP_DDECal/src/TECConstraint.cc index a7b588667f255be54a2411faa15dff5a32597dc3..c19df7ee953f3dc49aed71921030910325aacaad 100644 --- a/CEP/DP3/DPPP_DDECal/src/TECConstraint.cc +++ b/CEP/DP3/DPPP_DDECal/src/TECConstraint.cc @@ -8,17 +8,11 @@ TECConstraintBase::TECConstraintBase(Mode mode) : _mode(mode), - _nAntennas(0), - _nDirections(0), - _nChannelBlocks(0), _phaseFitters() { } -void TECConstraintBase::initialize(size_t nAntennas, size_t nDirections, size_t nChannelBlocks, const double* frequencies) { - _nAntennas = nAntennas; - _nDirections = nDirections; - _nChannelBlocks = nChannelBlocks; +void TECConstraintBase::initialize(const double* frequencies) { _phaseFitters.resize( #ifdef AOPROJECT omp_get_max_threads() @@ -30,13 +24,17 @@ void TECConstraintBase::initialize(size_t nAntennas, size_t nDirections, size_t for(size_t i=0; i!=_phaseFitters.size(); ++i) { _phaseFitters[i].SetChannelCount(_nChannelBlocks); - std::memcpy(_phaseFitters[i].FrequencyData(), frequencies, sizeof(double) * _nChannelBlocks); - - // TODO this should set the weights of the phase fitter! 
+ std::memcpy(_phaseFitters[i].FrequencyData(), frequencies, + sizeof(double) * _nChannelBlocks); } + _weights.assign(_nChannelBlocks*_nAntennas, 1.0); initializeChild(); } +void TECConstraintBase::SetWeights(const std::vector<double>& weights) { + _weights = weights; +} + void ApproximateTECConstraint::initializeChild() { _pwFitters.resize( @@ -94,7 +92,8 @@ void TECConstraintBase::applyReferenceAntenna(std::vector<std::vector<dcomplex> } std::vector<Constraint::Result> TECConstraint::Apply( - std::vector<std::vector<dcomplex> >& solutions, double) + std::vector<std::vector<dcomplex> >& solutions, double, + std::ostream* /*statStream*/) { size_t nRes = 3; if(_mode == TECOnlyMode) { @@ -106,6 +105,7 @@ std::vector<Constraint::Result> TECConstraint::Apply( std::vector<Constraint::Result> res(nRes); res[0].vals.resize(_nAntennas*_nDirections); + res[0].weights.resize(_nAntennas*_nDirections); res[0].axes="ant,dir,freq"; res[0].name="tec"; res[0].dims.resize(3); @@ -125,6 +125,7 @@ std::vector<Constraint::Result> TECConstraint::Apply( #pragma omp parallel for for(size_t solutionIndex = 0; solutionIndex<_nAntennas*_nDirections; ++solutionIndex) { + size_t antennaIndex = solutionIndex/_nDirections; size_t thread = #ifdef AOPROJECT omp_get_thread_num(); @@ -132,12 +133,15 @@ std::vector<Constraint::Result> TECConstraint::Apply( LOFAR::OpenMP::threadNum(); #endif + // Flag channels where calibration yielded inf or nan + double weightSum = 0.0; for(size_t ch=0; ch!=_nChannelBlocks; ++ch) { if(std::isfinite(solutions[ch][solutionIndex].real()) && std::isfinite(solutions[ch][solutionIndex].imag())) { _phaseFitters[thread].PhaseData()[ch] = std::arg(solutions[ch][solutionIndex]); - _phaseFitters[thread].WeightData()[ch] = 1.0; + _phaseFitters[thread].WeightData()[ch] = _weights[antennaIndex*_nChannelBlocks + ch]; + weightSum += _weights[antennaIndex*_nChannelBlocks + ch]; } else { _phaseFitters[thread].PhaseData()[ch] = 0.0; @@ -151,10 +155,13 @@ 
std::vector<Constraint::Result> TECConstraint::Apply( } else { res.back().vals[solutionIndex]=_phaseFitters[thread].FitDataToTEC2Model(alpha, beta); } + res.back().weights[solutionIndex] = weightSum; res[0].vals[solutionIndex] = alpha / -8.44797245e9; + res[0].weights[solutionIndex] = weightSum; if(_mode == TECAndCommonScalarMode) { res[1].vals[solutionIndex] = beta; + res[1].weights[solutionIndex] = weightSum; } for(size_t ch=0; ch!=_nChannelBlocks; ++ch) @@ -167,16 +174,18 @@ std::vector<Constraint::Result> TECConstraint::Apply( } std::vector<Constraint::Result> ApproximateTECConstraint::Apply( - std::vector<std::vector<dcomplex> >& solutions, double time) + std::vector<std::vector<dcomplex> >& solutions, double time, + std::ostream* statStream) { if(_finishedApproximateStage) - return TECConstraint::Apply(solutions, time); + return TECConstraint::Apply(solutions, time, statStream); else { applyReferenceAntenna(solutions); #pragma omp parallel for for(size_t solutionIndex = 0; solutionIndex<_nAntennas*_nDirections; ++solutionIndex) { + size_t antennaIndex = solutionIndex/_nDirections; #ifdef AOPROJECT size_t thread = omp_get_thread_num(); #else @@ -186,12 +195,13 @@ std::vector<Constraint::Result> ApproximateTECConstraint::Apply( std::vector<double>& fittedData = _threadFittedData[thread]; std::vector<double>& weights = _threadWeights[thread]; + // Flag channels where calibration yielded inf or nan for(size_t ch=0; ch!=_nChannelBlocks; ++ch) { if(std::isfinite(solutions[ch][solutionIndex].real()) && std::isfinite(solutions[ch][solutionIndex].imag())) { data[ch] = std::arg(solutions[ch][solutionIndex]); - weights[ch] = 1.0; + weights[ch] = _weights[antennaIndex*_nChannelBlocks + ch]; } else { data[ch] = 0.0; diff --git a/CEP/DP3/DPPP_DDECal/test/tDDECal.run b/CEP/DP3/DPPP_DDECal/test/tDDECal.run index c85c65539e4918cc608a1456dfc9253bbc2b564d..36569aa3d6143fe6ec0479e59236ac4bc261fde3 100755 --- a/CEP/DP3/DPPP_DDECal/test/tDDECal.run +++ 
b/CEP/DP3/DPPP_DDECal/test/tDDECal.run @@ -35,6 +35,12 @@ cmd="NDPPP checkparset=1 msin=tDDECal.MS msout=. msout.datacolumn=DATA\ echo $cmd $cmd >& /dev/null +echo "Predict model data column" +cmd="NDPPP checkparset=1 msin=tDDECal.MS msout=. msout.datacolumn=MODEL_DATA\ + steps=[]" +echo $cmd +$cmd >& /dev/null + for caltype in complexgain scalarcomplexgain amplitudeonly scalaramplitude do for solint in 0 1 2 4 @@ -67,11 +73,11 @@ do #h5sols.py instrument.h5 echo "Check that residual is small, caltype=$caltype, nchan=$nchan, solint=$solint" - cmd="$taqlexe 'select norm_residual/norm_data FROM (select sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*DATA[FLAG]))) as norm_data, sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*SUBTRACTED_DATA[FLAG]))) as norm_residual from tDDECal.MS)'" + cmd="$taqlexe 'select norm_residual/norm_data FROM (select sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*DATA))) as norm_data, sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*SUBTRACTED_DATA))) as norm_residual from tDDECal.MS)'" echo $cmd eval $cmd - cmd="$taqlexe 'select FROM (select sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*DATA[FLAG]))) as norm_data, sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*SUBTRACTED_DATA[FLAG]))) as norm_residual from tDDECal.MS) where norm_residual/norm_data > 0.015 or isinf(norm_residual/norm_data) or isnan(norm_residual/norm_data)' > taql.out" + cmd="$taqlexe 'select FROM (select sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*DATA))) as norm_data, sqrt(abs(gsumsqr(WEIGHT_SPECTRUM*SUBTRACTED_DATA))) as norm_residual from tDDECal.MS) where norm_residual/norm_data > 0.015 or isinf(norm_residual/norm_data) or isnan(norm_residual/norm_data)' > taql.out" echo $cmd eval $cmd @@ -127,3 +133,14 @@ cmd="NDPPP checkparset=1 msin=tDDECal.MS msout=. numthreads=4\ ddecal.h5parm=instrument-tecandphase.h5 ddecal.mode=tecandphase" echo $cmd $cmd + +echo "Create MODEL_DATA" +cmd="NDPPP checkparset=1 msin=tDDECal.MS msout=. msout.datacolumn=MODEL_DATA\ + steps=[]" +echo $cmd +$cmd >& /dev/null +cmd="NDPPP checkparset=1 msin=tDDECal.MS msout=. 
steps=[ddecal]\ + ddecal.usemodelcolumn=true ddecal.h5parm=instrument-modeldata \ + ddecal.solint=2 ddecal.nchan=3" +echo $cmd +$cmd diff --git a/CEP/DP3/DPPP_DDECal/test/tRotationConstraint.cc b/CEP/DP3/DPPP_DDECal/test/tRotationConstraint.cc index a9fb2e35d20563f2c8764416743af8c3136386de..5d629d526467425b8e1fd273806fbe2578e34601 100644 --- a/CEP/DP3/DPPP_DDECal/test/tRotationConstraint.cc +++ b/CEP/DP3/DPPP_DDECal/test/tRotationConstraint.cc @@ -1,6 +1,6 @@ #include <lofar_config.h> #include <Common/LofarLogger.h> // ASSERT -#include <casa/BasicMath/Math.h> // near +#include <casacore/casa/BasicMath/Math.h> // near #include <vector> #include <iostream> @@ -15,7 +15,7 @@ using namespace LOFAR; void test_rotation() { RotationConstraint constraint; - constraint.initialize(1, 1, 1); + constraint.InitializeDimensions(1, 1, 1); vector<vector<complex<double> > > onesolution(1); onesolution[0].resize(4); @@ -31,7 +31,7 @@ void test_rotation() { onesolution[0][3] = cos(phi); vector<Constraint::Result> constraint_result; - constraint_result = constraint.Apply(onesolution, 0.); + constraint_result = constraint.Apply(onesolution, 0., nullptr); ASSERT( constraint_result.size() == 1 ); ASSERT( constraint_result[0].axes == "ant,freq" ); @@ -46,7 +46,7 @@ void test_rotation() { void test_rotation_and_diagonal() { RotationAndDiagonalConstraint constraint; - constraint.initialize(1, 1, 1); + constraint.InitializeDimensions(1, 1, 1); vector<vector<complex<double> > > onesolution(1); onesolution[0].resize(4); @@ -64,12 +64,12 @@ void test_rotation_and_diagonal() { onesolution[0][3] = b * cos(phi); vector<Constraint::Result> constraint_result; - constraint_result = constraint.Apply(onesolution, 0.); + constraint_result = constraint.Apply(onesolution, 0., nullptr); ASSERT( constraint_result.size() == 3 ); ASSERT( constraint_result[0].name == "rotation" ); ASSERT( constraint_result[0].axes == "ant,freq" ); - ASSERT( near(constraint_result[0].vals[0], phi) ); + ASSERT( 
near(constraint_result[0].vals[0], 0.) ); ASSERT( constraint_result[0].dims.size() == 2 ); ASSERT( constraint_result[0].dims[0] == 1 ); ASSERT( constraint_result[0].dims[1] == 1 ); diff --git a/CEP/DP3/DPPP_Interpolate/CMake/CheckCXXSymbolExists.cmake b/CEP/DP3/DPPP_Interpolate/CMake/CheckCXXSymbolExists.cmake new file mode 100644 index 0000000000000000000000000000000000000000..084fbb422f5a607b4083e5d7108aefbf90d9aa3c --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/CMake/CheckCXXSymbolExists.cmake @@ -0,0 +1,49 @@ +#.rst: +# CheckCXXSymbolExists +# -------------------- +# +# Check if a symbol exists as a function, variable, or macro in C++ +# +# CHECK_CXX_SYMBOL_EXISTS(<symbol> <files> <variable>) +# +# Check that the <symbol> is available after including given header +# <files> and store the result in a <variable>. Specify the list of +# files in one argument as a semicolon-separated list. +# CHECK_CXX_SYMBOL_EXISTS() can be used to check in C++ files, as +# opposed to CHECK_SYMBOL_EXISTS(), which works only for C. +# +# If the header files define the symbol as a macro it is considered +# available and assumed to work. If the header files declare the symbol +# as a function or variable then the symbol must also be available for +# linking. If the symbol is a type or enum value it will not be +# recognized (consider using CheckTypeSize or CheckCSourceCompiles). +# +# The following variables may be set before calling this macro to modify +# the way the check is run: +# +# :: +# +# CMAKE_REQUIRED_FLAGS = string of compile command line flags +# CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar) +# CMAKE_REQUIRED_INCLUDES = list of include directories +# CMAKE_REQUIRED_LIBRARIES = list of libraries to link +# CMAKE_REQUIRED_QUIET = execute quietly without messages + +#============================================================================= +# Copyright 2003-2011 Kitware, Inc. 
+# +# Distributed under the OSI-approved BSD License (the "License"); +# see accompanying file Copyright.txt for details. +# +# This software is distributed WITHOUT ANY WARRANTY; without even the +# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the License for more information. +#============================================================================= +# (To distribute this file outside of CMake, substitute the full +# License text for the above reference.) + +include(CheckSymbolExists) + +macro(CHECK_CXX_SYMBOL_EXISTS SYMBOL FILES VARIABLE) + _CHECK_SYMBOL_EXISTS("${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/CheckSymbolExists.cxx" "${SYMBOL}" "${FILES}" "${VARIABLE}" ) +endmacro() diff --git a/CEP/DP3/DPPP_Interpolate/CMake/FindCFITSIO.cmake b/CEP/DP3/DPPP_Interpolate/CMake/FindCFITSIO.cmake new file mode 100644 index 0000000000000000000000000000000000000000..6501f7762ca300f4ec488a0adf66e013723cfc5c --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/CMake/FindCFITSIO.cmake @@ -0,0 +1,49 @@ +# - Try to find CFITSIO. +# Variables used by this module: +# CFITSIO_ROOT_DIR - CFITSIO root directory +# Variables defined by this module: +# CFITSIO_FOUND - system has CFITSIO +# CFITSIO_INCLUDE_DIR - the CFITSIO include directory (cached) +# CFITSIO_INCLUDE_DIRS - the CFITSIO include directories +# (identical to CFITSIO_INCLUDE_DIR) +# CFITSIO_LIBRARY - the CFITSIO library (cached) +# CFITSIO_LIBRARIES - the CFITSIO libraries +# (identical to CFITSIO_LIBRARY) + +# Copyright (C) 2009 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: FindCFITSIO.cmake 22498 2012-10-23 10:51:12Z loose $ + +if(NOT CFITSIO_FOUND) + + find_path(CFITSIO_INCLUDE_DIR fitsio.h + HINTS ${CFITSIO_ROOT_DIR} PATH_SUFFIXES include include/cfitsio include/libcfitsio0) + find_library(CFITSIO_LIBRARY cfitsio + HINTS ${CFITSIO_ROOT_DIR} PATH_SUFFIXES lib) + find_library(M_LIBRARY m) + mark_as_advanced(CFITSIO_INCLUDE_DIR CFITSIO_LIBRARY M_LIBRARY) + + include(FindPackageHandleStandardArgs) + find_package_handle_standard_args(CFITSIO DEFAULT_MSG + CFITSIO_LIBRARY M_LIBRARY CFITSIO_INCLUDE_DIR) + + set(CFITSIO_INCLUDE_DIRS ${CFITSIO_INCLUDE_DIR}) + set(CFITSIO_LIBRARIES ${CFITSIO_LIBRARY} ${M_LIBRARY}) + +endif(NOT CFITSIO_FOUND) diff --git a/CEP/DP3/DPPP_Interpolate/CMake/FindCasacore.cmake b/CEP/DP3/DPPP_Interpolate/CMake/FindCasacore.cmake new file mode 100644 index 0000000000000000000000000000000000000000..e4689f75102cc9e72847ed03ac2e71bc7ded82c7 --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/CMake/FindCasacore.cmake @@ -0,0 +1,267 @@ +# - Try to find Casacore include dirs and libraries +# Usage: +# find_package(Casacore [REQUIRED] [COMPONENTS components...]) +# Valid components are: +# casa, coordinates, derivedmscal, fits, images, lattices, +# meas, measures, mirlib, ms, msfits, python, scimath, scimath_f, tables +# +# Note that most components are dependent on other (more basic) components. +# In that case, it suffices to specify the "top-level" components; dependent +# components will be searched for automatically. +# +# The dependency tree can be generated using the script get_casacore_deps.sh. 
+# For this, you need to have a complete casacore installation, built with shared +# libraries, at your disposal. +# +# The dependencies in this macro were generated against casacore release 1.7.0. +# +# Variables used by this module: +# CASACORE_ROOT_DIR - Casacore root directory. +# +# Variables defined by this module: +# CASACORE_FOUND - System has Casacore, which means that the +# include dir was found, as well as all +# libraries specified (not cached) +# CASACORE_INCLUDE_DIR - Casacore include directory (cached) +# CASACORE_INCLUDE_DIRS - Casacore include directories (not cached) +# identical to CASACORE_INCLUDE_DIR +# CASACORE_LIBRARIES - The Casacore libraries (not cached) +# CASA_${COMPONENT}_LIBRARY - The absolute path of Casacore library +# "component" (cached) +# HAVE_AIPSPP - True if system has Casacore (cached) +# for backward compatibility with AIPS++ +# HAVE_CASACORE - True if system has Casacore (cached) +# identical to CASACORE_FOUND +# TAQL_EXECUTABLE - The absolute path of the TaQL executable +# (cached) +# +# ATTENTION: The component names need to be in lower case, just as the +# casacore library names. However, the CMake variables use all upper case. + +# Copyright (C) 2009 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: FindCasacore.cmake 31487 2015-04-16 11:28:17Z dijkema $ + +# - casacore_resolve_dependencies(_result) +# +# Resolve the Casacore library dependencies for the given components. +# The list of dependent libraries will be returned in the variable result. +# It is sorted from least dependent to most dependent library, so it can be +# directly fed to the linker. +# +# Usage: casacore_resolve_dependencies(result components...) +# +macro(casacore_resolve_dependencies _result) + set(${_result} ${ARGN}) + set(_index 0) + # Do a breadth-first search through the dependency graph; append to the + # result list the dependent components for each item in that list. + # Duplicates will be removed later. + while(1) + list(LENGTH ${_result} _length) + if(NOT _index LESS _length) + break() + endif(NOT _index LESS _length) + list(GET ${_result} ${_index} item) + list(APPEND ${_result} ${Casacore_${item}_DEPENDENCIES}) + math(EXPR _index "${_index}+1") + endwhile(1) + # Remove all duplicates in the current result list, while retaining only the + # last of each duplicate. + list(REVERSE ${_result}) + list(REMOVE_DUPLICATES ${_result}) + list(REVERSE ${_result}) +endmacro(casacore_resolve_dependencies _result) + + +# - casacore_find_library(_name) +# +# Search for the library ${_name}. +# If library is found, add it to CASACORE_LIBRARIES; if not, add ${_name} +# to CASACORE_MISSING_COMPONENTS and set CASACORE_FOUND to false. 
+# +# Usage: casacore_find_library(name) +# +macro(casacore_find_library _name) + string(TOUPPER ${_name} _NAME) + find_library(${_NAME}_LIBRARY ${_name} + HINTS ${CASACORE_ROOT_DIR} PATH_SUFFIXES lib) + mark_as_advanced(${_NAME}_LIBRARY) + if(${_NAME}_LIBRARY) + list(APPEND CASACORE_LIBRARIES ${${_NAME}_LIBRARY}) + else(${_NAME}_LIBRARY) + set(CASACORE_FOUND FALSE) + list(APPEND CASACORE_MISSING_COMPONENTS ${_name}) + endif(${_NAME}_LIBRARY) +endmacro(casacore_find_library _name) + + +# - casacore_find_package(_name) +# +# Search for the package ${_name}. +# If the package is found, add the contents of ${_name}_INCLUDE_DIRS to +# CASACORE_INCLUDE_DIRS and ${_name}_LIBRARIES to CASACORE_LIBRARIES. +# +# If Casacore itself is required, then, strictly speaking, the packages it +# requires must be present. However, when linking against static libraries +# they may not be needed. One can override the REQUIRED setting by switching +# CASACORE_MAKE_REQUIRED_EXTERNALS_OPTIONAL to ON. Beware that this might cause +# compile and/or link errors. +# +# Usage: casacore_find_package(name [REQUIRED]) +# +macro(casacore_find_package _name) + if("${ARGN}" MATCHES "^REQUIRED$" AND + Casacore_FIND_REQUIRED AND + NOT CASACORE_MAKE_REQUIRED_EXTERNALS_OPTIONAL) + find_package(${_name} REQUIRED) + else() + find_package(${_name}) + endif() + if(${_name}_FOUND) + list(APPEND CASACORE_INCLUDE_DIRS ${${_name}_INCLUDE_DIRS}) + list(APPEND CASACORE_LIBRARIES ${${_name}_LIBRARIES}) + endif(${_name}_FOUND) +endmacro(casacore_find_package _name) + +# Define the Casacore components. +set(Casacore_components + casa + coordinates + derivedmscal + fits + images + lattices + meas + measures + mirlib + ms + msfits + python + scimath + scimath_f + tables +) + +# Define the Casacore components' inter-dependencies. 
+set(Casacore_casa_DEPENDENCIES) +set(Casacore_coordinates_DEPENDENCIES fits measures casa) +set(Casacore_derivedmscal_DEPENDENCIES ms measures tables casa) +set(Casacore_fits_DEPENDENCIES measures tables casa) +set(Casacore_images_DEPENDENCIES mirlib lattices coordinates fits measures scimath tables casa) +set(Casacore_lattices_DEPENDENCIES tables scimath casa) +set(Casacore_meas_DEPENDENCIES measures tables casa) +set(Casacore_measures_DEPENDENCIES tables casa) +set(Casacore_mirlib_DEPENDENCIES) +set(Casacore_ms_DEPENDENCIES measures scimath tables casa) +set(Casacore_msfits_DEPENDENCIES ms fits measures tables casa) +set(Casacore_python_DEPENDENCIES casa) +set(Casacore_scimath_DEPENDENCIES scimath_f casa) +set(Casacore_scimath_f_DEPENDENCIES) +set(Casacore_tables_DEPENDENCIES casa) + +# Initialize variables. +set(CASACORE_FOUND FALSE) +set(CASACORE_DEFINITIONS) +set(CASACORE_LIBRARIES) +set(CASACORE_MISSING_COMPONENTS) + +# Search for the header file first. +if(NOT CASACORE_INCLUDE_DIR) + find_path(CASACORE_INCLUDE_DIR casacore/casa/aips.h + HINTS ${CASACORE_ROOT_DIR} PATH_SUFFIXES include) + mark_as_advanced(CASACORE_INCLUDE_DIR) +endif(NOT CASACORE_INCLUDE_DIR) + +# Fallback for systems that have old casacore installed in directory not called 'casacore' +# This fallback can be removed once we move to casacore 2.0 which always puts headers in 'casacore' +if(NOT CASACORE_INCLUDE_DIR) + find_path(CASACORE_INCLUDE_DIR casa/aips.h + HINTS ${CASACORE_ROOT_DIR} PATH_SUFFIXES include) + mark_as_advanced(CASACORE_INCLUDE_DIR) +endif(NOT CASACORE_INCLUDE_DIR) + +if(NOT CASACORE_INCLUDE_DIR) + set(CASACORE_ERROR_MESSAGE "Casacore: unable to find the header file casa/aips.h.\nPlease set CASACORE_ROOT_DIR to the root directory containing Casacore.") +else(NOT CASACORE_INCLUDE_DIR) + # We've found the header file; let's continue. + set(CASACORE_FOUND TRUE) + # Note that new Casacore uses #include<casacore/casa/...>, while + # LOFAR still uses #include<casa/...>. 
Hence use both in -I path. + set(CASACORE_INCLUDE_DIRS ${CASACORE_INCLUDE_DIR} ${CASACORE_INCLUDE_DIR}/casacore) + + # Search for some often used binaries. + find_program(TAQL_EXECUTABLE taql + HINTS ${CASACORE_ROOT_DIR}/bin) + mark_as_advanced(TAQL_EXECUTABLE) + + # If the user specified components explicity, use that list; otherwise we'll + # assume that the user wants to use all components. + if(NOT Casacore_FIND_COMPONENTS) + set(Casacore_FIND_COMPONENTS ${Casacore_components}) + endif(NOT Casacore_FIND_COMPONENTS) + + # Get a list of all dependent Casacore libraries that need to be found. + casacore_resolve_dependencies(_find_components ${Casacore_FIND_COMPONENTS}) + + # Find the library for each component, and handle external dependencies + foreach(_comp ${_find_components}) + casacore_find_library(casa_${_comp}) + if(${_comp} STREQUAL casa) + casacore_find_package(HDF5) + casacore_find_library(m) + list(APPEND CASACORE_LIBRARIES ${CMAKE_DL_LIBS}) + elseif(${_comp} STREQUAL coordinates) + casacore_find_package(WCSLIB REQUIRED) + elseif(${_comp} STREQUAL fits) + casacore_find_package(CFITSIO REQUIRED) + elseif(${_comp} STREQUAL scimath_f) + casacore_find_package(LAPACK REQUIRED) + endif(${_comp} STREQUAL casa) + endforeach(_comp ${_find_components}) +endif(NOT CASACORE_INCLUDE_DIR) + +# Set HAVE_CASACORE; and HAVE_AIPSPP (for backward compatibility with AIPS++). +if(CASACORE_FOUND) + set(HAVE_CASACORE TRUE CACHE INTERNAL "Define if Casacore is installed") + set(HAVE_AIPSPP TRUE CACHE INTERNAL "Define if AIPS++/Casacore is installed") +endif(CASACORE_FOUND) + +# Compose diagnostic message if not all necessary components were found. +if(CASACORE_MISSING_COMPONENTS) + set(CASACORE_ERROR_MESSAGE "Casacore: the following components could not be found:\n ${CASACORE_MISSING_COMPONENTS}") +endif(CASACORE_MISSING_COMPONENTS) + +# Print diagnostics. 
+if(CASACORE_FOUND) + if(NOT Casacore_FIND_QUIETLY) + message(STATUS "Found the following Casacore components: ") + foreach(_comp ${_find_components}) + string(TOUPPER casa_${_comp} _COMP) + message(STATUS " ${_comp}: ${${_COMP}_LIBRARY}") + endforeach(_comp ${_find_components}) + endif(NOT Casacore_FIND_QUIETLY) +else(CASACORE_FOUND) + if(Casacore_FIND_REQUIRED) + message(FATAL_ERROR "${CASACORE_ERROR_MESSAGE}") + else(Casacore_FIND_REQUIRED) + message(STATUS "${CASACORE_ERROR_MESSAGE}") + endif(Casacore_FIND_REQUIRED) +endif(CASACORE_FOUND) + diff --git a/CEP/DP3/DPPP_Interpolate/CMakeLists-standalone.txt b/CEP/DP3/DPPP_Interpolate/CMakeLists-standalone.txt new file mode 100644 index 0000000000000000000000000000000000000000..798f11ee87b4e503c317dc8e8d280c36025c39cf --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/CMakeLists-standalone.txt @@ -0,0 +1,27 @@ +cmake_minimum_required(VERSION 2.8) + +project(DPPP_Interpolate) + +# Casacore has a separate CMake file in this directory +set(CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/CMake) + +SET(CASACORE_MAKE_REQUIRED_EXTERNALS_OPTIONAL TRUE) +find_package(Casacore REQUIRED COMPONENTS casa ms tables measures) + +find_package(Threads REQUIRED) + +include_directories(${CASACORE_INCLUDE_DIRS} ../../../build/gnucxx11_opt/include/) + +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -Wall -DNDEBUG --std=c++11 -ggdb") + +add_library(interpolate-object OBJECT + src/Interpolate.cc + ../../../LCS/Common/src/LofarLogCout.cc) +set_property(TARGET interpolate-object PROPERTY POSITION_INDEPENDENT_CODE 1) + +# Note: casapy fails if Casa is linked in the storage manager, so we have to trust that +# casapy's version of casacore is binary compatible with this storage manager's casacore. 
+add_library(dppp_interpolate SHARED $<TARGET_OBJECTS:interpolate-object>) +target_link_libraries(dppp_interpolate ${CASACORE_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}) + +install (TARGETS dppp_interpolate DESTINATION lib) diff --git a/CEP/DP3/DPPP_Interpolate/CMakeLists.txt b/CEP/DP3/DPPP_Interpolate/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..a45bd5adc12c880e93b85c0dda08e8c0df827277 --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/CMakeLists.txt @@ -0,0 +1,10 @@ +# $Id: CMakeLists.txt 27640 2013-12-04 08:02:49Z diepen $ + +lofar_package(DPPP_Interpolate 1.0 DEPENDS DPPP) + +include(LofarFindPackage) +lofar_find_package(Casacore COMPONENTS casa ms tables REQUIRED) + +add_subdirectory(include/DPPP_Interpolate) +add_subdirectory(src) +add_subdirectory(test) diff --git a/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/CMakeLists.txt b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d9cb5dee52325f9070cf7377adc73744106910e --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/CMakeLists.txt @@ -0,0 +1,14 @@ +# $Id: CMakeLists.txt 39071 2018-02-08 15:50:02Z dijkema $ + +# List of header files that will be installed. +set(inst_HEADERS + buffered_lane.h Interpolate.h lane.h + ) + +# Create symbolic link to include directory. +execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink + ${CMAKE_CURRENT_SOURCE_DIR} + ${CMAKE_BINARY_DIR}/include/${PACKAGE_NAME}) + +# Install header files. 
+install(FILES ${inst_HEADERS} DESTINATION include/${PACKAGE_NAME}) diff --git a/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/Interpolate.h b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/Interpolate.h new file mode 100644 index 0000000000000000000000000000000000000000..97570460a720a504820db81f30831d21cf5be37f --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/Interpolate.h @@ -0,0 +1,77 @@ +#ifndef INTERPOLATE_H +#define INTERPOLATE_H + +#include <deque> +#include <string> + +#include <DPPP/DPInput.h> +#include <DPPP/DPBuffer.h> + +#include <Common/ParameterSet.h> + +#include <casacore/casa/Arrays/Cube.h> + +#include "lane.h" + +extern "C" void register_interpolate(); + +namespace LOFAR { namespace DPPP { + + class Interpolate : public DPStep + { + public: + // Construct the object. + // Parameters are obtained from the parset using the given prefix. + Interpolate (DPInput*, const ParameterSet&, const string& prefix); + + virtual ~Interpolate() = default; + + // Process the data. + // It keeps the data. + // When processed, it invokes the process function of the next step. + virtual bool process (const DPBuffer&); + + // Finish the processing of this step and subsequent steps. + virtual void finish(); + + // Update the general info. + virtual void updateInfo (const DPInfo&); + + // Show the step parameters. + virtual void show (std::ostream&) const; + + // Show the timings. 
+ virtual void showTimings (std::ostream&, double duration) const; + + static DPStep::ShPtr makeStep(DPInput* input, const ParameterSet& parset, const std::string& prefix); + + private: + void interpolateTimestep(size_t index); + void interpolateSample(size_t timestep, size_t baseline, size_t channel, size_t pol); + void sendFrontBufferToNextStep(); + void interpolationThread(); + + struct Sample { + Sample() = default; + Sample(size_t timestep_, size_t baseline_, size_t channel_, size_t pol_) : + timestep(timestep_), baseline(baseline_), channel(channel_), pol(pol_) + { } + size_t timestep; + size_t baseline; + size_t channel; + size_t pol; + }; + + //# Data members. + std::string _name; + size_t _interpolatedPos; + std::deque<DPBuffer> _buffers; + size_t _windowSize; + NSTimer _timer; + ao::lane<Sample> _lane; + std::vector<float> _kernelLookup; + }; + +} } //# end namespace + +#endif diff --git a/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/buffered_lane.h b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/buffered_lane.h new file mode 100644 index 0000000000000000000000000000000000000000..d9c0f9ec07316b11d42c73c8661a425fce426ae3 --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/buffered_lane.h @@ -0,0 +1,122 @@ +#ifndef BUFFER_LANE_H +#define BUFFER_LANE_H + +#include <vector> + +#include "lane.h" + +template<typename Tp> +class lane_write_buffer +{ +public: + typedef typename ao::lane<Tp>::size_type size_type; + typedef typename ao::lane<Tp>::value_type value_type; + + lane_write_buffer() : _buffer_size(0), _lane(0) + { } + + lane_write_buffer(ao::lane<Tp>* lane, size_type buffer_size) : _buffer_size(buffer_size), _lane(lane) + { + _buffer.reserve(buffer_size); + } + + ~lane_write_buffer() + { + flush(); + } + + void reset(ao::lane<Tp>* lane, size_type buffer_size) + { + _buffer.clear(); + _buffer.reserve(buffer_size); + _buffer_size = buffer_size; + _lane = lane; + } + + void clear() + { + _lane->clear(); + _buffer.clear(); + } + + 
void write(const value_type& element) + { + _buffer.push_back(element); + if(_buffer.size() == _buffer_size) + flush(); + } + + void write(value_type&& element) + { + _buffer.push_back(std::move(element)); + if(_buffer.size() == _buffer_size) + flush(); + } + + template<typename... Args> + void emplace(Args&&... args) + { + _buffer.emplace_back(args...); + if(_buffer.size() == _buffer_size) + flush(); + } + + void write_end() + { + flush(); + _lane->write_end(); + } + + void flush() + { + _lane->move_write(&_buffer[0], _buffer.size()); + _buffer.clear(); + } +private: + size_type _buffer_size; + std::vector<value_type> _buffer; + ao::lane<Tp>* _lane; +}; + +template<typename Tp> +class lane_read_buffer +{ +public: + lane_read_buffer(ao::lane<Tp>* lane, size_t buffer_size) : + _buffer(new Tp[buffer_size]), + _buffer_size(buffer_size), + _buffer_pos(0), + _buffer_fill_count(0), + _lane(lane) + { + } + + ~lane_read_buffer() + { + delete[] _buffer; + } + + bool read(Tp& element) + { + if(_buffer_pos == _buffer_fill_count) + { + _buffer_fill_count = _lane->read(_buffer, _buffer_size); + _buffer_pos = 0; + if(_buffer_fill_count == 0) + return false; + } + element = std::move(_buffer[_buffer_pos]); + ++_buffer_pos; + return true; + } + +private: + lane_read_buffer(const lane_read_buffer&) = delete; + lane_read_buffer& operator=(const lane_read_buffer&) = delete; + + Tp* _buffer; + size_t _buffer_size, _buffer_pos, _buffer_fill_count; + ao::lane<Tp>* _lane; +}; + +#endif diff --git a/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/lane.h b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/lane.h new file mode 100644 index 0000000000000000000000000000000000000000..4be42a6e061dfb3acc26c7d25f75d611040b1bfe --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/include/DPPP_Interpolate/lane.h @@ -0,0 +1,576 @@ +#ifndef AO_LANE_11_H +#define AO_LANE_11_H + +#include <cstring> +#include <deque> +#include <mutex> +#include <condition_variable> + +/** + * @file + * Internal header file 
for the lane. + * @headername{lane.h} + */ + +//#define LANE_DEBUG_MODE + +#ifdef LANE_DEBUG_MODE +#include <string> +#include <iostream> +#include <sstream> +#include <cmath> +#endif + +namespace ao +{ + +#ifdef LANE_DEBUG_MODE +#define set_lane_debug_name(lane, str) (lane).setDebugName(str) +#define LANE_REGISTER_DEBUG_INFO registerDebugInfo() +#define LANE_REGISTER_DEBUG_WRITE_WAIT registerDebugWriteWait() +#define LANE_REGISTER_DEBUG_READ_WAIT registerDebugReadWait() +#define LANE_REPORT_DEBUG_INFO reportDebugInfo() + +#else + +#define set_lane_debug_name(lane, str) +#define LANE_REGISTER_DEBUG_INFO +#define LANE_REGISTER_DEBUG_WRITE_WAIT +#define LANE_REGISTER_DEBUG_READ_WAIT +#define LANE_REPORT_DEBUG_INFO + +#endif + +/** + * @brief The lane is an efficient cyclic buffer that is synchronized. + * @details + * A lane can typically be used in a multi-threaded producer-consumer + * situation. The lane also holds a state which allows for + * an ellegant way of communicating from producer(s) to + * consumer(s) that all data has been produced. + * + * A simple example: + * @code + * void producer(lane<Task>* taskLane) + * { + * while(moreTasks) + * taskLane->write(nextTask()); + * taskLane->write_end(); + * } + * + * void consumer(lane<Task>* taskLane) + * { + * Task task; + * while(taskLane->read(task)) + * processTask(task); + * } + * + * void run() + * { + * lane<Task> taskLane; + * std::thread consumerThread(&consumer(), &taskLane); + * producer(&taskLane); + * consumerThread.join(); + * } + * @endcode + * + * The various read and write methods, as well as the empty(), + * capacity() and size() methods are always thread safe. The other + * methods are not: assignment, swap(), clear() and resize() can not + * be called from a different thread while another thread is also + * accessing the lane. The same holds obviously for the constructors + * and destructor. 
This is chosen because these methods should almost never + * be called in parallel with other methods, and hence it is not worth + * to increase every call with extra locks to make this possible. + * + * With one reader and one writer, the order is guaranteed to be consistent. + * With multiple readers or writers in combination with multi-element + * write or read functions, a sequence of symbols might be interrupted. For + * example, if a multi-element write() won't fit completely in the buffer, + * the thread will wait for free space. Another thread might get now write + * access first, causing the single call to the multi-element write to be + * "split up". + * + * @author Andre Offringa + * @tparam Tp Type of elements to be stored in the lane. + */ +template<typename Tp> +class lane +{ + public: + /** @brief Integer type used to store size types. */ + typedef std::size_t size_type; + + /** @brief Type of elements stored in the lane. */ + typedef Tp value_type; + + /** @brief Construct a lane with zero elements. + * @details A lane with zero elements can not be written to or read to + * (both operations will wait forever). + * + * This constructor makes it easy to construct e.g. a container + * of lanes. After the container is created, the lanes can be + * resized with @ref resize(). + */ + lane() noexcept : + _buffer(0), + _capacity(0), + _write_position(0), + _free_write_space(0), + _status(status_normal) + { + } + + /** @brief Construct a lane with the given capacity. + * @details After construction, the lane is ready for writing to and reading from. + * @param capacity Number of elements that the lane can hold at once. + */ + explicit lane(size_t capacity) : + _buffer(new Tp[capacity]), + _capacity(capacity), + _write_position(0), + _free_write_space(_capacity), + _status(status_normal) + { + } + + lane(const lane<Tp>& source) = delete; + + /** @brief Move construct a lane. 
+ * @details This operation is not thread safe: the behaviour is undefined when + * other threads access the source lane. + * @param source Original lane to be moved from. + */ + lane(lane<Tp>&& source) noexcept : + _buffer(0), + _capacity(0), + _write_position(0), + _free_write_space(0), + _status(status_normal) + { + swap(source); + } + + /** @brief Destructor. + * @details The destructor is not synchronized. + */ + ~lane() + { + LANE_REPORT_DEBUG_INFO; + delete[] _buffer; + } + + lane<Tp>& operator=(const lane<Tp>& source) = delete; + + /** @brief Move assignment. + * @details This operation is not thread safe: the behaviour is undefined when + * other threads access the source lane. + * @param source Original lane to be moved from. + * @returns This lane. + */ + lane<Tp>& operator=(lane<Tp>&& source) noexcept + { + swap(source); + return *this; + } + + /** @brief Swap the contents of this lane with another. + * @details This operation is not thread safe: the behaviour is undefined when + * other threads access either lane. + */ + void swap(lane<Tp>& other) noexcept + { + std::swap(_buffer, other._buffer); + std::swap(_capacity, other._capacity); + std::swap(_write_position, other._write_position); + std::swap(_free_write_space, other._free_write_space); + std::swap(_status, other._status); + } + + /** @brief Clear the contents and reset the state of the lane. + * @details After calling clear(), the lane is in the same state as after + * construction. This also means that after clearing the lane, it + * is as if write_end() has not been called yet. + * + * This method is not thread safe. + */ + void clear() noexcept + { + _write_position = 0; + _free_write_space = _capacity; + _status = status_normal; + } + + /** @brief Write a single element. + * @details This method is thread safe, and can be called together with + * other write and read methods from different threads. + * + * If this call comes after a call to write_end(), the call + * will be ignored. 
+ * @param element Object to be copied into the cyclic buffer. + */ + void write(const value_type& element) + { + std::unique_lock<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + + if(_status == status_normal) + { + while(_free_write_space == 0) + { + LANE_REGISTER_DEBUG_WRITE_WAIT; + _writing_possible_condition.wait(lock); + } + + _buffer[_write_position] = element; + _write_position = (_write_position+1) % _capacity; + --_free_write_space; + // Now that there is less free write space, there is more free read + // space and thus readers can possibly continue. + _reading_possible_condition.notify_all(); + } + } + + /** @brief Write a single element by constructing it. + * @details This method is thread safe, and can be called together with + * other write and read methods from different threads. + * + * If this call comes after a call to write_end(), the call + * will be ignored. The implementation does not construct the value + * in place, but rather constructs the value and then move assigns it. + * This is because the value that it is moved into has already been + * constructed (in the current implementation). + * @param element Object to be moved into the cyclic buffer. + */ + template<typename... Args> + void emplace(Args&&... args) + { + std::unique_lock<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + + if(_status == status_normal) + { + while(_free_write_space == 0) + { + LANE_REGISTER_DEBUG_WRITE_WAIT; + _writing_possible_condition.wait(lock); + } + + _buffer[_write_position] = value_type(args...); + _write_position = (_write_position+1) % _capacity; + --_free_write_space; + // Now that there is less free write space, there is more free read + // space and thus readers can possibly continue. + _reading_possible_condition.notify_all(); + } + } + + /** @brief Write a single element by moving it in. + * @details This method is thread safe, and can be called together with + * other write and read methods from different threads. 
+ * + * If this call comes after a call to write_end(), the call + * will be ignored. + * @param element Object to be moved into the cyclic buffer. + */ + void write(value_type&& element) + { + std::unique_lock<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + + if(_status == status_normal) + { + while(_free_write_space == 0) + { + LANE_REGISTER_DEBUG_WRITE_WAIT; + _writing_possible_condition.wait(lock); + } + + _buffer[_write_position] = std::move(element); + _write_position = (_write_position+1) % _capacity; + --_free_write_space; + // Now that there is less free write space, there is more free read + // space and thus readers can possibly continue. + _reading_possible_condition.notify_all(); + } + } + + void write(const value_type* elements, size_t n) + { + write_generic(elements, n); + } + + void move_write(value_type* elements, size_t n) + { + write_generic(elements, n); + } + + bool read(value_type& destination) + { + std::unique_lock<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + while(free_read_space() == 0 && _status == status_normal) + { + LANE_REGISTER_DEBUG_READ_WAIT; + _reading_possible_condition.wait(lock); + } + if(free_read_space() == 0) + return false; + else + { + destination = std::move(_buffer[read_position()]); + ++_free_write_space; + // Now that there is more free write space, writers can possibly continue. + _writing_possible_condition.notify_all(); + return true; + } + } + + size_t read(value_type* destinations, size_t n) + { + size_t n_left = n; + + std::unique_lock<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + + size_t free_space = free_read_space(); + size_t read_size = free_space > n ? 
n : free_space; + immediate_read(destinations, read_size); + n_left -= read_size; + + while(n_left != 0 && _status == status_normal) + { + destinations += read_size; + + do { + LANE_REGISTER_DEBUG_READ_WAIT; + _reading_possible_condition.wait(lock); + } while(free_read_space() == 0 && _status == status_normal); + + free_space = free_read_space(); + read_size = free_space > n_left ? n_left : free_space; + immediate_read(destinations, read_size); + n_left -= read_size; + } + return n - n_left; + } + + void write_end() + { + std::lock_guard<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + _status = status_end; + _writing_possible_condition.notify_all(); + _reading_possible_condition.notify_all(); + } + + size_t capacity() const noexcept + { + return _capacity; + } + + size_t size() const + { + std::lock_guard<std::mutex> lock(_mutex); + return _capacity - _free_write_space; + } + + bool empty() const + { + std::lock_guard<std::mutex> lock(_mutex); + return _capacity == _free_write_space; + } + + /** + * Change the capacity of the lane. This will erase all data in the lane. + */ + void resize(size_t new_capacity) + { + Tp *new_buffer = new Tp[new_capacity]; + delete[] _buffer; + _buffer = new_buffer; + _capacity = new_capacity; + _write_position = 0; + _free_write_space = new_capacity; + _status = status_normal; + } + +#ifdef LANE_DEBUG_MODE + /** + * Change the name of this lane to make it appear in the output along + * with statistics. Do not use this function directly; use the + * set_lane_debug_name() macro instead. + * @param nameStr New debug description of this lane. 
+ */ + void setDebugName(const std::string& nameStr) + { + _debugName = nameStr; + } +#endif + private: + Tp* _buffer; + + size_t _capacity; + + size_t _write_position; + + size_t _free_write_space; + + enum { status_normal, status_end } _status; + + mutable std::mutex _mutex; + + std::condition_variable _writing_possible_condition, _reading_possible_condition; + + size_t read_position() const noexcept + { + return (_write_position + _free_write_space) % _capacity; + } + + size_t free_read_space() const noexcept + { + return _capacity - _free_write_space; + } + + // This is a template to allow const and non-const (to be able to move) + template<typename T> + void write_generic(T* elements, size_t n) + { + std::unique_lock<std::mutex> lock(_mutex); + LANE_REGISTER_DEBUG_INFO; + + if(_status == status_normal) + { + size_t write_size = _free_write_space > n ? n : _free_write_space; + immediate_write(elements, write_size); + n -= write_size; + + while(n != 0) { + elements += write_size; + + do { + LANE_REGISTER_DEBUG_WRITE_WAIT; + _writing_possible_condition.wait(lock); + } while(_free_write_space == 0 && _status == status_normal); + + write_size = _free_write_space > n ? n : _free_write_space; + immediate_write(elements, write_size); + n -= write_size; + } while(n != 0); + } + } + + // This is a template to allow const and non-const (to be able to move) + template<typename T> + void immediate_write(T *elements, size_t n) noexcept + { + // Split the writing in two ranges if needed. The first range fits in + // [_write_position, _capacity), the second range in [0, end). By doing + // so, we only have to calculate the modulo in the write position once. 
+ if(n > 0) + { + size_t nPart; + if(_write_position + n > _capacity) + { + nPart = _capacity - _write_position; + } else { + nPart = n; + } + for(size_t i = 0; i < nPart ; ++i, ++_write_position) + { + _buffer[_write_position] = std::move(elements[i]); + } + + _write_position = _write_position % _capacity; + + for(size_t i = nPart; i < n ; ++i, ++_write_position) + { + _buffer[_write_position] = std::move(elements[i]); + } + + _free_write_space -= n; + + // Now that there is less free write space, there is more free read + // space and thus readers can possibly continue. + _reading_possible_condition.notify_all(); + } + } + + void immediate_read(value_type *elements, size_t n) noexcept + { + // As with write, split in two ranges if needed. The first range fits in + // [read_position(), _capacity), the second range in [0, end). + if(n > 0) + { + size_t nPart; + size_t position = read_position(); + if(position + n > _capacity) + { + nPart = _capacity - position; + } else { + nPart = n; + } + for(size_t i = 0; i < nPart ; ++i, ++position) + { + elements[i] = std::move(_buffer[position]); + } + + position = position % _capacity; + + for(size_t i = nPart; i < n ; ++i, ++position) + { + elements[i] = std::move(_buffer[position]); + } + + _free_write_space += n; + + // Now that there is more free write space, writers can possibly continue. 
+ _writing_possible_condition.notify_all(); + } + } +#ifdef LANE_DEBUG_MODE + void registerDebugInfo() noexcept + { + _debugSummedSize += _capacity - _free_write_space; + _debugMeasureCount++; + } + void registerDebugReadWait() noexcept + { + ++_debugReadWaitCount; + } + void registerDebugWriteWait() noexcept + { + ++_debugWriteWaitCount; + } + void reportDebugInfo() + { + if(!_debugName.empty()) + { + std::stringstream str; + str + << "*** Debug report for the following lane: ***\n" + << "\"" << _debugName << "\"\n" + << "Capacity: " << _capacity << '\n' + << "Total read/write ops: " << _debugMeasureCount << '\n' + << "Average size of buffer, measured per read/write op.: " << round(double(_debugSummedSize)*100.0/_debugMeasureCount)/100.0 << '\n' + << "Number of wait events during reading: " << _debugReadWaitCount << '\n' + << "Number of wait events during writing: " << _debugWriteWaitCount << '\n'; + std::cout << str.str(); + } + } + std::string _debugName; + size_t + _debugSummedSize = 0, _debugMeasureCount = 0, + _debugReadWaitCount = 0, _debugWriteWaitCount = 0; +#endif +}; + +template<typename Tp> +void swap(ao::lane<Tp>& first, ao::lane<Tp>& second) noexcept +{ + first.swap(second); +} + +} // end of namespace + +#endif // AO_LANE11_H diff --git a/CEP/DP3/DPPP_Interpolate/src/CMakeLists.txt b/CEP/DP3/DPPP_Interpolate/src/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..52d938d9014f9bdb933302d19c1184f257dddece --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/src/CMakeLists.txt @@ -0,0 +1,5 @@ +include(LofarPackageVersion) + +lofar_add_library(dppp_interpolate + Interpolate.cc ../../../../LCS/Common/src/LofarLogCout.cc +) diff --git a/CEP/DP3/DPPP_Interpolate/src/Interpolate.cc b/CEP/DP3/DPPP_Interpolate/src/Interpolate.cc new file mode 100644 index 0000000000000000000000000000000000000000..79ace62ae8ddece0d3ca6d0efb1b4d4b263afa2c --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/src/Interpolate.cc @@ -0,0 +1,245 @@ +#include 
<DPPP_Interpolate/Interpolate.h> +#include <DPPP_Interpolate/buffered_lane.h> + +#include <lofar_config.h> +#include <DPPP/DPBuffer.h> +#include <DPPP/DPInfo.h> +#include <DPPP/DPRun.h> +#include <Common/ParameterSet.h> +#include <Common/LofarLogger.h> +#include <casacore/casa/Arrays/ArrayMath.h> +#include <Common/StringUtil.h> + +#include <iostream> +#include <iomanip> +#include <thread> + +using namespace casacore; + +void register_interpolate() +{ + LOFAR::DPPP::DPRun::registerStepCtor("interpolate", LOFAR::DPPP::Interpolate::makeStep); +} + +namespace LOFAR { namespace DPPP { + +Interpolate::Interpolate(DPInput* /*input*/, const ParameterSet& parset, const string& prefix) : + _name(prefix), + _interpolatedPos(0), + _windowSize(parset.getUint(prefix+"windowsize", 15)) +{ + if(_windowSize%2 != 1) + throw std::runtime_error("Window size of Interpolate action should be an odd number"); + + _kernelLookup.reserve(_windowSize*_windowSize); + for(int t=0; t!=int(_windowSize); ++t) + { + int y = t - int(_windowSize/2); + for(int ch=0; ch!=int(_windowSize); ++ch) + { + int x = ch - int(_windowSize/2); + double windowDist = double(x*x + y*y); + // Gaussian function with sigma = 1 + // (evaluated with double prec, then converted to floats) + double w = std::exp(windowDist * -0.5); + _kernelLookup.emplace_back(w); + } + } +} + +DPStep::ShPtr Interpolate::makeStep(DPInput* input, const ParameterSet& parset, const std::string& prefix) +{ + return DPStep::ShPtr(new Interpolate(input, parset, prefix)); +} + +void Interpolate::updateInfo(const DPInfo& infoIn) +{ + info() = infoIn; + info().setNeedVisData(); + info().setWriteData(); + info().setWriteFlags(); +} + +void Interpolate::show(std::ostream& os) const +{ + os << "Interpolate " << _name << '\n'; + os << " windowsize: " << _windowSize << '\n'; +} + +void Interpolate::showTimings(std::ostream& os, double duration) const +{ + os << " "; + FlagCounter::showPerc1 (os, _timer.getElapsed(), duration); + os << " Interpolate " << 
_name << endl; +} + +bool Interpolate::process(const DPBuffer& buf) +{ + _timer.start(); + // Collect the data in buffers. + _buffers.emplace_back(); + _buffers.back().copy(buf); + // If we have a full window of data, interpolate everything + // up to the middle of the window + if(_buffers.size() >= _windowSize) + { + size_t mid = _windowSize/2; + while(_interpolatedPos <= mid) + { + interpolateTimestep(_interpolatedPos); + ++_interpolatedPos; + } + // Buffers are only pushed to the next step when they are completely + // out of the window. This is because flags need to be set to false, + // however the flag information of the entire window is needed during + // interpolation, so these can only be set to false after processing. + sendFrontBufferToNextStep(); + } + _timer.stop(); + return true; +} + +void Interpolate::sendFrontBufferToNextStep() +{ + IPosition shp = _buffers.front().getData().shape(); + size_t + nPol = shp[0], + nChan = shp[1], + nBl = shp[2], + n = nPol * nChan * nBl; + // Set all flags to false + bool* flags = _buffers.front().getFlags().data(); + Complex* data = _buffers.front().getData().data(); + std::fill(flags, flags+n, false); + // Flag NaN values (values for which the entire window was flagged on input) + for(size_t i=0; i!=n; ++i) + { + if(!std::isfinite(data[i].real()) || !std::isfinite(data[i].imag())) + { + // The datum value is also set to 0, because NaNs sometimes give problems in + // certain software, even when they are flagged (e.g. in Sagecal). 
+ data[i] = 0.0; + flags[i] = true; + } + } + + _timer.stop(); + getNextStep()->process(_buffers.front()); + _timer.start(); + + _buffers.pop_front(); + --_interpolatedPos; +} + +void Interpolate::finish() +{ + _timer.start(); + + // Interpolate everything up to the end of the window + while(_interpolatedPos<_buffers.size()) { + interpolateTimestep(_interpolatedPos); + ++_interpolatedPos; + } + while(!_buffers.empty()) + { + sendFrontBufferToNextStep(); + } + + _timer.stop(); + + getNextStep()->finish(); +} + +#define BUFFER_SIZE 1024 + +void Interpolate::interpolateTimestep(size_t index) +{ + const IPosition shp = _buffers.front().getData().shape(); + const size_t + nPol = shp[0], + nChan = shp[1], + nPerBl = nPol*nChan, + nBl = shp[2]; + + std::vector<std::thread> threads; + size_t nthreads = std::min<size_t>(sysconf(_SC_NPROCESSORS_ONLN), 8); + _lane.resize(nthreads*BUFFER_SIZE); + lane_write_buffer<Sample> buflane(&_lane, BUFFER_SIZE); + threads.reserve(nthreads); + for(size_t i=0; i!=nthreads; ++i) + threads.emplace_back(&Interpolate::interpolationThread, this); + + std::vector<Complex> dataBlock; + for (size_t bl=0; bl<nBl; ++bl) + { + bool* flags = _buffers[index].getFlags().data() + bl*nPerBl; + for(size_t ch=0; ch!=nChan; ++ch) + { + for(size_t p=0; p!=nPol; ++p) + { + if(*flags) { + buflane.emplace(index, bl, ch, p); + } + ++flags; + } + } + } + buflane.write_end(); + + for(std::thread& t : threads) + t.join(); +} + +void Interpolate::interpolationThread() +{ + lane_read_buffer<Sample> buflane(&_lane, BUFFER_SIZE); + Sample sample; + while(buflane.read(sample)) + { + interpolateSample(sample.timestep, sample.baseline, sample.channel, sample.pol); + } +} + +void Interpolate::interpolateSample(size_t timestep, size_t baseline, size_t channel, size_t pol) +{ + const IPosition shp = _buffers.front().getData().shape(); + const size_t + nPol = shp[0], + nChan = shp[1], + timestepBegin = (timestep > _windowSize/2) ? 
(timestep - _windowSize/2) : 0, + timestepEnd = std::min(timestep + _windowSize/2 + 1, _buffers.size()), + channelBegin = (channel > _windowSize/2) ? (channel - _windowSize/2) : 0, + channelEnd = std::min(channel + _windowSize/2 + 1, nChan); + + std::complex<float> valueSum = 0.0; + float windowSum = 0.0; + + for(size_t t=timestepBegin; t!=timestepEnd; ++t) + { + Complex* data = _buffers[t].getData().data() + (baseline*nChan + channelBegin)*nPol + pol; + const bool* flags = _buffers[t].getFlags().data() + (baseline*nChan + channelBegin)*nPol + pol; + const float* row = &_kernelLookup[_windowSize * (t + int(_windowSize/2) - timestep)]; + for(size_t ch=channelBegin; ch!=channelEnd; ++ch) + { + if(!*flags) + { + int x = ch + int(_windowSize/2) - channel; + float w = row[x]; + valueSum += *data * w; + windowSum += w; + } + + data += nPol; + flags += nPol; + } + } + // This write is multithreaded, but is allowed because this value is never read from in + // the loops above (because flagged values are skipped). 
+ Complex& value = _buffers[timestep].getData().data()[(baseline*nChan + channel)*nPol + pol]; + if(windowSum != 0.0) + value = valueSum / windowSum; + else + value = Complex(std::numeric_limits<float>::quiet_NaN(), std::numeric_limits<float>::quiet_NaN()); +} + +} } //# end namespace diff --git a/CEP/DP3/DPPP_Interpolate/test/CMakeLists.txt b/CEP/DP3/DPPP_Interpolate/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..d7d8fbd417438b518d304edfee883f15727efa62 --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/test/CMakeLists.txt @@ -0,0 +1,5 @@ +# $Id: CMakeLists.txt 26355 2013-09-04 11:51:07Z dijkema $ + +include(LofarCTest) + +lofar_add_test(tInterpolateStep tInterpolateStep.cc) diff --git a/CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.cc b/CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.cc new file mode 100644 index 0000000000000000000000000000000000000000..ec28beb48f226c86c5f7f7417ac298ad5a59308a --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.cc @@ -0,0 +1,233 @@ +//# tInterpolateStep.cc: Test program for class InterpolateStep +//# Copyright (C) 2010 +//# ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+//# +//# $Id: tInterpolateStep.cc 24221 2013-03-12 12:24:48Z diepen $ +//# +//# @author Ger van Diepen + +#include <lofar_config.h> +#include <DPPP_Interpolate/Interpolate.h> +#include <DPPP/DPRun.h> +#include <DPPP/DPInput.h> +#include <DPPP/DPBuffer.h> +#include <DPPP/DPInfo.h> +#include <Common/ParameterSet.h> +#include <Common/StringUtil.h> +#include <casa/Arrays/ArrayMath.h> +#include <casa/Arrays/ArrayLogical.h> +#include <casa/Arrays/ArrayIO.h> +#include <iostream> + +using namespace LOFAR; +using namespace LOFAR::DPPP; +using namespace casa; +using namespace std; + +// Simple class to generate input arrays. +// It can only set all flags to true or all to false. +// Weights are always 1. +// It can be used with different nr of times, channels, etc. +class TestInput: public DPInput +{ +public: + TestInput(int ntime, int nant, int nchan, int ncorr, bool flag) + : itsCount(0), itsNTime(ntime), itsNBl(nant*(nant+1)/2), itsNChan(nchan), + itsNCorr(ncorr), itsFlag(flag) + { + // Fill the baseline stations; use 4 stations. + // So they are called 00 01 02 03 10 11 12 13 20, etc. + Vector<Int> ant1(itsNBl); + Vector<Int> ant2(itsNBl); + int st1 = 0; + int st2 = 0; + for (int i=0; i<itsNBl; ++i) { + ant1[i] = st1; + ant2[i] = st2; + if (++st2 == 4) { + st2 = 0; + if (++st1 == 4) { + st1 = 0; + } + } + } + Vector<String> antNames(4); + antNames[0] = "rs01.s01"; + antNames[1] = "rs02.s01"; + antNames[2] = "cs01.s01"; + antNames[3] = "cs01.s02"; + // Define their positions (more or less WSRT RT0-3). 
+ vector<MPosition> antPos (4); + Vector<double> vals(3); + vals[0] = 3828763; vals[1] = 442449; vals[2] = 5064923; + antPos[0] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + vals[0] = 3828746; vals[1] = 442592; vals[2] = 5064924; + antPos[1] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + vals[0] = 3828729; vals[1] = 442735; vals[2] = 5064925; + antPos[2] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + vals[0] = 3828713; vals[1] = 442878; vals[2] = 5064926; + antPos[3] = MPosition(Quantum<Vector<double> >(vals,"m"), + MPosition::ITRF); + Vector<double> antDiam(4, 70.); + info().set (antNames, antDiam, antPos, ant1, ant2); + // Define the frequencies. + Vector<double> chanFreqs(nchan); + Vector<double> chanWidth(nchan, 100000.); + indgen (chanFreqs, 1050000., 100000.); + info().set (chanFreqs, chanWidth); + } +private: + virtual bool process (const DPBuffer&) + { + // Stop when all times are done. + if (itsCount == itsNTime) { + return false; + } + cout << "Input step " << itsCount << ' '<< itsCount*5+2<<endl; + Cube<Complex> data(itsNCorr, itsNChan, itsNBl); + for (int i=0; i<int(data.size()); ++i) { + data.data()[i] = Complex(1.6, 0.9); + } + if (itsCount == 5) { + data += Complex(10.,10.); + } + DPBuffer buf; + buf.setTime (itsCount*5 + 2); //same interval as in updateAveragInfo + buf.setData (data); + Cube<float> weights(data.shape()); + weights = 1.; + buf.setWeights (weights); + Cube<bool> flags(data.shape()); + flags = itsFlag; + buf.setFlags (flags); + // The fullRes flags are a copy of the XX flags, but differently shaped. + // They are not averaged, thus only 1 time per row. 
+ Cube<bool> fullResFlags(itsNChan, 1, itsNBl); + fullResFlags = itsFlag; + buf.setFullResFlags (fullResFlags); + getNextStep()->process (buf); + ++itsCount; + return true; + } + + virtual void finish() {getNextStep()->finish();} + virtual void show (std::ostream&) const {} + virtual void updateInfo (const DPInfo&) + // Use startchan=0 and timeInterval=5 + { info().init (itsNCorr, itsNChan, itsNTime, 100, 5, string(), string()); } + + int itsCount, itsNTime, itsNBl, itsNChan, itsNCorr; + bool itsFlag; +}; + +// Class to check result. +class TestOutput: public DPStep +{ +public: + TestOutput(int ntime, int nant, int nchan, int ncorr) + : itsCount(0), itsNTime(ntime), itsNBl(nant*(nant+1)/2), itsNChan(nchan), + itsNCorr(ncorr) + {} +private: + virtual bool process (const DPBuffer& buf) + { + cout << "Output step " << itsCount << ' '<<itsCount*5+2<<endl; + // Fill expected result in similar way as TestInput. + Cube<Complex> result(itsNCorr,itsNChan,itsNBl); + for (int i=0; i<int(result.size()); ++i) { + result.data()[i] = Complex(1.6, 0.9); + } + if (itsCount == 5) { + result += Complex(10.,10.); + } + // Check the result. 
+ ///cout << buf.getData()<< result; + ASSERT (allNear(real(buf.getData()), real(result), 1e-10)); + ASSERT (allNear(imag(buf.getData()), imag(result), 1e-10)); + ASSERT (near(buf.getTime(), 2+5.*itsCount)); + ++itsCount; + return true; + } + + virtual void finish() {} + virtual void show (std::ostream&) const {} + virtual void updateInfo (const DPInfo& info) + { + ASSERT (int(info.origNChan())==itsNChan); + ASSERT (int(info.nchan())==itsNChan); + ASSERT (int(info.ntime())==itsNTime); + ASSERT (info.startTime()==100); + ASSERT (info.timeInterval()==5); + ASSERT (int(info.nchanAvg())==1); + ASSERT (int(info.ntimeAvg())==1); + ASSERT (int(info.chanFreqs().size()) == itsNChan); + ASSERT (int(info.chanWidths().size()) == itsNChan); + ASSERT (info.msName().empty()); + } + + int itsCount; + int itsNTime, itsNBl, itsNChan, itsNCorr; +}; + + +// Execute steps. +void execute (const DPStep::ShPtr& step1) +{ + // Set the info in each step. + step1->setInfo (DPInfo()); + // Execute the steps. + DPBuffer buf; + while (step1->process(buf)); + step1->finish(); + DPStep::ShPtr step = step1; + while (step) { + step->showCounts (cout); + step = step->getNextStep(); + } +} + +void test1(int ntime, int nant, int nchan, int ncorr, bool flag, int threshold) +{ + cout << "test1: ntime=" << ntime << " nrant=" << nant << " nchan=" << nchan + << " ncorr=" << ncorr << " threshold=" << threshold << endl; + // Create the steps. 
+ TestInput* in = new TestInput(ntime, nant, nchan, ncorr, flag); + DPStep::ShPtr step1(in); + ParameterSet parset; + parset.add ("windowsize", "9"); + DPStep::ShPtr step2 = DPRun::findStepCtor("interpolate")(in, parset, ""); + DPStep::ShPtr step3(new TestOutput(ntime, nant, nchan, ncorr)); + step1->setNextStep (step2); + step2->setNextStep (step3); + step2->show (cout); + execute (step1); +} + +int main() +{ + try { + test1(10, 2, 32, 4, false, 1); + } catch (std::exception& x) { + cout << "Unexpected exception: " << x.what() << endl; + return 1; + } + return 0; +} diff --git a/CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.sh b/CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.sh new file mode 100755 index 0000000000000000000000000000000000000000..7cd507992e304dd04023b576c26a8c3b948cd87f --- /dev/null +++ b/CEP/DP3/DPPP_Interpolate/test/tInterpolateStep.sh @@ -0,0 +1,2 @@ +#!/bin/sh +./runctest.sh tInterpolateStep diff --git a/CEP/DP3/PythonDPPP/src/PythonStep.cc b/CEP/DP3/PythonDPPP/src/PythonStep.cc index ec36289830c62b3462b460e4f55764e1b2787ab5..2badb164da685a5c492b5eb9f25a0415a1fc1aab 100644 --- a/CEP/DP3/PythonDPPP/src/PythonStep.cc +++ b/CEP/DP3/PythonDPPP/src/PythonStep.cc @@ -72,7 +72,11 @@ namespace LOFAR { string workingDir = Path(".").absoluteName(); char path[] = "path"; // needed to avoid warning if "path" used below PyObject* sysPath = PySys_GetObject(path); +#if PYTHON_VERSION_MAJOR < 3 PyList_Insert (sysPath, 0, PyString_FromString(workingDir.c_str())); +#else + PyList_Insert (sysPath, 0, PyUnicode_FromString(workingDir.c_str())); +#endif // Register converters for casa types from/to python types casa::pyrap::register_convert_excp(); casa::pyrap::register_convert_basicdata(); @@ -317,6 +321,12 @@ namespace LOFAR { } else if (! itsNChanChg && ! itsNBlChg) { itsBufOut.getFlags().assign (itsBufIn.getFlags()); } + if (rec.isDefined("FULLRESFLAGS")) { + itsBufOut.getFullResFlags().assign (rec.toArrayBool("FULLRESFLAGS")); + narr++; + } else if (! 
itsNChanChg && ! itsNBlChg) { + itsBufOut.getFullResFlags().assign (itsBufIn.getFullResFlags()); + } if (rec.isDefined("WEIGHTS")) { itsBufOut.getWeights().assign (rec.toArrayFloat("WEIGHTS")); narr++; diff --git a/CEP/DP3/PythonDPPP/src/__init__.py b/CEP/DP3/PythonDPPP/src/__init__.py index 855f27e9a61e12676a7c59f0b44b31a251915734..bc1c6303f30eca08f16b8973cef6b46bea5a7b13 100644 --- a/CEP/DP3/PythonDPPP/src/__init__.py +++ b/CEP/DP3/PythonDPPP/src/__init__.py @@ -136,7 +136,7 @@ class DPStep(_DPStepBase): The default implementation shows all parset keys. """ s = '' - for k,v in self.itsParset.iteritems(): + for k,v in self.itsParset.items(): if k not in ['type', 'python.class', 'python.module']: s += ' %-15s %s\n' % (k+':', v) return s diff --git a/CEP/DP3/PythonDPPP/test/tPythonStep.py b/CEP/DP3/PythonDPPP/test/tPythonStep.py index 3b3ad8cfb48e56ad84e8d2d613f29f57880f3da6..33aaef591c40cae0ee6580a4d5ddacd0e22ff5b9 100644 --- a/CEP/DP3/PythonDPPP/test/tPythonStep.py +++ b/CEP/DP3/PythonDPPP/test/tPythonStep.py @@ -19,6 +19,7 @@ # # $Id: __init__.py 23074 2012-12-03 07:51:29Z diepen $ +from __future__ import print_function from lofar.pythondppp import DPStep from lofar.parameterset import parameterset @@ -52,7 +53,7 @@ class tPythonStep(DPStep): self.getWeights (self.itsWeights); self.getUVW (self.itsUVW); # Process the data. - print "process tPythonStep", time-4.47203e9, exposure, self.itsData.sum(), self.itsFlags.sum(), self.itsWeights.sum(), self.itsUVW.sum() + print("process tPythonStep", time-4.47203e9, exposure, self.itsData.sum(), self.itsFlags.sum(), self.itsWeights.sum(), self.itsUVW.sum()) # Execute the next step in the DPPP pipeline. TIME,UVW are changed. return self.processNext ({'TIME': time+self.itsIncr, 'UVW': self.itsUVW+self.itsIncr}) @@ -60,7 +61,7 @@ class tPythonStep(DPStep): # Finish the step as needed. # This function does not need to be implemented. # Note: finish of the next step is called by the C++ layer. 
- print "finish tPythonStep" + print("finish tPythonStep") def showCounts(self): # Show the counts of this test. @@ -70,4 +71,4 @@ class tPythonStep(DPStep): def addToMS(self, msname): # Add some info the the output MeasurementSet. # This function does not need to be implemented. - print "addToMS tPythonStep", msname + print("addToMS tPythonStep", msname) diff --git a/CEP/MS/src/mstools.py b/CEP/MS/src/mstools.py index a335238ed9944477b8e3e91464f912048b861d15..00664c5b9cc55711337bc7f21d3f202e8c98c2d4 100644 --- a/CEP/MS/src/mstools.py +++ b/CEP/MS/src/mstools.py @@ -1,3 +1,5 @@ +from __future__ import print_function + import os import os.path import re @@ -58,7 +60,7 @@ def checkSAP_SB (fileNames, bandsPerBeam): for name in fileNames: parts = sapre.split (name) if len(parts) != 2: - print "File name %s does not contain a single string _SAP" % name + print("File name %s does not contain a single string _SAP" % name) return False assert (len(parts) == 2) sap = int(parts[1][0:3]) @@ -69,17 +71,17 @@ def checkSAP_SB (fileNames, bandsPerBeam): sbmax = sb else: if sap != sapUsed: - print "Error: multiple SAP numbers found in file names" + print("Error: multiple SAP numbers found in file names") return -1 if sb < sbmin: sbmin = sb if sb > sbmax: sbmax = sb if sbmax - sbmin + 1 > bandsPerBeam: - print 'Error: SB number range in file names exceeds bands per beam', bandsPerBeam + print('Error: SB number range in file names exceeds bands per beam', bandsPerBeam) return -1 if sbmax - sbmin + 1 < bandsPerBeam: - print 'Warning: SB number range in file names < bands per beam', bandsPerBeam + print('Warning: SB number range in file names < bands per beam', bandsPerBeam) return sapUsed @@ -90,10 +92,10 @@ def movemss (srcPattern, dstPattern, userName, bandsPerBeam=80, dryrun=False): (srcHosts, srcFiles) = findDirs(srcPattern) (dstHosts, dstFiles) = findDirs(dstPattern) if len(dstFiles) == 0: - print 'Error: no files found matching', dstPattern + print('Error: no files found 
matching', dstPattern) return False if len(srcFiles) < len(dstFiles): - print 'Error: fewer SRC files', srcPattern, 'found than DST files', dstPattern + print('Error: fewer SRC files', srcPattern, 'found than DST files', dstPattern) return False srcSAP = checkSAP_SB(srcFiles, bandsPerBeam) dstSAP = checkSAP_SB(dstFiles, bandsPerBeam) @@ -135,10 +137,10 @@ def movemss (srcPattern, dstPattern, userName, bandsPerBeam=80, dryrun=False): else: # Has DST to be moved from another node? if not srcMap.has_key(srcName): - print 'Src', srcName, 'not found for DST', dstFiles[i] + print('Src', srcName, 'not found for DST', dstFiles[i]) else: inx = srcMap[srcName] - print 'Move', srcName, 'from', srcHosts[inx], 'to', dstHosts[i] + print('Move', srcName, 'from', srcHosts[inx], 'to', dstHosts[i]) srcDir = os.path.dirname(srcName) cmd = '' if createDir: @@ -149,10 +151,10 @@ def movemss (srcPattern, dstPattern, userName, bandsPerBeam=80, dryrun=False): userName + '@' + dstHosts[i] + ':' + srcDir + \ ' && rm -rf ' + srcName + '"' # '" &' - print cmd + print(cmd) if not dryrun: os.system (cmd) - print nInPlace, "source files are already on the correct destination mode" + print(nInPlace, "source files are already on the correct destination mode") def addfileglob (filename, pattern): """ If needed, add the glob pattern to the filename @@ -187,18 +189,18 @@ def getSBmap (names, nsubbands): sbnrs = np.array([int(patt2.sub ('', patt1.sub('',x))) for x in names]) firstSB = 0 if len(sbnrs) == 0: - print 'no subbands found' + print('no subbands found') else: firstSB = sbnrs[0] / nsubbands * nsubbands if len(sbnrs) != nsubbands: - print 'subbands are missing for', patt3.sub('',names[0]) + print('subbands are missing for', patt3.sub('',names[0])) sbs = [-1 for i in range(nsubbands)] for i in range(len(sbnrs)): sbs[sbnrs[i] % nsubbands] = i if len(sbnrs) > 0: for i in range(len(sbs)): if sbs[i] < 0: - print ' subband', firstSB+i, 'seems to be missing' + print(' subband', firstSB+i, 'seems to be 
missing') return (sbs,firstSB) def getNamesSkip (locs, names, nsubbands): @@ -353,7 +355,7 @@ def expandps (parsetin, parsetout, keymap, nsubbands, ngroups=0, nodeindex=0, no havegroups = True # Check and initialize. if nodeindex < 0 or nodeindex >= nsbpergroup: - raise ValueError, "Argument ngroups or nodeindex has an invalid value" + raise ValueError("Argument ngroups or nodeindex has an invalid value") # Process input keywords. They must be present. inkeys = keymap["in"] @@ -368,7 +370,7 @@ def expandps (parsetin, parsetout, keymap, nsubbands, ngroups=0, nodeindex=0, no # Otherwise it defines the glob patterns. if isinstance(keyin, str): if keyin not in pskeys: - raise KeyError, "keyword " + keyin + " not found in parset " + parsetin + raise KeyError("keyword " + keyin + " not found in parset " + parsetin) # Get the file name pattern(s) patterns = ps.getStringVector(keyin) ps.remove (keyin) @@ -426,18 +428,18 @@ def expandps (parsetin, parsetout, keymap, nsubbands, ngroups=0, nodeindex=0, no # Process output keywords if they are present. if 'out' in keymap: if len(nameslist) == 0: - raise ValueError, "No input datasets have been defined" + raise ValueError("No input datasets have been defined") outkeys = keymap["out"] nrproc += 1 for (keyin,keyout) in outkeys: if isinstance(keyin, str): if keyin not in pskeys: - raise KeyError, "keyword " + keyin + " not found in parset " + parsetin + raise KeyError("keyword " + keyin + " not found in parset " + parsetin) name = ps.getString(keyin) ps.remove (keyin) else: if len(keyin) != 1: - raise KeyError, "Output key " + keyin + " is not a string, thus should be a sequence of length 1" + raise KeyError("Output key " + keyin + " is not a string, thus should be a sequence of length 1") name = keyin[0] locs = [] names = [] @@ -479,7 +481,7 @@ def expandps (parsetin, parsetout, keymap, nsubbands, ngroups=0, nodeindex=0, no # If given, use nodes to find the node to run on. 
if len(nodes) > 0: if i >= len(nodes): - raise ValueError, "Image seqnr " + str(i) + " beyond nodes list" + raise ValueError("Image seqnr " + str(i) + " beyond nodes list") locparts[0] = nodes[i] # Find OBSID, SAP, SB, and TYPE (as in L12345_SAP000_SB000_uv) # Make sure at least 4 entries. @@ -506,7 +508,7 @@ def expandps (parsetin, parsetout, keymap, nsubbands, ngroups=0, nodeindex=0, no # Check if all keymap keywords have been processed. if nrproc != len(keymap): - raise ValueError, "Only keys 'in' and 'out' are possible in the keymap argument" + raise ValueError("Only keys 'in' and 'out' are possible in the keymap argument") # Write the resulting parset. ps.writeFile (parsetout) - print "Created output parset " + parsetout + print("Created output parset " + parsetout) diff --git a/CEP/Pipeline/framework/lofarpipe/CMakeLists.txt b/CEP/Pipeline/framework/lofarpipe/CMakeLists.txt index 6ecc2a1b27e09bb7958fe47506ac0453f8c557b4..17217dd8176aa5f5c8d1cbbe06316d268a01395d 100644 --- a/CEP/Pipeline/framework/lofarpipe/CMakeLists.txt +++ b/CEP/Pipeline/framework/lofarpipe/CMakeLists.txt @@ -19,6 +19,7 @@ python_install( support/clusterdesc.py support/control.py support/data_map.py + support/feedback_version.py support/group_data.py support/jobserver.py support/lofarexceptions.py @@ -34,4 +35,5 @@ python_install( support/utilities.py support/xmllogging.py support/usagestats.py + support/feedback_version.py DESTINATION lofarpipe) diff --git a/CEP/Pipeline/framework/lofarpipe/support/feedback_version.py b/CEP/Pipeline/framework/lofarpipe/support/feedback_version.py new file mode 100644 index 0000000000000000000000000000000000000000..ee32c35062c4f667d3adb494909972cfb54e9c0b --- /dev/null +++ b/CEP/Pipeline/framework/lofarpipe/support/feedback_version.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +# Copyright (C) 2018 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: feedback.py 1580 2015-09-30 14:18:57Z loose $ + +# for MoM, to distinguish between different versions of feedback +# LOFAR RELEASE 3.1.0 +MAJOR_VERSION = 3 +MINOR_VERSION = 1 +PATCH_NUMBER = 0 +VERSION = "%02d.%02d.%02d" % (MAJOR_VERSION, MINOR_VERSION, PATCH_NUMBER) # 03.01.00 +# See also RTCP/Cobalt/CoInterface/src/LTAfeedback.cc/h \ No newline at end of file diff --git a/CEP/Pipeline/recipes/sip/CMakeLists.txt b/CEP/Pipeline/recipes/sip/CMakeLists.txt index e52af408f9598e286becfc725e886711e1ed31a0..87cdd00cf499e483d4c98183c457abe45b49490a 100644 --- a/CEP/Pipeline/recipes/sip/CMakeLists.txt +++ b/CEP/Pipeline/recipes/sip/CMakeLists.txt @@ -143,3 +143,4 @@ configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/tasks.cfg.CEP4.in ${CMAKE_CURRENT_BINARY_DIR}/tasks.cfg.CEP4) +add_subdirectory(helpers/test/) diff --git a/CEP/Pipeline/recipes/sip/bin/calibration_pipeline.py b/CEP/Pipeline/recipes/sip/bin/calibration_pipeline.py index 18edcaec53a7052200a7a4860b2afa4804c4c54c..ac418f3457b4c6bb254cc493fd39bad78ba4d783 100755 --- a/CEP/Pipeline/recipes/sip/bin/calibration_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/calibration_pipeline.py @@ -15,6 +15,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.utilities import create_directory from lofar.parameterset 
import parameterset from lofarpipe.support.loggingdecorators import mail_log_on_exception, duration +from lofarpipe.support.feedback_version import VERSION as feedback_version class calibration_pipeline(control): @@ -286,7 +287,7 @@ class calibration_pipeline(control): product_type="InstrumentModel", metadata_file=instrument_metadata_file) - self.send_feedback_processing(parameterset()) + self.send_feedback_processing(parameterset({'feedback_version': feedback_version})) self.send_feedback_dataproducts(parameterset(correlated_metadata_file)) self.send_feedback_dataproducts(parameterset(instrument_metadata_file)) diff --git a/CEP/Pipeline/recipes/sip/bin/imaging_pipeline.py b/CEP/Pipeline/recipes/sip/bin/imaging_pipeline.py index 93d394a9709ff7458676c4dafe02f69449f6f79f..5c163a3d079f37c5336484aa6ca3b493efbbebbf 100755 --- a/CEP/Pipeline/recipes/sip/bin/imaging_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/imaging_pipeline.py @@ -17,6 +17,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.data_map import DataMap, validate_data_maps, MultiDataMap from lofarpipe.support.utilities import patch_parset, get_parset from lofarpipe.support.loggingdecorators import xml_node, mail_log_on_exception +from lofarpipe.support.feedback_version import VERSION as feedback_version from lofar.parameterset import parameterset @@ -221,7 +222,7 @@ class imaging_pipeline(control): product_type = "SkyImage", metadata_file = metadata_file) - self.send_feedback_processing(parameterset()) + self.send_feedback_processing(parameterset({'feedback_version': feedback_version})) self.send_feedback_dataproducts(parameterset(metadata_file)) return 0 diff --git a/CEP/Pipeline/recipes/sip/bin/long_baseline_pipeline.py b/CEP/Pipeline/recipes/sip/bin/long_baseline_pipeline.py index 104aeea30bf3429f750c0385d5451ffdb854a840..60e9504e891babbd5a99ad81e55348afdc8d0fd1 100644 --- a/CEP/Pipeline/recipes/sip/bin/long_baseline_pipeline.py +++ 
b/CEP/Pipeline/recipes/sip/bin/long_baseline_pipeline.py @@ -20,6 +20,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.data_map import DataMap, validate_data_maps, MultiDataMap from lofarpipe.support.utilities import patch_parset, get_parset from lofarpipe.support.loggingdecorators import xml_node, mail_log_on_exception +from lofarpipe.support.feedback_version import VERSION as feedback_version from lofar.parameterset import parameterset @@ -141,7 +142,7 @@ class longbaseline_pipeline(control): # ********************************************************************* # (7) Get metadata # create a parset with information that is available on the toplevel - toplevel_meta_data = parameterset() + toplevel_meta_data = parameterset({'feedback_version': feedback_version}) # get some parameters from the imaging pipeline parset: subbandgroups_per_ms = self.parset.getInt("LongBaseline.subbandgroups_per_ms") diff --git a/CEP/Pipeline/recipes/sip/bin/msss_calibrator_pipeline.py b/CEP/Pipeline/recipes/sip/bin/msss_calibrator_pipeline.py index eb641e574a125d85011e5005447ed1b8c0f5d768..a2c7aa7b8d348e8d14a2aab5d01099ffcf03b0f9 100755 --- a/CEP/Pipeline/recipes/sip/bin/msss_calibrator_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/msss_calibrator_pipeline.py @@ -15,6 +15,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.utilities import create_directory from lofar.parameterset import parameterset from lofarpipe.support.loggingdecorators import mail_log_on_exception, duration +from lofarpipe.support.feedback_version import VERSION as feedback_version class msss_calibrator_pipeline(control): @@ -300,7 +301,7 @@ class msss_calibrator_pipeline(control): product_type="InstrumentModel", metadata_file=instrument_metadata_file) - self.send_feedback_processing(parameterset()) + self.send_feedback_processing(parameterset({'feedback_version': feedback_version})) 
self.send_feedback_dataproducts(parameterset(correlated_metadata_file)) self.send_feedback_dataproducts(parameterset(instrument_metadata_file)) diff --git a/CEP/Pipeline/recipes/sip/bin/msss_imager_pipeline.py b/CEP/Pipeline/recipes/sip/bin/msss_imager_pipeline.py index f2ce16de7cad52ae7019ee5ccb71c4459852c269..f3041b39f2e96aa7f109776e7e66123f52f2edb2 100755 --- a/CEP/Pipeline/recipes/sip/bin/msss_imager_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/msss_imager_pipeline.py @@ -17,6 +17,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.data_map import DataMap, validate_data_maps, MultiDataMap from lofarpipe.support.utilities import patch_parset, get_parset from lofarpipe.support.loggingdecorators import xml_node, mail_log_on_exception +from lofarpipe.support.feedback_version import VERSION as feedback_version from lofar.parameterset import parameterset @@ -215,7 +216,7 @@ class msss_imager_pipeline(control): # ********************************************************************* # (7) Get metadata # create a parset with information that is available on the toplevel - toplevel_meta_data = parameterset() + toplevel_meta_data = parameterset({'feedback_version': feedback_version}) toplevel_meta_data.replace("numberOfMajorCycles", str(number_of_major_cycles)) diff --git a/CEP/Pipeline/recipes/sip/bin/msss_target_pipeline.py b/CEP/Pipeline/recipes/sip/bin/msss_target_pipeline.py index 1d8cee64e7ef17be827fa41597a905bc14477f0c..a26a06d4dc392a94b248cbe8d85bf32be856e6cf 100755 --- a/CEP/Pipeline/recipes/sip/bin/msss_target_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/msss_target_pipeline.py @@ -17,6 +17,7 @@ from lofarpipe.support.data_map import DataMap, validate_data_maps from lofarpipe.support.utilities import create_directory from lofar.parameterset import parameterset from lofarpipe.support.loggingdecorators import mail_log_on_exception, duration +from lofarpipe.support.feedback_version import VERSION as feedback_version class 
msss_target_pipeline(control): @@ -325,7 +326,7 @@ class msss_target_pipeline(control): product_type="Correlated", metadata_file=metadata_file) - self.send_feedback_processing(parameterset()) + self.send_feedback_processing(parameterset({'feedback_version': feedback_version})) self.send_feedback_dataproducts(parameterset(metadata_file)) return 0 diff --git a/CEP/Pipeline/recipes/sip/bin/preprocessing_pipeline.py b/CEP/Pipeline/recipes/sip/bin/preprocessing_pipeline.py index cc4644ea4a808e6e9e6da02b47217f398d57e7ed..679dfb69af3b24d163fb17179b831726bc4fb621 100755 --- a/CEP/Pipeline/recipes/sip/bin/preprocessing_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/preprocessing_pipeline.py @@ -15,6 +15,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.utilities import create_directory from lofar.parameterset import parameterset from lofarpipe.support.loggingdecorators import mail_log_on_exception, duration +from lofarpipe.support.feedback_version import VERSION as feedback_version class preprocessing_pipeline(control): """ @@ -210,7 +211,7 @@ class preprocessing_pipeline(control): product_type="Correlated", metadata_file=metadata_file) - self.send_feedback_processing(parameterset()) + self.send_feedback_processing(parameterset({'feedback_version': feedback_version})) self.send_feedback_dataproducts(parameterset(metadata_file)) return 0 diff --git a/CEP/Pipeline/recipes/sip/bin/pulsar_pipeline.py b/CEP/Pipeline/recipes/sip/bin/pulsar_pipeline.py index 7fb03e3d794d69b3c39462a576731af256a926d8..9edef4fddeb5a4e0467d3eb580da9198550761bd 100755 --- a/CEP/Pipeline/recipes/sip/bin/pulsar_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/pulsar_pipeline.py @@ -25,6 +25,7 @@ from lofarpipe.support.lofarexceptions import PipelineException from lofarpipe.support.utilities import create_directory from lofar.parameterset import parameterset from lofarpipe.support.loggingdecorators import mail_log_on_exception, duration +from 
lofarpipe.support.feedback_version import VERSION as feedback_version class pulsar_pipeline(control): @@ -188,7 +189,7 @@ class pulsar_pipeline(control): self.logger.error("Could not read feedback from %s: %s" % (metadata_file,e)) return 1 - self.send_feedback_processing(parameterset()) + #self.send_feedback_processing(parameterset({'feedback_version': feedback_version})) self.send_feedback_dataproducts(metadata) return 0 diff --git a/CEP/Pipeline/recipes/sip/bin/selfcal_imager_pipeline.py b/CEP/Pipeline/recipes/sip/bin/selfcal_imager_pipeline.py index 040e51e7b9291f16593b20398604de04fc53c519..0e3361bd950c006035cfa0458f34334c6ee3c05b 100644 --- a/CEP/Pipeline/recipes/sip/bin/selfcal_imager_pipeline.py +++ b/CEP/Pipeline/recipes/sip/bin/selfcal_imager_pipeline.py @@ -21,6 +21,7 @@ from lofarpipe.support.data_map import DataMap, validate_data_maps,\ MultiDataMap, align_data_maps from lofarpipe.support.utilities import patch_parset, get_parset from lofarpipe.support.loggingdecorators import xml_node, mail_log_on_exception +from lofarpipe.support.feedback_version import VERSION as feedback_version from lofar.parameterset import parameterset @@ -355,7 +356,7 @@ class selfcal_imager_pipeline(control): parset_prefix = full_parset.getString('prefix') + \ full_parset.fullModuleName('DataProducts') - toplevel_meta_data = parameterset() + toplevel_meta_data = parameterset({'feedback_version': feedback_version}) toplevel_meta_data.replace( parset_prefix + ".numberOfMajorCycles", str(number_of_major_cycles)) diff --git a/CEP/Pipeline/recipes/sip/helpers/metadata.py b/CEP/Pipeline/recipes/sip/helpers/metadata.py index 247761de26ea56938e31469d793e1797a7b8226b..c0d31166c105e12e9b26c255ecea82cc73cc0793 100644 --- a/CEP/Pipeline/recipes/sip/helpers/metadata.py +++ b/CEP/Pipeline/recipes/sip/helpers/metadata.py @@ -79,6 +79,65 @@ def to_parset(data, prefix=''): result.replace(fullkey, str(value)) return result +class StorageWriterTypes: + ''' + type definitions for the used storage 
writer for the dataproducts, + and method to get the type and version from an MS table + ''' + CASA = 'CASA' # any default casa storage manager in MS datasets + LOFAR = 'LOFAR' # the lofar storage manager in MS datasets + DYSCO = 'DYSCO' # the dyscostorage manager in MS datasets + HDF5DEFAULT = 'HDF5DEFAULT' # normal hdf5 + UNKNOWN = 'UNKNOWN' # miscellaneous/unknown + + UNKNOWN_VERSION = 'UNKNOWN' + + @staticmethod + def get_type_and_version(main_table, logger=None): + ''' + tries to determine the used StorageWriterType and version from a MS main table + :param main_table: the main table of a casacore Measurement Set + :param logger: a logging.logger instance + :return: a tuple of (StorageWriterTypes.<constant>, '<version>') + ''' + try: + dminfo = main_table.getdminfo('DATA') + dataManagerType = dminfo.get('TYPE') + + if dataManagerType in ['StandardStMan', 'IncrementalStMan', + 'TiledColumnStMan', 'TiledCellStMan', 'TiledShapeStMan']: + return StorageWriterTypes.CASA, StorageWriterTypes.get_casa_version() + + if dataManagerType == 'LofarStMan': + try: + version = dminfo['SPEC']['version'] + except: + version = StorageWriterTypes.UNKNOWN_VERSION + return StorageWriterTypes.LOFAR, version + + if dataManagerType == 'DyscoStMan': + version = os.environ.get('DYSCO_VERSION', StorageWriterTypes.UNKNOWN_VERSION) + return StorageWriterTypes.DYSCO, StorageWriterTypes.get_dysco_version() + + except Exception as e: + if logger: + logger.error('Could not determine the used storageWriter type and version: %s', e) + + return StorageWriterTypes.UNKNOWN, StorageWriterTypes.UNKNOWN_VERSION + + @staticmethod + def get_casa_version(): + ''' + :return the casa version from the environment, or unknown + ''' + return os.environ.get('CASACORE_VERSION', StorageWriterTypes.UNKNOWN_VERSION) + + @staticmethod + def get_dysco_version(): + ''' + :return the dysco version from the environment, or unknown + ''' + return os.environ.get('DYSCO_VERSION', StorageWriterTypes.UNKNOWN_VERSION) class 
DataProduct(object): @@ -91,7 +150,10 @@ class DataProduct(object): 'fileFormat' : "", 'filename' : "", 'location' : "", - 'percentageWritten' : 0 + 'percentageWritten' : 0, + # by default the type of storagewriter is unknown and overridden in each subclass if needed + 'storageWriter': StorageWriterTypes.UNKNOWN, + 'storageWriterVersion': StorageWriterTypes.UNKNOWN_VERSION } self.logger = logger @@ -163,16 +225,21 @@ class Correlated(DataProduct): pyrap.tables.taql('calc ctod($startTime s)')[0] .replace('/', '-', 2).replace('/', 'T') ) + + sw_type, sw_version = StorageWriterTypes.get_type_and_version(main, self.logger) + self._data.update({ 'percentageWritten' : 100, + 'storageWriter': sw_type, + 'storageWriterVersion': sw_version, 'startTime' : startTimeString, 'duration' : endTime - startTime, 'integrationInterval' : exposure, 'centralFrequency' : spw.getcell('REF_FREQUENCY', 0), - 'channelWidth' : spw.getcell('RESOLUTION', 0)[0], + 'channelWidth' : spw.getcell('RESOLUTION', [3052])[0], 'channelsPerSubband' : spw.getcell('NUM_CHAN', 0), # Assume subband name has format 'SB-nn' - 'subband' : int(spw.getcell('NAME', 0)[3:]), + 'subband' : int(spw.getcell('NAME', 'SB-064')[3:]), 'stationSubband' : 0 ### NOT CORRECT! ### }) except Exception, error: @@ -202,11 +269,12 @@ class InstrumentModel(DataProduct): Collect instrument model metadata from the Measurement Set `filename`. """ super(InstrumentModel, self).collect(filename) + self._data['storageWriter'] = StorageWriterTypes.CASA + self._data['storageWriterVersion'] = StorageWriterTypes.get_casa_version() if self._data['size'] > 0: self._data['percentageWritten'] = 100 - class SkyImage(DataProduct): """ Class representing the metadata associated with a sky image. 
@@ -242,6 +310,16 @@ class SkyImage(DataProduct): """ super(SkyImage, self).collect(filename) try: + if filename.endswith('.h5'): + self._data['storageWriter'] = StorageWriterTypes.HDF5DEFAULT + self._data['storageWriterVersion'] = StorageWriterTypes.UNKNOWN_VERSION + elif filename.endswith('.IM'): + self._data['storageWriter'] = StorageWriterTypes.CASA + self._data['storageWriterVersion'] = StorageWriterTypes.get_casa_version() + else: + self._data['storageWriter'] = StorageWriterTypes.UNKNOWN + self._data['storageWriterVersion'] = StorageWriterTypes.UNKNOWN_VERSION + image = pyrap.images.image(filename) coord = image.coordinates() beaminfo = image.imageinfo()['restoringbeam'] diff --git a/CEP/Pipeline/recipes/sip/helpers/test/CMakeLists.txt b/CEP/Pipeline/recipes/sip/helpers/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..f566fde1ae2292337318648b14ed1805886363b5 --- /dev/null +++ b/CEP/Pipeline/recipes/sip/helpers/test/CMakeLists.txt @@ -0,0 +1,6 @@ +# $Id$ +include(LofarCTest) + +lofar_add_test(t_metadata) + + diff --git a/CEP/Pipeline/recipes/sip/helpers/test/t_metadata.py b/CEP/Pipeline/recipes/sip/helpers/test/t_metadata.py new file mode 100755 index 0000000000000000000000000000000000000000..c62fee90f82bc9b25376f9abefba7436e41b85cc --- /dev/null +++ b/CEP/Pipeline/recipes/sip/helpers/test/t_metadata.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python + +# Copyright (C) 2017 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +import unittest +from lofarpipe.recipes.helpers.metadata import * +from numpy import * +import mock + +import logging +logger = logging.getLogger(__name__) + +class AbstractMockTable: + '''a mocked version of the pyrap.tables.table class, + can be used with dependency injection in the tests''' + def __init__(self, *args, **kwargs): + pass + + def getkeyword(self, *args, **kwargs): + return '' + + def getcell(self, name, default=None): + return default + + def nrows(self): + return 0 + + def getdminfo(self, columnname): + return {} + +class LofarMockTable(AbstractMockTable): + def getdminfo(self, columnname): + assert columnname == 'DATA' + # return a real world example of datamanager info from a MS with a LofarStMan + return {'NAME': 'LofarStMan', + 'SEQNR': 0, + 'SPEC': {'alignment': 512, + 'bigEndian': False, + 'maxNrSample': 3056.0, + 'nbaseline': 741, + 'nrBytesPerNrValidSamples': 2, + 'startTime': 5022530520.500695, + 'timeInterval': 1.00139008, + 'useSeqnrFile': True, + 'version': 3}, + 'TYPE': 'LofarStMan'} + +class DyscoMockTable(AbstractMockTable): + def getdminfo(self, columnname): + assert columnname == 'DATA' + # return a real world example of datamanager info from a MS with a DyscoStMan + return {'NAME': 'DyscoData', + 'SEQNR': 3, + 'SPEC': {'dataBitCount': 10, + 'distribution': 'TruncatedGaussian', + 'distributionTruncation': 2.5, + 'normalization': 'AF', + 'studentTNu': 0.0, + 'weightBitCount': 12}, + 'TYPE': 'DyscoStMan'} + +class CasaTiledMockTable(AbstractMockTable): + def getdminfo(self, columnname): + assert columnname == 'DATA' + # 
return a real world example of datamanager info from a MS with a TiledColumnStMan + return {'NAME': 'TiledFlag', + 'SEQNR': 4, + 'SPEC': {'ActualMaxCacheSize': 0, + 'DEFAULTTILESHAPE': array([4, 4, 65536], dtype=int32), + 'HYPERCUBES': {'*1': {'BucketSize': 131072, + 'CellShape': array([4, 4], dtype=int32), + 'CubeShape': array([4, 4, 5993949], dtype=int32), + 'ID': {}, + 'TileShape': array([4, 4, 65536], dtype=int32)}}, + 'MAXIMUMCACHESIZE': 0, + 'SEQNR': 4}, + 'TYPE': 'TiledColumnStMan'} + +class CasaStandardMockTable(AbstractMockTable): + def getdminfo(self, columnname): + assert columnname == 'DATA' + # return a real world example of datamanager info from a MS with a StandardStMan + return {'NAME': 'SSMVar', + 'SEQNR': 0, + 'SPEC': {'ActualCacheSize': 2, + 'BUCKETSIZE': 32768, + 'IndexLength': 11830, + 'PERSCACHESIZE': 2}, + 'TYPE': 'StandardStMan'} + + +# for some reason, the casa and dysco versions are 'encoded' in the running environment +# define them here and set them for this test in the environment +CASA_VERSION = "2.2.0" +DYSCO_VERSION = "1.01" +os.environ['CASACORE_VERSION'] = CASA_VERSION +os.environ['DYSCO_VERSION'] = DYSCO_VERSION + + +class StorageWriterTypesTest(unittest.TestCase): + ''' + Tests the StorageWriterTypes class + ''' + + def test_get_type_and_version_casa_standard(self): + main = CasaStandardMockTable() + sw_type, sw_version = StorageWriterTypes.get_type_and_version(main) + self.assertEqual(StorageWriterTypes.CASA, sw_type) + self.assertEqual(CASA_VERSION, sw_version) + + def test_get_type_and_version_casa_tiled(self): + main = CasaTiledMockTable() + sw_type, sw_version = StorageWriterTypes.get_type_and_version(main) + self.assertEqual(StorageWriterTypes.CASA, sw_type) + self.assertEqual(CASA_VERSION, sw_version) + + def test_get_type_and_version_dysco(self): + main = DyscoMockTable() + sw_type, sw_version = StorageWriterTypes.get_type_and_version(main) + self.assertEqual(StorageWriterTypes.DYSCO, sw_type) + 
self.assertEqual(DYSCO_VERSION, sw_version) + + def test_get_type_and_version_lofar(self): + main = LofarMockTable() + sw_type, sw_version = StorageWriterTypes.get_type_and_version(main) + self.assertEqual(StorageWriterTypes.LOFAR, sw_type) + self.assertEqual(3, sw_version) + + def test_get_type_and_version_unknown(self): + main = AbstractMockTable() + sw_type, sw_version = StorageWriterTypes.get_type_and_version(main) + self.assertEqual(StorageWriterTypes.UNKNOWN, sw_type) + self.assertEqual(StorageWriterTypes.UNKNOWN_VERSION, sw_version) + + +class MetaDataTest(unittest.TestCase): + ''' + Tests the creation of correct meta data parsets + ''' + + def test_correlated_casa_standard(self): + with mock.patch('pyrap.tables.table', new=CasaStandardMockTable): + dataproduct_metadata = Correlated(logger=None, filename='casa-standard') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('casa_standard metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.CASA, metadata_parset.getString('storageWriter')) + self.assertEqual(CASA_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_correlated_casa_tiled(self): + with mock.patch('pyrap.tables.table', new=CasaTiledMockTable): + dataproduct_metadata = Correlated(logger=None, filename='casa-tiled') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('casa_lofar metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.CASA, metadata_parset.getString('storageWriter')) + self.assertEqual(CASA_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_correlated_lofar(self): + with mock.patch('pyrap.tables.table', new=LofarMockTable): + dataproduct_metadata = Correlated(logger=None, filename='lofar') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('lofar metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.LOFAR, metadata_parset.getString('storageWriter')) + 
self.assertEqual('3', metadata_parset.getString('storageWriterVersion')) + + def test_correlated_dysco(self): + with mock.patch('pyrap.tables.table', new=DyscoMockTable): + dataproduct_metadata = Correlated(logger=None, filename='dysco') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('dysco metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.DYSCO, metadata_parset.getString('storageWriter')) + self.assertEqual(DYSCO_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_correlated_unknown(self): + with mock.patch('pyrap.tables.table', new=AbstractMockTable): + dataproduct_metadata = Correlated(logger=None, filename='foo.bar') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('unknown metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.UNKNOWN, metadata_parset.getString('storageWriter')) + self.assertEqual(StorageWriterTypes.UNKNOWN_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_instrument_model(self): + with mock.patch('pyrap.images.image'): + dataproduct_metadata = InstrumentModel(logger=None, filename='foo.INST') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('instrument model metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.CASA, metadata_parset.getString('storageWriter')) + self.assertEqual(CASA_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_skyimage_h5(self): + with mock.patch('pyrap.images.image'): + dataproduct_metadata = SkyImage(logger=None, filename='foo.h5') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('instrument model metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.HDF5DEFAULT, metadata_parset.getString('storageWriter')) + self.assertEqual(StorageWriterTypes.UNKNOWN_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_skyimage_casa(self): + with 
mock.patch('pyrap.images.image'): + dataproduct_metadata = SkyImage(logger=None, filename='foo.IM') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('instrument model metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.CASA, metadata_parset.getString('storageWriter')) + self.assertEqual(CASA_VERSION, metadata_parset.getString('storageWriterVersion')) + + def test_skyimage_other(self): + with mock.patch('pyrap.images.image'): + dataproduct_metadata = SkyImage(logger=None, filename='foo.fits') + metadata_parset = dataproduct_metadata.as_parameterset() + logger.info('instrument model metadata parset:\n%s', metadata_parset) + self.assertEqual(StorageWriterTypes.UNKNOWN, metadata_parset.getString('storageWriter')) + self.assertEqual(StorageWriterTypes.UNKNOWN_VERSION, metadata_parset.getString('storageWriterVersion')) + + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) + unittest.main() + diff --git a/CEP/Pipeline/recipes/sip/helpers/test/t_metadata.sh b/CEP/Pipeline/recipes/sip/helpers/test/t_metadata.sh new file mode 100755 index 0000000000000000000000000000000000000000..f5b276f5ce3931415e054a13e4ae816f3577573b --- /dev/null +++ b/CEP/Pipeline/recipes/sip/helpers/test/t_metadata.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_metadata diff --git a/CEP/Pipeline/test/test_framework/fixture/lofar/common/__init__.py b/CEP/Pipeline/test/test_framework/fixture/lofar/common/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/CEP/Pipeline/test/test_framework/fixture/lofar/common/defaultmailaddresses.py b/CEP/Pipeline/test/test_framework/fixture/lofar/common/defaultmailaddresses.py new file mode 100644 index 0000000000000000000000000000000000000000..908fb53102299a30106ce346b858c7f2e71ac4e7 --- /dev/null +++ 
b/CEP/Pipeline/test/test_framework/fixture/lofar/common/defaultmailaddresses.py @@ -0,0 +1,80 @@ +# defaultmailaddresses.py: default mail addresses for the LOFAR software +# +# Copyright (C) 2017 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id$ +# +""" +This package contains the default mail addresses used in the LOFAR software +""" + +from ConfigParser import ConfigParser +import os +import pwd +from glob import glob + +# obtain the environment, and add USER and HOME if needed (since supervisord does not) +environ = os.environ +user_info = pwd.getpwuid(os.getuid()) +environ.setdefault("HOME", user_info.pw_dir) +environ.setdefault("USER", user_info.pw_name) + + + +def findfiles(pattern): + """ Returns a list of files matched by `pattern'. + The pattern can include environment variables using the + {VAR} notation. 
+ """ + try: + return glob(pattern.format(**environ)) + except KeyError: + return [] + + +# Addresses used for the pipelines +class PipelineEmailConfig(): + """ + Pipeline email configuration class + """ + + def __init__(self, filepatterns = None): + if filepatterns is None: + filepatterns = ["{LOFARROOT}/etc/email/*.ini", + "{HOME}/.lofar/email/*.ini"]# TODO correct the pattern here + self.configfiles = sum([findfiles(p) for p in filepatterns],[]) + if not self.configfiles: + raise Exception("no config file found") + self.config = None + + def load_config(self): + self.config = ConfigParser() + self.config.read(self.configfiles) + + def get(self, key): + if not self.config: + self.load_config() + return self.config.get("Pipeline",key) + + def __getitem__(self, what): + return self.get(what) + + + + diff --git a/CEP/pyparmdb/CMakeLists.txt b/CEP/pyparmdb/CMakeLists.txt index 6722696a7944a997dcc455d8f4bb3fd2fb3ff46d..c051e47709f6b697da435d51f0c4b3654829f50c 100644 --- a/CEP/pyparmdb/CMakeLists.txt +++ b/CEP/pyparmdb/CMakeLists.txt @@ -5,7 +5,7 @@ lofar_package(pyparmdb 1.0 DEPENDS Common ParmDB) include(LofarFindPackage) lofar_find_package(Boost REQUIRED COMPONENTS python) lofar_find_package(Python 2.6 REQUIRED) -lofar_find_package(Pyrap REQUIRED) +lofar_find_package(Casacore REQUIRED COMPONENTS python) add_subdirectory(src) add_subdirectory(test) diff --git a/CEP/pyparmdb/src/__init__.py b/CEP/pyparmdb/src/__init__.py index cd17f86050b5a298ec3b551b0f52b799c5124f62..221af0ae3773383103c875b41690d8f24bd9bf28 100755 --- a/CEP/pyparmdb/src/__init__.py +++ b/CEP/pyparmdb/src/__init__.py @@ -19,7 +19,7 @@ # # $Id$ -from _parmdb import ParmDB +from ._parmdb import ParmDB class parmdb(ParmDB): """ @@ -41,7 +41,7 @@ class parmdb(ParmDB): import lofar.parmdb pdb = parmdb(dbname) # open existing - pdb = 0 # close + pdb = 0 # close Almost all functions work on a local as well as a distributed database. The exception is :func:`addValues`. 
For the time being it only works @@ -65,7 +65,7 @@ class parmdb(ParmDB): return self._getRange (parmnamepattern) def getNames (self, parmnamepattern='', includeDefaults=False): - """Return the list of matching parameter names with actual values. + """Return the list of matching parameter names with actual values. parmnamepattern Parameter name pattern given as a shell-like filename pattern. @@ -78,7 +78,7 @@ class parmdb(ParmDB): return self._getNames (parmnamepattern) def getDefNames (self, parmnamepattern=''): - """Return the list of matching parameter names with default values. + """Return the list of matching parameter names with default values. The pattern must be given as a shell-like filename pattern. An empty pattern (the default) means '*' (thus all names). @@ -87,7 +87,7 @@ class parmdb(ParmDB): return self._getDefNames (parmnamepattern) def getDefValues (self, parmnamepattern=''): - """Return the default values of matching parameter names as a dict. + """Return the default values of matching parameter names as a dict. The pattern must be given as a shell-like filename pattern. An empty pattern (the default) means '*' (thus all names). diff --git a/CEP/pyparmdb/test/CMakeLists.txt b/CEP/pyparmdb/test/CMakeLists.txt index 81a127052e63f175dc75807b90707a4deae4980d..0fd8973b28719308ba20e39a1e55dcfd13df3487 100644 --- a/CEP/pyparmdb/test/CMakeLists.txt +++ b/CEP/pyparmdb/test/CMakeLists.txt @@ -1,6 +1,7 @@ # $Id$ include(LofarCTest) +include(FindPythonModule) # We need to create a symlink to the parmdbm executable in the current # binary directory, so that the test program(s) can find it. 
@@ -8,4 +9,9 @@ lofar_create_target_symlink( parmdbm ${CMAKE_CURRENT_BINARY_DIR}/parmdbm) -lofar_add_test(tpyparmdb DEPENDS parmdbm _parmdb) +find_python_module(numpy) +if(PYTHON_NUMPY_FOUND) + lofar_add_test(tpyparmdb DEPENDS parmdbm _parmdb) +else(PYTHON_NUMPY_FOUND) + message(WARNING "Numpy not found, disabling tpyparmdb test") +endif(PYTHON_NUMPY_FOUND) diff --git a/CEP/pyparmdb/test/tpyparmdb.py b/CEP/pyparmdb/test/tpyparmdb.py index 3728970b4eca92d62189b0478acaaaf668278859..52abe973e70a6d9dd325e7f0998d1b7d35290fab 100644 --- a/CEP/pyparmdb/test/tpyparmdb.py +++ b/CEP/pyparmdb/test/tpyparmdb.py @@ -1,28 +1,48 @@ +from __future__ import print_function + from lofar.parmdb import * +import os +import sys + + +def createTestFile(): + """Create a test parmdb using parmdbm""" + return os.system(""" +parmdbm <<EOF > tpyparmdb_tmp.pdbout + create tablename='tpyparmdb_tmp.pdb' + add parm1 domain=[1,5,4,10],values=2 + add parm2 type='polc', domain=[1,5,4,10], values=[2,0.1], nx=2 + adddef parmdef values=[3,1], nx=2 + quit +EOF""") + +### NOTE: parmdbm always returns exit code 0, so we cannot test if it worked +if createTestFile() != 0: + raise RuntimeError("Could not create parmdb for tpyparmdb") def showValues (pdb, pattern='*', nf=4, nt=2): # Get the names. - print pdb.getNames() + print(pdb.getNames()) # Get the range. rng = pdb.getRange() - print rng + print(rng) # Get the values. - print pdb.getValuesStep(pattern, rng[0], rng[2], nf, rng[1], rng[3], nt, True) + print(pdb.getValuesStep(pattern, rng[0], rng[2], nf, rng[1], rng[3], nt, True)) # Get values and grid. - print pdb.getValuesGrid(pattern, rng[0], rng[2], rng[1], rng[3]) + print(pdb.getValuesGrid(pattern, rng[0], rng[2], rng[1], rng[3])) # Print default names and values. - print pdb.getDefNames(pattern); - print pdb.getDefValues(pattern) + print(pdb.getDefNames(pattern)) + print(pdb.getDefValues(pattern)) # The test is the same as in tParmFacade.cc. # Open the parameterset (created in .run file). 
pdb=parmdb("tpyparmdb_tmp.pdb") -print ">>>" -print pdb.version("tree") -print pdb.version("full") -print pdb.version("top") -print pdb.version() -print "<<<" +print(">>>") +print(pdb.version("tree")) +print(pdb.version("full")) +print(pdb.version("top")) +print(pdb.version()) +print("<<<") showValues (pdb); showValues (pdb, '', 1); showValues (pdb, 'parm1', 1); diff --git a/CEP/pyparmdb/test/tpyparmdb.run b/CEP/pyparmdb/test/tpyparmdb.run deleted file mode 100755 index c64c4ee3360f90494e4a895ecb280cb4b03560c5..0000000000000000000000000000000000000000 --- a/CEP/pyparmdb/test/tpyparmdb.run +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh - -parmdbm <<EOF > tpyparmdb_tmp.pdbout - create tablename='tpyparmdb_tmp.pdb' - add parm1 domain=[1,5,4,10],values=2 - add parm2 type='polc', domain=[1,5,4,10], values=[2,0.1], nx=2 - adddef parmdef values=[3,1], nx=2 - quit -EOF -if [ $? != 0 ]; then - cat tpyparmdb_tmp.pdbout - exit 1 -fi - -python tpyparmdb.py diff --git a/CMake/FindBoost.cmake b/CMake/FindBoost.cmake index 61dad5ba2359cacbf2974d3e93700dce9b4e6eb3..24be3e7216b8835921ae2ba4c5cac6d29ef07595 100644 --- a/CMake/FindBoost.cmake +++ b/CMake/FindBoost.cmake @@ -58,6 +58,28 @@ foreach(_comp ${Boost_FIND_COMPONENTS}) endif(DEFINED USE_BOOST_${_COMP} AND NOT USE_BOOST_${_COMP}) endforeach(_comp ${Boost_FIND_COMPONENTS}) +# For python3, append the python component with a suffix +if("${Boost_FIND_COMPONENTS}" MATCHES "python") + find_package(Python) + if(PYTHON_FOUND) + if(PYTHON_VERSION_MAJOR GREATER 2) + # TODO: add support for CentOS7 here (name should be python3 there) + if(APPLE) + # On apple (homebrew), boost-python for python 3 is called boost-python3 + string(REPLACE "python" "python3" + Boost_FIND_COMPONENTS "${Boost_FIND_COMPONENTS}") + else(APPLE) + # On ubuntu, boost-python for python 3 is called e.g. 
boost-python-py35 + string(REPLACE "python" + "python-py${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR}" + Boost_FIND_COMPONENTS "${Boost_FIND_COMPONENTS}") + endif(APPLE) + endif(PYTHON_VERSION_MAJOR GREATER 2) + else(PYTHON_FOUND) + message(SEND_ERROR "boost-python was requested but python was not found.") + endif(PYTHON_FOUND) +endif("${Boost_FIND_COMPONENTS}" MATCHES "python") + # Call the "real" FindBoost module. include(${CMAKE_ROOT}/Modules/FindBoost.cmake) diff --git a/CMake/FindCasacore.cmake b/CMake/FindCasacore.cmake index cf824314f79980ec1a1a5b35f41b6a62f8ec9eec..8652068af5580933cb4cc40899b3b54f68db304b 100644 --- a/CMake/FindCasacore.cmake +++ b/CMake/FindCasacore.cmake @@ -5,6 +5,9 @@ # casa, coordinates, derivedmscal, fits, images, lattices, meas, # measures, mirlib, ms, msfits, python, scimath, scimath_f, tables # +# The component python will be replaced by python3 if the version of the +# python interpreter found is >= 3. +# # Note that most components are dependent on other (more basic) components. # In that case, it suffices to specify the "top-level" components; dependent # components will be searched for automatically. @@ -154,6 +157,7 @@ set(Casacore_components ms msfits python + python3 scimath scimath_f tables @@ -172,6 +176,7 @@ set(Casacore_mirlib_DEPENDENCIES) set(Casacore_ms_DEPENDENCIES measures scimath tables casa) set(Casacore_msfits_DEPENDENCIES ms fits measures tables casa) set(Casacore_python_DEPENDENCIES casa) +set(Casacore_python3_DEPENDENCIES casa) set(Casacore_scimath_DEPENDENCIES scimath_f casa) set(Casacore_scimath_f_DEPENDENCIES) set(Casacore_tables_DEPENDENCIES casa) @@ -220,6 +225,18 @@ else(NOT CASACORE_INCLUDE_DIR) # Get a list of all dependent Casacore libraries that need to be found. 
casacore_resolve_dependencies(_find_components ${Casacore_FIND_COMPONENTS}) + # For python3, change the python component to python3 + if("${_find_components}" MATCHES "python") + find_package(Python) + if(PYTHON_FOUND) + if(PYTHON_VERSION_MAJOR GREATER 2) + string(REPLACE "python" "python3" _find_components "${_find_components}") + endif(PYTHON_VERSION_MAJOR GREATER 2) + else(PYTHON_FOUND) + message(SEND_ERROR "casacore-python was requested but python was not found.") + endif(PYTHON_FOUND) + endif("${_find_components}" MATCHES "python") + # Find the library for each component, and handle external dependencies foreach(_comp ${_find_components}) casacore_find_library(casa_${_comp}) diff --git a/CMake/FindMPI.cmake b/CMake/FindMPI.cmake index f2d9798110a009d7fde41da1cf7a49a4d1831804..b19a537d5f627c8ba58d61cc2339feae15d89199 100644 --- a/CMake/FindMPI.cmake +++ b/CMake/FindMPI.cmake @@ -253,6 +253,7 @@ else (MPI_COMPILE_CMDLINE) /usr/local/include /usr/include /usr/include/mpi + /usr/include/openmpi-x86_64 /usr/local/mpi/include "C:/Program Files/MPICH/SDK/Include" "$ENV{SystemDrive}/Program Files/MPICH2/include" @@ -268,7 +269,7 @@ else (MPI_COMPILE_CMDLINE) find_library(MPI_LIBRARY NAMES mpi mpich msmpi - PATHS /usr/lib /usr/local/lib /usr/local/mpi/lib + PATHS /usr/lib /usr/local/lib /usr/local/mpi/lib /usr/lib64/openmpi/lib/ "C:/Program Files/MPICH/SDK/Lib" "$ENV{SystemDrive}/Program Files/MPICH/SDK/Lib" "$ENV{SystemDrive}/Program Files/Microsoft Compute Cluster Pack/Lib/${MS_MPI_ARCH_DIR}" @@ -280,7 +281,7 @@ else (MPI_COMPILE_CMDLINE) find_library(MPI_EXTRA_LIBRARY NAMES mpi++ - PATHS /usr/lib /usr/local/lib /usr/local/mpi/lib + PATHS /usr/lib /usr/local/lib /usr/local/mpi/lib /usr/lib64/openmpi/lib/ "C:/Program Files/MPICH/SDK/Lib" DOC "Extra MPI libraries to link against.") diff --git a/CMake/FindPython.cmake b/CMake/FindPython.cmake index b6e729e27812f30e4ca0515aa12c3c43cd6fc4ad..ae5d0eecdc31d3a666b2bb2f026708c2eb8268f0 100644 --- a/CMake/FindPython.cmake +++ 
b/CMake/FindPython.cmake @@ -59,4 +59,3 @@ set(PYTHON_FOUND FALSE) if(PYTHONINTERP_FOUND AND PYTHONLIBS_FOUND) set(PYTHON_FOUND TRUE) endif(PYTHONINTERP_FOUND AND PYTHONLIBS_FOUND) - diff --git a/CMake/FindPythonModule.cmake b/CMake/FindPythonModule.cmake index bd54e5df539a20c530baf4d325899f1ce052fc37..190b85f210d94cdfe71a71188a13f5af5336ff0a 100644 --- a/CMake/FindPythonModule.cmake +++ b/CMake/FindPythonModule.cmake @@ -42,7 +42,7 @@ find_package(PythonInterp) # ----------------------------------------------------------------------------- include(CMakeParseArguments) -function(find_python_module _module) +macro(find_python_module _module) # Name of module in uppercase. string(TOUPPER "${_module}" _MODULE) @@ -75,7 +75,7 @@ function(find_python_module _module) # Try to import the python module we need to find, and get its file path. if(PYTHON_EXECUTABLE) set(ENV{PYTHONPATH} ${PYTHON_${_MODULE}_FIND_HINTS}:$ENV{PYTHONPATH}) - set(_cmd "import ${_module}; print ${_module}.__file__") + set(_cmd "from __future__ import print_function; import ${_module}; print(${_module}.__file__)") execute_process( COMMAND "${PYTHON_EXECUTABLE}" "-c" "${_cmd}" RESULT_VARIABLE _result @@ -103,4 +103,4 @@ function(find_python_module _module) endif(is_required GREATER -1) endif(NOT PYTHON_${_MODULE}_FOUND) -endfunction() +endmacro() diff --git a/CMake/FindWINCC.cmake b/CMake/FindWINCC.cmake index 52da9ffd9cf117cd899183a871202bd5456de94e..801479e8d23c1af82be232ac8cf8cd15867f6d04 100644 --- a/CMake/FindWINCC.cmake +++ b/CMake/FindWINCC.cmake @@ -64,12 +64,12 @@ set(wincc_include_dirs # Define WINCC libraries. 
set(wincc_libraries + ComDrv Manager Messages Datapoint Basics - bcm - ComDrv) + bcm) if(NOT WINCC_FOUND) diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake index 4759cde975e6ed4a81aa289489aac310d66c19cc..ef02966164e359c9a5e26dae903fcc1045e50a7e 100644 --- a/CMake/LofarPackageList.cmake +++ b/CMake/LofarPackageList.cmake @@ -1,7 +1,7 @@ # - Create for each LOFAR package a variable containing the absolute path to # its source directory. # -# Generated by gen_LofarPackageList_cmake.sh at Do 15. Mär 14:31:17 CET 2018 +# Generated by gen_LofarPackageList_cmake.sh at wo 18 jul 2018 15:04:44 CEST # # ---- DO NOT EDIT ---- # @@ -36,6 +36,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(TestDynDPPP_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/DP3/TestDynDPPP) set(PythonDPPP_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/DP3/PythonDPPP) set(DPPP_AOFlag_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/DP3/DPPP_AOFlag) + set(DPPP_Interpolate_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/DP3/DPPP_Interpolate) set(SPW_Combine_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/DP3/SPWCombine) set(DPPP_DDECal_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/DP3/DPPP_DDECal) set(LofarFT_SOURCE_DIR ${CMAKE_SOURCE_DIR}/CEP/Imager/LofarFT) @@ -55,6 +56,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(MAC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC) set(LCU_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LCU) set(LTA_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LTA) + set(QA_SOURCE_DIR ${CMAKE_SOURCE_DIR}/QA) set(SubSystems_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SubSystems) set(ALC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LCS/ACC/ALC) set(PLC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LCS/ACC/PLC) @@ -91,6 +93,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(PPSTune_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LCU/PPSTune) set(Firmware-Tools_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LCU/Firmware/tools) set(MACTools_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LCU/StationTest/MACTools) + set(LTACommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LTA/LTACommon) set(LTAIngest_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LTA/LTAIngest) 
set(ltastorageoverview_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LTA/ltastorageoverview) set(sip_SOURCE_DIR ${CMAKE_SOURCE_DIR}/LTA/sip) @@ -147,6 +150,8 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(TaskManagementClient_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/Services/TaskManagement/Client) set(TaskManagementCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/Services/TaskManagement/Common) set(TaskManagementServer_SOURCE_DIR ${CMAKE_SOURCE_DIR}/MAC/Services/TaskManagement/Server) + set(QA_Common_SOURCE_DIR ${CMAKE_SOURCE_DIR}/QA/QA_Common) + set(QA_Service_SOURCE_DIR ${CMAKE_SOURCE_DIR}/QA/QA_Service) set(Cobalt_SOURCE_DIR ${CMAKE_SOURCE_DIR}/RTCP/Cobalt) set(InputProc_SOURCE_DIR ${CMAKE_SOURCE_DIR}/RTCP/Cobalt/InputProc) set(OutputProc_SOURCE_DIR ${CMAKE_SOURCE_DIR}/RTCP/Cobalt/OutputProc) diff --git a/CMake/testscripts/assay b/CMake/testscripts/assay index a2ceb3ee0915791726bcdccf2fac88bbb88d734c..2430132a5012e0941456c2766b6e88cfcddd7e34 100755 --- a/CMake/testscripts/assay +++ b/CMake/testscripts/assay @@ -195,8 +195,9 @@ exit $STATUS' 0 1 2 3 15 # If there is a .py file then use it. +# Force unbuffered output to give the same behavior in python3 and python2 if [ -f "$PROG.py" ]; then - COMMAND="python $PROG.py" + COMMAND="${PYTHON_EXECUTABLE:-python} -u $PROG.py" LOFAR_CHECKTOOL= fi diff --git a/CMake/testscripts/runctest.sh.in b/CMake/testscripts/runctest.sh.in index 052b94c0dc33f15c52896d02a8191e717d41e7dc..a99cfd4f5db46d667724580049dadc18d6f6fb3e 100755 --- a/CMake/testscripts/runctest.sh.in +++ b/CMake/testscripts/runctest.sh.in @@ -42,6 +42,9 @@ export LD_LIBRARY_PATH # Add the Python build directory to PYTHONPATH. 
PYTHONPATH="@srcdir@:@PYTHON_BUILD_DIR@:${PYTHONPATH}"; export PYTHONPATH +# Set the Python interpreter to the one found by CMake +PYTHON_EXECUTABLE="@PYTHON_EXECUTABLE@"; export PYTHON_EXECUTABLE + # Start CMake/testscripts/runtest.sh "@LOFAR_ROOT@/CMake/testscripts/runtest.sh" "$@" diff --git a/CMake/variants/variants.lcs157 b/CMake/variants/variants.lcs157 index d432f4622c08fc430f0a2841703bcba174222159..148f76b8073418c33117450ccd70133fe4836213 100644 --- a/CMake/variants/variants.lcs157 +++ b/CMake/variants/variants.lcs157 @@ -4,5 +4,10 @@ set(WINCC_ROOT_DIR /opt/WinCC_OA/3.14) set(CASACORE_ROOT_DIR "/opt/casacore") +set(CASAREST_ROOT_DIR "/opt/casarest") +set(PYRAP_ROOT_DIR "/opt/pyrap") +set(AOFLAGGER_ROOT_DIR "/opt/aoflagger/build") +set(BDSF_ROOT_DIR "/opt/PyBDSF/lib64/python2.7/site-packages/") +set(DAL_ROOT_DIR "/opt/DAL") #set(QPID_ROOT_DIR /opt/qpid) diff --git a/CMake/variants/variants.lofar-documentation b/CMake/variants/variants.lofar-documentation index 3af5476efa7720397f39ad016ed554a791061802..b8bd7aca1f49d532f6e85cfcd13710314abe41a0 100644 --- a/CMake/variants/variants.lofar-documentation +++ b/CMake/variants/variants.lofar-documentation @@ -8,10 +8,13 @@ set(WINCC_ROOT_DIR /opt/WinCC_OA/3.14) set(CASACORE_ROOT_DIR /opt/casacore) set(CASAREST_ROOT_DIR /opt/casarest) set(CASA_ROOT_DIR /opt/casarest/casarest-1.4.2) -set(AOFLAGGER_LIBRARY /opt/aoflagger-2.9.0/build/lib/libaoflagger.so) -set(AOFLAGGER_INCLUDE_DIR /opt/aoflagger-2.9.0/build/include) +set(AOFLAGGER_ROOT_DIR "/opt/aoflagger-2.9.0/build") set(DAL_LIBRARY /opt/dal/DAL-master/build/lib/liblofardal.so) set(DAL_INCLUDE_DIR /opt/dal/DAL-master/build/include) set(CUDA_SDK_ROOT_DIR /usr/local/cuda-8.0) set(CUDA_TOOLKIT_ROOT_DIR /usr/local/cuda) set(MPI_ROOT_DIR /usr/lib64/openmpi) +set(BDSF_ROOT_DIR "/opt/pybdsf-1.8.9/PyBDSF-1.8.9/lib64/python2.7/site-packages/") + +#set(AOFLAGGER_LIBRARY /opt/aoflagger-2.9.0/build/lib/libaoflagger.so) +#set(AOFLAGGER_INCLUDE_DIR 
/opt/aoflagger-2.9.0/build/include) diff --git a/CMakeLists.txt b/CMakeLists.txt index ea914605bb0e6cfd80c3f8dcb4117ad3c8697aa1..b1cf98b456f67b28a88bb5432fc1dc90b0f38943 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -26,6 +26,7 @@ if(NOT DEFINED BUILD_PACKAGES) lofar_add_package(MAC) lofar_add_package(LCU) lofar_add_package(LTA) + lofar_add_package(QA) lofar_add_package(SubSystems) else(NOT DEFINED BUILD_PACKAGES) separate_arguments(BUILD_PACKAGES) diff --git a/Docker/lofar-pipeline/Dockerfile.tmpl b/Docker/lofar-pipeline/Dockerfile.tmpl index 1ae4c72764b326465b629c86b2895bd0ae0c237b..ca558efa87ff6c67366dc599f70debef18c923c7 100644 --- a/Docker/lofar-pipeline/Dockerfile.tmpl +++ b/Docker/lofar-pipeline/Dockerfile.tmpl @@ -47,6 +47,32 @@ RUN apt-get update && apt-get install -y wget cmake g++ libxml++2.6-dev libpng12 apt-get -y purge wget cmake g++ libxml++2.6-dev libpng12-dev libfftw3-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-signals${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libcfitsio3-dev libopenblas-dev && \ apt-get -y autoremove --purge + +# +# ******************* +# DYSCO +# ******************* +# + +ENV DYSCO_VERSION=1.0.1 + +RUN apt-get update && apt-get install -y git cmake g++ doxygen libboost-all-dev libgsl0-dev libhdf5-dev && \ + mkdir ${INSTALLDIR}/dysco && \ + cd ${INSTALLDIR}/dysco && git clone https://github.com/aroffringa/dysco && \ + cd ${INSTALLDIR}/dysco/dysco && git checkout tags/v${DYSCO_VERSION} && \ + mkdir build && \ + cd build && \ + cmake -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/dysco/ -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ ../ && \ + make -j 4 && \ + make install && \ + #mkdir -p ${INSTALLDIR}/dysco/lib/python${PYTHON_VERSION}/site-packages/ && \ + #export PYTHONPATH=${INSTALLDIR}/dysco/lib/python${PYTHON_VERSION}/site-packages:${INSTALLDIR}/dysco/lib64/python${PYTHON_VERSION}/site-packages:$PYTHONPATH && \ + apt-get -y purge git cmake g++ doxygen libboost-all-dev 
libgsl0-dev libhdf5-dev + + ENV LD_LIBRARY_PATH=${INSTALLDIR}/dysco/lib:${LD_LIBRARY_PATH} + ENV PATH=${INSTALLDIR}/dysco/bin:${PATH} + + # # ******************* # LOFAR diff --git a/LCS/Common/src/FileLocator.cc b/LCS/Common/src/FileLocator.cc index 304a873e9d5d6ba7c90422fa126c09c65cd21cab..22342b26a195b0139ba5b78552dbe66a0da8f2b3 100644 --- a/LCS/Common/src/FileLocator.cc +++ b/LCS/Common/src/FileLocator.cc @@ -1,3 +1,4 @@ +/* vim: set tabstop=4:softtabstop=4:shiftwidth=4:noexpandtab */ //# FileLocator.cc: Tries to locate a file in an earlier defined path. //# //# Copyright (C) 2006 @@ -75,7 +76,7 @@ FileLocator::~FileLocator() // // addPathAtBack(aPath): bool // -// Adds the given pah(chain) at the back of the search path +// Adds the given path(chain) at the back of the search path // void FileLocator::addPathAtBack (const string& aPath) { @@ -229,24 +230,17 @@ string FileLocator::locate (const string& aFile) } // Otherwise, search the path chain else { - iterator iter = itsPaths.begin(); - iterator chainEnd = itsPaths.end(); + iterator iter = itsPaths.begin(); + iterator chainEnd = itsPaths.end(); while (iter != chainEnd) { - // when itsSubdir is filled each test much be performed also with subdir - for (int test = 0; test <= (itsSubdir.empty() ? 0 : 1); test++) { - string fullname; - fullname = *iter + (*iter != "/" ? "/" : ""); - if (test == 0) { // basedir? - fullname += aFile; - } - else { // test subdir - fullname += itsSubdir + "/" + aFile; - } - result = stat(fullname.c_str(), &fileStat); - if (result == 0) { // found? - return (fullname); - } - } + // When itsSubdir is filled test subdir and basedir, in that order + string basedir = *iter + (*iter != "/" ? "/" : ""); + string file = basedir + (itsSubdir.empty() ? aFile : itsSubdir + "/" + aFile); + if (stat(file.c_str(), &fileStat) == 0) + return (file); + file = basedir + aFile; + if (stat(file.c_str(), &fileStat) == 0) + return (file); ++iter; } // not found, return empty string. 
diff --git a/LCS/Common/test/tFileLocator.cc b/LCS/Common/test/tFileLocator.cc index ee4c5f476c56515d1326deb40346694bcf38b013..69e8e6224c45d46515b6cb4fe97dac5fc55d773f 100644 --- a/LCS/Common/test/tFileLocator.cc +++ b/LCS/Common/test/tFileLocator.cc @@ -78,6 +78,12 @@ int main (int, char *argv[]) { LOG_INFO_STR ("registered path = " << Locator1.getPath()); CHECK(Locator1.getPath() == "/usr"); + Locator1.setSubdir("bin"); + LOG_INFO ("Searching file 'wc' in subdir (bin)"); + LOG_INFO_STR ("fullname = " << Locator1.locate("wc")); + CHECK(Locator1.locate("wc") == "/usr/bin/wc"); + Locator1.setSubdir(""); + LOG_INFO ("Adding '/usr/bin:./' at end of chain"); Locator1.addPathAtBack("/usr/bin:./"); LOG_INFO_STR ("registered path = " << Locator1.getPath()); diff --git a/LCS/Messaging/python/messaging/RPC.py b/LCS/Messaging/python/messaging/RPC.py index ddacc0e6e2511988c99a4808980de827ab969e65..33819ec8d03082b7607dc96dd757f06792d763de 100644 --- a/LCS/Messaging/python/messaging/RPC.py +++ b/LCS/Messaging/python/messaging/RPC.py @@ -306,7 +306,7 @@ class RPCWrapper(object): def close(self): '''Close all opened rpc connections''' for rpc in self._serviceRPCs.values(): - logger.info('closing rpc connection %s at %s', rpc.Request.address, rpc.broker) + logger.debug('closing rpc connection %s at %s', rpc.Request.address, rpc.broker) rpc.close() def __enter__(self): @@ -334,7 +334,7 @@ class RPCWrapper(object): # not in cache # so, create RPC for this service method, open it, and cache it rpc = RPC(service_method, busname=self.busname, broker=self.broker, ForwardExceptions=True, **rpckwargs) - logger.info('opening rpc connection %s at %s', rpc.Request.address, rpc.broker) + logger.debug('opening rpc connection %s at %s', rpc.Request.address, rpc.broker) rpc.open() self._serviceRPCs[service_method] = rpc diff --git a/LCS/Messaging/python/messaging/test/CMakeLists.txt b/LCS/Messaging/python/messaging/test/CMakeLists.txt index 
361727be5357441a3abe5f8bf3a66c8853f99871..a4f5902ced4f273bff6e3e566e6e8dc6298ca1ff 100644 --- a/LCS/Messaging/python/messaging/test/CMakeLists.txt +++ b/LCS/Messaging/python/messaging/test/CMakeLists.txt @@ -8,12 +8,13 @@ set(_qpid_tests t_RPC t_service_message_handler) -if(HAVE_QPID) +execute_process(COMMAND qpid-config RESULT_VARIABLE QPID_CONFIG_RESULT OUTPUT_QUIET ERROR_QUIET) + +if(${QPID_CONFIG_RESULT} EQUAL 0) foreach(_test ${_qpid_tests}) lofar_add_test(${_test}) endforeach() else() lofar_join_arguments(_qpid_tests) - message(WARNING "Qpid is not set." - "The following tests will not be run: ${_qpid_tests}") + message(WARNING "No running qpid daemon found. The following tests will not be run: ${_qpid_tests}") endif() diff --git a/LCS/PyCommon/CMakeLists.txt b/LCS/PyCommon/CMakeLists.txt index 5d6cdb28631646683d940fdf23bfa56ec986dedf..8a8e55d488fe805f469f702e948e6b19038074e9 100644 --- a/LCS/PyCommon/CMakeLists.txt +++ b/LCS/PyCommon/CMakeLists.txt @@ -10,6 +10,7 @@ find_python_module(psycopg2) set(_py_files __init__.py + cep4_utils.py cache.py dbcredentials.py defaultmailaddresses.py diff --git a/LCS/PyCommon/cep4_utils.py b/LCS/PyCommon/cep4_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7588f79524bacec2668059066affc3cd9745f7c5 --- /dev/null +++ b/LCS/PyCommon/cep4_utils.py @@ -0,0 +1,317 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from subprocess import check_output, Popen, PIPE +from random import randint +import math +import os +from time import sleep +from datetime import datetime, timedelta + +import logging +logger = logging.getLogger(__name__) + + +def ssh_cmd_list(host): + ''' + returns a subprocess compliant command list to do an ssh call to the given node + uses ssh option -tt to force remote pseudo terminal + uses ssh option -q for ssh quiet mode (no ssh warnings/errors) + uses ssh option -o StrictHostKeyChecking=no to prevent prompts about host keys + :param host: the node name or ip address + :return: a subprocess compliant command list + ''' + return ['ssh', '-T', '-q', '-o StrictHostKeyChecking=no', host] + + +def wrap_command_in_cep4_head_node_ssh_call(cmd): + '''wrap the command in an ssh call to head.cep4 + :param list cmd: a subprocess cmd list + cpu node. Otherwise, the command is executed on the head node. 
+ :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls + ''' + return ssh_cmd_list('lofarsys@head.cep4.control.lofar') + cmd + +def wrap_command_in_cep4_random_cpu_node_ssh_call(cmd, via_head=True): + '''wrap the command in an ssh call an available random cep4 cpu node (via head.cep4) + :param list cmd: a subprocess cmd list + :param bool via_head: when True, route the cmd first via the cep4 head node + :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls + ''' + # pick a random available cpu node + node_nrs = get_cep4_available_cpu_nodes() + node_nr = node_nrs[randint(0, len(node_nrs)-1)] + return wrap_command_in_cep4_cpu_node_ssh_call(cmd, node_nr, via_head=via_head) + +def wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(cmd, via_head=True): + '''wrap the command in an ssh call to the available random cep4 cpu node with the lowest load (via head.cep4) + :param list cmd: a subprocess cmd list + :param bool via_head: when True, route the cmd first via the cep4 head node + :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls + ''' + lowest_load_node_nr = get_cep4_available_cpu_node_with_lowest_load() + return wrap_command_in_cep4_cpu_node_ssh_call(cmd, lowest_load_node_nr, via_head=via_head) + +def wrap_command_in_cep4_cpu_node_ssh_call(cmd, cpu_node_nr, via_head=True): + '''wrap the command in an ssh call the given cep4 cpu node (via head.cep4) + :param list cmd: a subprocess cmd list + :param int cpu_node_nr: the number of the cpu node where to execute the command + :param bool via_head: when True, route the cmd first via the cep4 head node + :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls + ''' + remote_cmd = ssh_cmd_list('lofarsys@cpu%02d.cep4' % cpu_node_nr) + cmd + if via_head: + return wrap_command_in_cep4_head_node_ssh_call(remote_cmd) + else: + return remote_cmd + +def wrap_command_for_docker(cmd, image_name, image_label='', 
mount_dirs=['/data']): + '''wrap the command to be run in a docker container for the lofarsys user and environment + :param list cmd: a subprocess cmd list + :param string image_name: the name of the docker image to run + :param string image_label: the optional label of the docker image to run + :return: the same subprocess cmd list, but then wrapped with docker calls + ''' + #fetch the lofarsys user id and group id first from the cep4 head node + id_string = '%s:%s' % (check_output(wrap_command_in_cep4_head_node_ssh_call(['id', '-u'])).strip(), + check_output(wrap_command_in_cep4_head_node_ssh_call(['id', '-g'])).strip()) + + #return the docker run command for the lofarsys user and environment + dockerized_cmd = ['docker', 'run', '--rm', '--net=host'] + for d in mount_dirs: + dockerized_cmd += ['-v', '%s:%s' % (d,d)] + + dockerized_cmd += ['-u', id_string, + '-v', '/etc/passwd:/etc/passwd:ro', + '-v', '/etc/group:/etc/group:ro', + '-v', '$HOME:$HOME', + '-e', 'HOME=$HOME', + '-e', 'USER=$USER', + '-w', '$HOME', + '%s:%s' % (image_name, image_label) if image_label else image_name] + dockerized_cmd += cmd + return dockerized_cmd + +def get_cep4_available_cpu_nodes(): + ''' + get a list of cep4 cpu nodes which are currently up and running according to slurm + :return: a list of cpu node numbers (ints) for the up and running cpu nodes + ''' + available_cep4_nodes = [] + + try: + logger.debug('determining available cep4 cpu nodes') + + # find out which nodes are available + cmd = ['sinfo -p cpu -t idle,mix'] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.debug('executing command: %s', ' '.join(cmd)) + out = check_output(cmd) + lines = out.split('\n') + for state in ['idle', 'mix']: + try: + line = next(l for l in lines if state in l).strip() + # get nodes string part of line: + nodes_part = line.split(' ')[-1] + if '[' in nodes_part: + # example: line='cpu* up infinite 42 mix cpu[01-17,23-47]' + # then: nodes='01-17,23-47' + nodes = nodes_part[4:-1] + 
for part in nodes.split(','): + if '-' in part: + lower, sep, upper = part.partition('-') + available_cep4_nodes += list(range(int(lower), int(upper) + 1)) + else: + available_cep4_nodes.append(int(part)) + else: + # example: line='cpu* up infinite 42 mix cpu01' + # then: nodes='01' + node = int(nodes_part[3:]) + available_cep4_nodes += [node] + + except StopIteration: + pass # no line with state in line + + except Exception as e: + logger.exception(e) + + available_cep4_nodes = sorted(list(set(available_cep4_nodes))) + logger.debug('available cep4 cpu nodes: %s', ','.join(str(x) for x in available_cep4_nodes)) + if not available_cep4_nodes: + logger.warning('no cep4 cpu nodes available') + + return available_cep4_nodes + +def get_cep4_cpu_nodes_loads(node_nrs=None, normalized=False): + ''' + get the 5min load for each given cep4 cpu node nr + :param node_nrs: optional list of node numbers to get the load for. If None, then all available nodes are queried. + :param bool normalized: when True, then normalize the loads with the number of cores. 
+ :return: dict with node_nr -> load mapping + ''' + if node_nrs == None: + node_nrs = get_cep4_available_cpu_nodes() + + procs = {} + loads = {} + logger.debug('getting 5min load for cep4 cpu nodes %s', ', '.join((str(x) for x in node_nrs))) + # spawn load commands in parallel + for node_nr in node_nrs: + load_cmd = ['cat', '/proc/loadavg', '|', 'awk', "'{print $2}'"] + node_load_cmd = wrap_command_in_cep4_cpu_node_ssh_call(load_cmd, node_nr, via_head=True) + logger.debug('executing command: %s', ' '.join(node_load_cmd)) + + proc = Popen(node_load_cmd, stdout=PIPE, stderr=PIPE) + procs[node_nr] = proc + + # wait for procs to finish, and try to parse the resulting load value + for node_nr, proc in procs.items(): + out, err = proc.communicate() + try: + load = float(out.strip()) + except: + load = 1e10 + loads[node_nr] = load + + if normalized: + # spawn num-cores commands in parallel grep -c ^processor /proc/cpuinfo + for node_nr in node_nrs: + num_proc_cmd = ['grep', '-c', '^processor', '/proc/cpuinfo'] + node_num_proc_cmd = wrap_command_in_cep4_cpu_node_ssh_call(num_proc_cmd, node_nr, via_head=True) + logger.debug('executing command: %s', ' '.join(node_num_proc_cmd)) + + proc = Popen(node_num_proc_cmd, stdout=PIPE, stderr=PIPE) + procs[node_nr] = proc + + # wait for procs to finish, and try to parse the resulting num_proc value + for node_nr, proc in procs.items(): + out, err = proc.communicate() + try: + num_proc = int(out.strip()) + except: + num_proc = 1 + loads[node_nr] = loads[node_nr]/float(num_proc) + + logger.debug('5min %sloads for cep4 cpu nodes: %s', 'normalized ' if normalized else '', loads) + return loads + +def get_cep4_available_cpu_nodes_sorted_ascending_by_load(max_normalized_load=0.33): + ''' + get the cep4 available cpu node numbers sorted ascending by load (5min). + :param float max_normalized_load: filter available nodes which a at most max_normalized_load + :return: sorted list of node numbers. 
+ ''' + node_nrs = get_cep4_available_cpu_nodes() + loads = get_cep4_cpu_nodes_loads(node_nrs, normalized=True) + filtered_loads = {k:v for k,v in loads.items() if v <= max_normalized_load} + sorted_loads = sorted(filtered_loads.items(), key=lambda x: x[1]) + sorted_node_nrs = [item[0] for item in sorted_loads] + logger.debug('cep4 cpu nodes sorted (asc) by load: %s', sorted_node_nrs) + return sorted_node_nrs + +def get_cep4_available_cpu_node_with_lowest_load(max_normalized_load=0.33): + ''' + get the cep4 cpu node which is available and has the lowest (5min) load of them all. + :param float max_normalized_load: filter available nodes which a at most max_normalized_load + :return: the node number (int) with the lowest load. + ''' + node_nrs = get_cep4_available_cpu_nodes_sorted_ascending_by_load(max_normalized_load=max_normalized_load) + if node_nrs: + logger.debug('cep4 cpu node with lowest load: %s', node_nrs[0]) + return node_nrs[0] + return None + +def parallelize_cmd_over_cep4_cpu_nodes(cmd, parallelizable_option, parallelizable_option_values, timeout=3600): + '''run the given cmd in parallel on multiple available cpu nodes. 
+ :param list cmd: a subprocess cmd list + :param string parallelizable_option: the option which is given to the parallelized cmd for a subset of the parallelizable_option_values + :param list parallelizable_option_values: the list of values which is chunked for the parallelized cmd for the parallelizable_option + :param int timeout: timeout in seconds after which the workers are killed + :return: True if all processes on all cpu nodes exited ok, else False + ''' + available_cep4_nodes = get_cep4_available_cpu_nodes_sorted_ascending_by_load() + + if len(available_cep4_nodes) == 0: + logger.warning('No cep4 cpu nodes available..') + return False + + num_workers = max(1, min(len(available_cep4_nodes), len(parallelizable_option_values))) + num_option_values_per_worker = int(math.ceil(len(parallelizable_option_values) / float(num_workers))) + workers = {} + + logger.info('parallelizing cmd: %s over option %s with values %s', + ' '.join(str(x) for x in cmd), + parallelizable_option, + ' '.join(str(x) for x in parallelizable_option_values)) + + start = datetime.utcnow() + + # start the workers + for i in range(num_workers): + option_values_for_worker = parallelizable_option_values[i * num_option_values_per_worker:(i + 1) * num_option_values_per_worker] + if option_values_for_worker: + option_values_for_worker_csv = ','.join([str(s) for s in option_values_for_worker]) + + worker_cmd = cmd + [parallelizable_option, option_values_for_worker_csv] + + worker_cmd = wrap_command_in_cep4_cpu_node_ssh_call(worker_cmd, available_cep4_nodes[i], via_head=False) + worker_cmd_str = ' '.join([str(x) for x in worker_cmd]) + logger.info('starting worker %d with parallelized cmd: %s', i, worker_cmd_str) + worker = Popen(worker_cmd, bufsize=-1, env=os.environ) + workers[worker_cmd_str] = worker + + logger.info('waiting for all %d workers to finish...', len(workers)) + + failed_worker_cmds = set() + + #wait for all workers to finish + #print worker loglines + while workers: + 
finished_workers = {worker_cmd_str:worker for worker_cmd_str,worker in workers.items() + if worker.poll() is not None} + + if finished_workers: + for worker_cmd_str, worker in finished_workers.items(): + logger.info('worker finished with exitcode=%d cmd=%s', + worker.returncode, + worker_cmd_str) + del workers[worker_cmd_str] + + logger.info('waiting for %d more workers...', len(workers)) + + if worker.returncode != 0: + failed_worker_cmds.add(worker_cmd_str) + else: + sleep(1.0) + + if datetime.utcnow() - start >= timedelta(seconds=timeout): + logger.warning('timeout while waiting for %d more workers...', len(workers)) + for worker_cmd_str, worker in workers.items(): + logger.warning('killing worker with parallelized cmd: %s', worker_cmd_str) + worker.kill() + failed_worker_cmds.add(worker_cmd_str) + del workers[worker_cmd_str] + + success = len(failed_worker_cmds)==0 + + if success: + logger.info('all parallelized cmds finished successfully') + else: + logger.error('%s/%s parallelized cmds finished with errors', len(failed_worker_cmds), num_workers) + + return success diff --git a/LCS/PyCommon/datetimeutils.py b/LCS/PyCommon/datetimeutils.py index 6b32db4efa0caaf0aae442841ce54827d862794d..038c880ea04a3f3b3027e62fc736835132c9c7f6 100644 --- a/LCS/PyCommon/datetimeutils.py +++ b/LCS/PyCommon/datetimeutils.py @@ -24,24 +24,24 @@ import sys import os -def monthRanges(min_date, max_date): +def monthRanges(min_date, max_date, month_step=1): ranges = [] min_month_start = datetime(min_date.year, min_date.month, 1, tzinfo=min_date.tzinfo) month_start = min_month_start while month_start < max_date: - if month_start.month < 12: - month_end = datetime(month_start.year, month_start.month+1, 1, tzinfo=month_start.tzinfo) - timedelta(milliseconds=1) + if month_start.month <= 12-month_step: + month_end = datetime(month_start.year, month_start.month+month_step, 1, tzinfo=month_start.tzinfo) - timedelta(milliseconds=1) else: - month_end = datetime(month_start.year+1, 
month_start.month-11, 1, tzinfo=month_start.tzinfo) - timedelta(milliseconds=1) + month_end = datetime(month_start.year+1, month_start.month-12+month_step, 1, tzinfo=month_start.tzinfo) - timedelta(milliseconds=1) ranges.append((month_start, month_end)) - if month_start.month < 12: - month_start = datetime(month_start.year, month_start.month+1, 1, tzinfo=min_date.tzinfo) + if month_start.month <= 12-month_step: + month_start = datetime(month_start.year, month_start.month+month_step, 1, tzinfo=min_date.tzinfo) else: - month_start = datetime(month_start.year+1, month_start.month-11, 1, tzinfo=min_date.tzinfo) + month_start = datetime(month_start.year+1, month_start.month-12+month_step, 1, tzinfo=min_date.tzinfo) return ranges @@ -66,3 +66,37 @@ def format_timedelta(td): def parseDatetime(date_time): """ Parse the datetime format used in LOFAR parsets. """ return datetime.strptime(date_time, ('%Y-%m-%d %H:%M:%S.%f' if '.' in date_time else '%Y-%m-%d %H:%M:%S')) + +MDJ_EPOCH = datetime(1858, 11, 17, 0, 0, 0) + +def to_modified_julian_date(timestamp): + ''' + computes the modified_julian_date from a python datetime timestamp + :param timestamp: datetime a python datetime timestamp + :return: double, the modified_julian_date + ''' + return to_modified_julian_date_in_seconds(timestamp)/86400.0 + +def to_modified_julian_date_in_seconds(timestamp): + ''' + computes the modified_julian_date (in seconds as opposed to the official days) from a python datetime timestamp + :param timestamp: datetime a python datetime timestamp + :return: double, the modified_julian_date (fractional number of seconds since MJD_EPOCH) + ''' + return totalSeconds(timestamp - MDJ_EPOCH) + +def from_modified_julian_date(modified_julian_date): + ''' + computes the python datetime timestamp from a modified_julian_date + :param modified_julian_date: double, a timestamp expressed in modified_julian_date format (fractional number of days since MJD_EPOCH) + :return: datetime, the timestamp as python 
datetime + ''' + return from_modified_julian_date_in_seconds(modified_julian_date*86400.0) + +def from_modified_julian_date_in_seconds(modified_julian_date_secs): + ''' + computes the python datetime timestamp from a modified_julian_date (in seconds as opposed to the official days) + :param modified_julian_date: double, a timestamp expressed in modified_julian_date format (fractional number of seconds since MJD_EPOCH) + :return: datetime, the timestamp as python datetime + ''' + return MDJ_EPOCH + timedelta(seconds=modified_julian_date_secs) diff --git a/LCS/PyCommon/postgres.py b/LCS/PyCommon/postgres.py index 59ea46dbe638189321acbe95949298cd922fcf00..2c60585127fa6d8704bbbcf9fb06575ab7d0a04c 100644 --- a/LCS/PyCommon/postgres.py +++ b/LCS/PyCommon/postgres.py @@ -26,10 +26,15 @@ Module with nice postgres helper methods and classes. import logging from threading import Thread, Lock from Queue import Queue, Empty +from datetime import datetime +import time +import re import select import psycopg2 import psycopg2.extras import psycopg2.extensions +from lofar.common.datetimeutils import totalSeconds +from lofar.common import dbcredentials logger = logging.getLogger(__name__) @@ -87,7 +92,149 @@ def makePostgresNotificationQueries(schema, table, action, column_name='id'): sql_lines = '\n'.join([s.strip() for s in sql.split('\n')]) + '\n' return sql_lines -class PostgresListener(object): +FETCH_NONE=0 +FETCH_ONE=1 +FETCH_ALL=2 + +class PostgresDatabaseConnection(object): + def __init__(self, + host='', + database='', + username='', + password='', + port=5432, + log_queries=False, auto_commit_selects=True, num_connect_retries=5, connect_retry_interval=1.0): + self._host = host + self._database = database + self._username = username + self._password = password + self._port = port + self._connection = None + self._log_queries = log_queries + self.__connection_retries = 0 + self.__auto_commit_selects = auto_commit_selects + self.__num_connect_retries = num_connect_retries + 
self.__connect_retry_interval = connect_retry_interval + self._connect() + + def _connect(self): + for i in range(self.__num_connect_retries): + try: + self._disconnect() + + logger.debug("%s connecting to db %s:*****@%s on %s:%s", type(self).__name__, + self._username, + self._database, + self._host, + self._port) + self._connection = psycopg2.connect(host=self._host, + user=self._username, + password=self._password, + database=self._database, + port=self._port, + connect_timeout=5) + + if self._connection: + logger.debug("%s connected to db %s", type(self).__name__, self._database) + return + except Exception as e: + logger.error(e) + if i == self.__num_connect_retries-1: + raise + + logger.debug('retrying to connect to %s in %s seconds', self._database, self.__connect_retry_interval) + time.sleep(self.__connect_retry_interval) + + def _disconnect(self): + if self._connection: + logger.debug("%s disconnecting from db: %s", type(self).__name__, self._database) + self._connection.close() + self._connection = None + + def __enter__(self): + '''connects to the database''' + self._connect() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + '''disconnects from the database''' + self._disconnect() + + def _queryAsSingleLine(self, query, qargs=None): + line = ' '.join(query.replace('\n', ' ').split()) + if qargs: + line = line % tuple(['\'%s\'' % a if isinstance(a, basestring) else a for a in qargs]) + return line + + def executeQuery(self, query, qargs=None, fetch=FETCH_NONE): + '''execute the query and reconnect upon OperationalError''' + try: + with self._connection.cursor(cursor_factory = psycopg2.extras.RealDictCursor) as cursor: + start = datetime.utcnow() + cursor.execute(query, qargs) + if self._log_queries: + elapsed = datetime.utcnow() - start + elapsed_ms = 1000.0 * totalSeconds(elapsed) + logger.info('executed query in %.1fms%s yielding %s rows: %s', elapsed_ms, + ' (SLOW!)' if elapsed_ms > 250 else '', # for easy log grep'ing + 
cursor.rowcount, + self._queryAsSingleLine(query, qargs)) + + try: + self._log_database_notifications() + + result = [] + if fetch == FETCH_ONE: + result = cursor.fetchone() + + if fetch == FETCH_ALL: + result = cursor.fetchall() + + if self.__auto_commit_selects and re.search('select', query, re.IGNORECASE): + #prevent dangling in idle transaction on server + self.commit() + + return result + except Exception as e: + logger.error("error while fetching result(s) for %s: %s", self._queryAsSingleLine(query, qargs), e) + + except (psycopg2.OperationalError, AttributeError) as e: + logger.error(str(e)) + while self.__connection_retries < 5: + logger.info("(re)trying to connect to database") + self.__connection_retries += 1 + self._connect() + if self._connection: + self.__connection_retries = 0 + return self.executeQuery(query, qargs, fetch) + time.sleep(i*i) + except (psycopg2.IntegrityError, psycopg2.ProgrammingError, psycopg2.InternalError, psycopg2.DataError)as e: + logger.error("Rolling back query=\'%s\' due to error: \'%s\'" % (self._queryAsSingleLine(query, qargs), e)) + self.rollback() + return [] + except Exception as e: + logger.error(str(e)) + + return [] + + def _log_database_notifications(self): + if self._log_queries and self._connection.notices: + for notice in self._connection.notices: + logger.info('database log message %s', notice.strip()) + del self._connection.notices[:] + + def commit(self): + if self._log_queries: + logger.debug('commit') + self._connection.commit() + + def rollback(self): + if self._log_queries: + logger.info('rollback') + self._connection.rollback() + + +class PostgresListener(PostgresDatabaseConnection): ''' This class lets you listen to postgress notifications It execute callbacks when a notifocation occurs. Make your own subclass with your callbacks and subscribe them to the appriate channel. 
@@ -122,32 +269,34 @@ class PostgresListener(object): password='', port=5432): '''Create a new PostgresListener''' - self.conn = psycopg2.connect(host=host, - user=username, - password=password, - database=database, - port=port) - self.conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - self.cursor = self.conn.cursor() + super(PostgresListener, self).__init__(host=host, + database=database, + username=username, + password=password, + port=port) self.__listening = False self.__lock = Lock() self.__callbacks = {} self.__waiting = False self.__queue = Queue() + def _connect(self): + super(PostgresListener, self)._connect() + self._connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + def subscribe(self, notification, callback): '''Subscribe to a certain postgres notification. Call callback method in case such a notification is received.''' logger.info("Subscribed %sto %s" % ('and listening ' if self.isListening() else '', notification)) with self.__lock: - self.cursor.execute("LISTEN %s;", (psycopg2.extensions.AsIs(notification),)) + self.executeQuery("LISTEN %s;", (psycopg2.extensions.AsIs(notification),)) self.__callbacks[notification] = callback def unsubscribe(self, notification): '''Unubscribe from a certain postgres notification.''' logger.info("Unsubscribed from %s" % notification) with self.__lock: - self.cursor.execute("UNLISTEN %s;", (psycopg2.extensions.AsIs(notification),)) + self.executeQuery("UNLISTEN %s;", (psycopg2.extensions.AsIs(notification),)) if notification in self.__callbacks: del self.__callbacks[notification] @@ -173,11 +322,11 @@ class PostgresListener(object): def eventLoop(): while self.isListening(): - if select.select([self.conn],[],[],2) != ([],[],[]): - self.conn.poll() - while self.conn.notifies: + if select.select([self._connection],[],[],2) != ([],[],[]): + self._connection.poll() + while self._connection.notifies: try: - notification = self.conn.notifies.pop(0) + notification = 
self._connection.notifies.pop(0) logger.debug("Received notification on channel %s payload %s" % (notification.channel, notification.payload)) if self.isWaiting(): @@ -270,3 +419,4 @@ class PostgresListener(object): pass self.stopWaiting() + diff --git a/LCS/PyCommon/subprocess.py b/LCS/PyCommon/subprocess_utils.py similarity index 100% rename from LCS/PyCommon/subprocess.py rename to LCS/PyCommon/subprocess_utils.py diff --git a/LCS/PyCommon/test/CMakeLists.txt b/LCS/PyCommon/test/CMakeLists.txt index 48f4f4d47f53903b1d6b735e9f44f2c969b5ccb4..6309f036769b6cb3b76377b36c9c9df46fdf24e1 100644 --- a/LCS/PyCommon/test/CMakeLists.txt +++ b/LCS/PyCommon/test/CMakeLists.txt @@ -12,3 +12,4 @@ lofar_add_test(t_defaultmailaddresses) lofar_add_test(t_methodtrigger) lofar_add_test(t_util) lofar_add_test(t_test_utils) +lofar_add_test(t_cep4_utils) diff --git a/LCS/PyCommon/test/t_cep4_utils.py b/LCS/PyCommon/test/t_cep4_utils.py new file mode 100755 index 0000000000000000000000000000000000000000..12ad67dce79cfb5054c1d7cc42e6d0def0a45b80 --- /dev/null +++ b/LCS/PyCommon/test/t_cep4_utils.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +import unittest +from subprocess import call + +import logging +from lofar.common.cep4_utils import * + +logger = logging.getLogger(__name__) + +class TestCep4Utils(unittest.TestCase): + def test_01_wrap_command_in_cep4_head_node_ssh_call(self): + cmd = wrap_command_in_cep4_head_node_ssh_call(['true']) + logger.info('executing command: %s', ' '.join(cmd)) + self.assertEqual(0, call(cmd)) + + def test_02_get_cep4_available_cpu_nodes(self): + node_nrs = get_cep4_available_cpu_nodes() + self.assertTrue(isinstance(node_nrs, list)) + self.assertTrue(len(node_nrs) > 0) + + def test_03_wrap_command_in_cep4_random_cpu_node_ssh_call(self): + ''' + this test calls and tests the functionality of the following methods via + wrap_command_in_cep4_random_cpu_node_ssh_call: get_cep4_available_cpu_nodes, wrap_command_in_cep4_cpu_node_ssh_call + ''' + cmd = wrap_command_in_cep4_random_cpu_node_ssh_call(['true'], via_head=True) + logger.info('executing command: %s', ' '.join(cmd)) + self.assertEqual(0, call(cmd)) + + def test_04_wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(self): + ''' + this test calls and tests the functionality of the following methods via + wrap_command_in_cep4_random_cpu_node_ssh_call: + get_cep4_available_cpu_nodes, get_cep4_cpu_nodes_loads, + get_cep4_available_cpu_nodes_sorted_ascending_by_load, wrap_command_in_cep4_cpu_node_ssh_call + ''' + cmd = wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(['true'], via_head=True) + logger.info('executing command: %s', ' '.join(cmd)) + self.assertEqual(0, call(cmd)) + + def test_05_wrap_command_for_docker_in_cep4_head_node_ssh_call(self): + ''' + this test calls and tests the functionality of wrap_command_for_docker and + wrap_command_in_cep4_head_node_ssh_call. + It is assumed that a docker image is available on head.cep4. 
+ ''' + #wrap the command in a docker call first, and then in an ssh call + cmd = wrap_command_for_docker(['true'], 'adder', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('executing command: %s', ' '.join(cmd)) + self.assertEqual(0, call(cmd)) + + def test_06_get_slurm_info_from_within_docker_via_cep4_head(self): + ''' + test to see if we can execute a command via ssh on the head node, + from within a docker container, started via ssh on the head node (yes, that's multiple levels of indirection) + ''' + # use the slurm sinfo command (because it's available on the head nodes only)... + cmd = ['sinfo'] + # ...called on cep4 headnode... + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + # ...from with the docker container... + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + # ...which is started on the cep4 head node + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('executing command: %s', ' '.join(cmd)) + + #redirect stdout/stderr to /dev/null + with open('/dev/null', 'w') as dev_null: + self.assertEqual(0, call(cmd, stdout=dev_null, stderr=dev_null)) + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) + + # first try if we can reach cep4 + # this assumes the code in wrap_command_in_cep4_head_node_ssh_call is correct and working + # (which is also being tested in the unittests) + # if and only if the command to the head node succeeds, then we can do the tests + # otherwise, for whatever reason the ssh call fails, we skip the tests because we cannot reach cep4 head node. + cep4_true_cmd = wrap_command_in_cep4_head_node_ssh_call(['true']) + + if call(cep4_true_cmd) == 0: + logger.info('We can reach the CEP4 head node. Continuing with tests...') + unittest.main() + else: + logger.warning('Cannot reach the CEP4 head node. 
skipping tests...') + #exit with special 'skipped' exit-code + exit(3) + diff --git a/LCS/PyCommon/test/t_cep4_utils.run b/LCS/PyCommon/test/t_cep4_utils.run new file mode 100755 index 0000000000000000000000000000000000000000..dbbadd78378910833774ca467f8a7008a126b7ee --- /dev/null +++ b/LCS/PyCommon/test/t_cep4_utils.run @@ -0,0 +1,23 @@ +#!/bin/bash + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# Run the unit test +source python-coverage.sh +python_coverage_test "*cep4_utils*" t_cep4_utils.py + diff --git a/LCS/PyCommon/test/t_cep4_utils.sh b/LCS/PyCommon/test/t_cep4_utils.sh new file mode 100755 index 0000000000000000000000000000000000000000..9298df51c1b5e4c48c03b7d15833c6e1806ed4ee --- /dev/null +++ b/LCS/PyCommon/test/t_cep4_utils.sh @@ -0,0 +1,20 @@ +#!/bin/sh + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +./runctest.sh t_cep4_utils diff --git a/LCS/WinCCWrapper/CMakeLists.txt b/LCS/WinCCWrapper/CMakeLists.txt index 9c27cda941686b32cf500187319529c7cb17ac08..af2597b7a921f4876bcf78c5afc41dbb686f3fc6 100644 --- a/LCS/WinCCWrapper/CMakeLists.txt +++ b/LCS/WinCCWrapper/CMakeLists.txt @@ -8,7 +8,14 @@ set(WINCC_ROOT_DIR /opt/WinCC_OA/3.15 CACHE PATH "root dir where the WinCC_OA ap lofar_find_package(WINCC) IF(WINCC_FOUND) + lofar_find_package(Boost REQUIRED python) + lofar_find_package(Python) + add_subdirectory(include) add_subdirectory(src) - add_subdirectory(test) +ELSE() + MESSAGE(WARNING "The WinCC_OA API could not be found (WINCC_ROOT_DIR=${WINCC_ROOT_DIR}). Hence, the WinCCWrapper library cannot be build. We do continue with cmake-configure and build however, because for development one could use a mocked WinCCWrapper as well.") ENDIF(WINCC_FOUND) + +# add subdir test anyway, it will install a mock, even on non-WinCC API systems. 
+add_subdirectory(test) diff --git a/LCS/WinCCWrapper/doc/winccwrapper.md b/LCS/WinCCWrapper/doc/winccwrapper.md index ea636aaef669fc024cb72c55385839435a53b21f..7530e09bb94a41ab0f9caa119b4f44f03150a82f 100644 --- a/LCS/WinCCWrapper/doc/winccwrapper.md +++ b/LCS/WinCCWrapper/doc/winccwrapper.md @@ -1 +1,124 @@ -# See lofar/trunk/sw_documentation_template.md for example \ No newline at end of file + +# WinCCWrapper Overview {#winccwrapper_overview} + +## General + +### Description/Summary + +The WinCCWrapper library is a simple C++ wrapper around the difficult to use WinCC_OA API. Of course one could always still choose use the WinCC_OA API which provides more features than this simple wrapper. This WinCCWrapper is aimed at providing a simple API to set and get datapoints into a wincc database. All calls are blocking/synchronous (where the underlying WinCC_OA API is asynchronous). + +This WinCCWrapper library has the following features: +- set/get datapoints for the most common datatypes (int, long, float, bool, string, time_t) +- mark datapoints as valid/invalid. +- "connect" to changes in datapoints: whenever a 'connected' datapoint is changed (by whoever from whereever) then a supplied callback function is called. + +Boost-python bindings are provided as well, exposing the same API in python. + + +### Authors/Owners + +- Auke Klazema <mailto:klazema@astron.nl> +- Jorrit Schaap <mailto:schaap@astron.nl> + +### Overview + +This package builds a c++ library, and python bindings. For further details, see Description above. + +- - - + +## DEVELOPMENT + +### Analyses +This library originated from the Responsive Telescope project which needed a means to use the station monitoring information available in the wincc database. It was later extended in the APERTIF project to provide a means to set/get the validness of datapoints. 
+ +The folling feaures were required, and implemented: +- set/get datapoints for the most common datatypes (int, long, float, bool, string, time_t) +- mark datapoints as valid/invalid. +- "connect" to changes in datapoints: whenever a 'connected' datapoint is changed (by whoever from whereever) then a supplied callback function is called. + +Because the WinCC_OA API is hard to use, we decided to implement this simple wrapper. +Because the WinCC_OA API is in C++, this wrapper needed to be written in C++ as well. +Because we needed to have the same API available in python as well, we decided to create boost-python bindings. + + +### Design +No fancy design needed. This is just a library with a few classes wrapping the complicated WinCC_OA API into a simple API. + +### Source Code +- [WinCCWrapper in SVN](https://svn.astron.nl/LOFAR/branches/SW-26_WinCC/LCS/WinCCWrapper) +- [WinCCWrapper Code Documentation](@ref winccwrapper_overview) + +### Testing + +#### Unit Testing + +We decided not to provide unit tests, because that would require to write a quite large mocked version of the WinCC_OA API, which would be bigger and more complex than the wrapper classes themselves. + +#### Integration Testing + +When BUILD_TESTING is ON, then little test programs are built: WinCCSet and WinCCGet. They can be run from the cmdline (on a host where WinCC is running) and be used to test whether you can successfully set and/or get a datapoint. This is a manual test. + +#### Build & Deploy + +This library needs a c++11 compiler. +Dependencies on other libraries are automatically found by cmake, and otherwise reported which are missing. + +##### Build locally + + svn co https://svn.astron.nl/LOFAR/<some_branch_or_trunk> <my_source_dir> + cd <my_source_dir> + mkdir -p build/gnu_cxx11debug + cd build/gnu_cxx11debug + cmake -DBUILD_PACKAGES=WinCCWrapper -DUSE_LOG4CPLUS=OFF -DCMAKE_INSTALL_PREFIX=/opt/lofar/ ../.. + cd ../.. 
+ make + make install + +##### Build using Jenkins + +There are no special Jenkins jobs for this package specifically. Such a job is also not needed. CMake will automatically take care of building this package whenever a package which is build by Jenkins is dependent on WinCCWrapper. + +##### Deploy + +There is no special Jenkins job to deploy this package specifically. Such a job is also not needed. The library from this package is deployed automatically thanks to cmake/jenkins whenever another package is deployed which depends on this package. + +- - - + +## OPERATIONS + +### Configuration +- There are no configuration files. + +### Log Files +- This library does not produce log files. A program using this library could produce logfiles, and these log files will contain the log lines issued by this library. + +### Runtime +- This library just loads whenever a using program is started. + +### Interfaces (API) +- It's a library. See the source code documentation for the api. + +### Files/Databases +- It depends on a running WINCC_OA instance (which run on the mcu's) +- No other files and/or databases are needed. + +### Dependencies +- WINCC_OA 3.15 (API, and runtime, which are installed on mcu's and the buildhostcentos7) + +### Security +- No login credentials are needed. + +- - - + +## ADDITIONAL INFORMATION + +### User Documentation + +N.A. + +### Operations Documentation + +N.A. + + + diff --git a/LCS/WinCCWrapper/include/WinCCManager.h b/LCS/WinCCWrapper/include/WinCCManager.h index 620d4adeb6880338f34919832481f7cf0146ce18..69e70cbb947cf731c2a3bbf24224aa4ed7c16b5c 100644 --- a/LCS/WinCCWrapper/include/WinCCManager.h +++ b/LCS/WinCCWrapper/include/WinCCManager.h @@ -30,6 +30,8 @@ #include <Manager.hxx> #include <Variable.hxx> +#include <boost/python.hpp> + namespace LOFAR { namespace WINCCWRAPPER { @@ -69,12 +71,11 @@ public: bool get_datapoint(const std::string &name, std::string &value); //! 
get the datapoint with the given name and return it's tm value in parameter value. returns true upon success. bool get_datapoint(const std::string &name, struct tm &value); - //! get the datapoint with the given name and return it's DynVar value in parameter value. returns true upon success. - bool get_datapoint(const std::string &name, DynVar &value); - + //! get the datapoint with the given name and return it's boost::python::list value in parameter value. returns true upon success. + bool get_datapoint(const std::string &name, boost::python::list &value); + //! get the datapoint with the given name and return it's std::vector<int> value in parameter value. returns true upon success. bool get_datapoint(const std::string &name, std::vector<int> &value); - //! mark the datapoint with given name valid. returns true upon success. bool set_datapoint_valid(const std::string &name) { return set_datapoint_validity(name, true, nullptr); } //! mark the datapoint with given name invalid. returns true upon success. @@ -97,6 +98,9 @@ private: template <typename Tval> bool _get_datapoint(const std::string &name, Tval &value); + template <typename Tval> + bool _get_datapoint(const std::string &name, Tval *value); + bool get_datapoint_variable(const std::string &name, Variable *&value); bool has_received_variable(const std::string &name); diff --git a/LCS/WinCCWrapper/include/WinCCWrapper.h b/LCS/WinCCWrapper/include/WinCCWrapper.h index 76a5cc33e9562994fe71b2d6654555691f77fdf3..26fa060d6064ee4fcff3f620e44b86a90c52327a 100644 --- a/LCS/WinCCWrapper/include/WinCCWrapper.h +++ b/LCS/WinCCWrapper/include/WinCCWrapper.h @@ -28,6 +28,8 @@ #include "WinCCResources.h" #include "WinCCManager.h" +#include <boost/python.hpp> + namespace LOFAR { namespace WINCCWRAPPER { @@ -50,8 +52,13 @@ public: //! set the datapoint with given name to the given int value, mark it valid/invalid, returns true upon success. bool set_datapoint(const std::string &name, int value, bool valid=true); + //! 
set the datapoint with given name to the given boost::python::list value, mark it valid/invalid, returns true upon success. + bool set_datapoint(const std::string &name, boost::python::list &value, bool valid=true); + //! set the datapoint with given name to the given boost::python::tuple value, mark it valid/invalid, returns true upon success. + bool set_datapoint(const std::string &name, boost::python::tuple &value, bool valid=true); //! set the datapoint with given name to the given std::vector<int> value, mark it valid/invalid, returns true upon success. bool set_datapoint(const std::string &name, std::vector<int> &value, bool valid=true); + //! set the datapoint with given name to the given long value, mark it valid/invalid, returns true upon success. bool set_datapoint(const std::string &name, long value, bool valid=true); //! set the datapoint with given name to the given float value, mark it valid/invalid, returns true upon success. @@ -80,8 +87,11 @@ public: std::string get_datapoint_string(const std::string &name); //! get the datapoint with the given name and return it as a time_t value if possible, otherwise an exception is raised. time_t get_datapoint_time(const std::string &name); + //! get the datapoint with the given name and return it as a boost::python::list value if possible, otherwise an exception is raised. + boost::python::list get_datapoint_list(const std::string &name); //! get the datapoint with the given name and return it as a std::vector<int> value if possible, otherwise an exception is raised. - std::vector<int> get_datapoint_int_vector(const std::string &name); + //! 
this method is used in the WinCCGet test + std::vector<int> get_datapoint_vector(const std::string &name); private: // get_datapoint diff --git a/LCS/WinCCWrapper/src/WinCCManager.cc b/LCS/WinCCWrapper/src/WinCCManager.cc index 96ade22ce8bedc365ac07901bd9847e88a4bd129..6aa6c34b42ef58025ee2c2e37199c6820783e7f0 100644 --- a/LCS/WinCCWrapper/src/WinCCManager.cc +++ b/LCS/WinCCWrapper/src/WinCCManager.cc @@ -33,8 +33,10 @@ #include <LongVar.hxx> #include <TextVar.hxx> #include <DynVar.hxx> +#include <DynPtrArray.hxx> #include <cassert> -#include <DynVar.hxx> +#include <vector> +#include <boost/python.hpp> namespace LOFAR { namespace WINCCWRAPPER { @@ -227,16 +229,39 @@ bool WinCCManager::get_datapoint_variable(const std::string &name, Variable *&va template <typename Tval> Variable::ConvertResult convert(Variable *var, Tval &value, Variable *&converted_var); +template <> +Variable::ConvertResult convert(Variable *var, boost::python::list& value, Variable *&converted_var) +{ + Variable::ConvertResult cr = var->convert(VariableType::DYNINTEGER_VAR, converted_var); + + if(Variable::ConvertResult::OK == cr) + { + DynVar *dv = (DynVar*)converted_var; + + for(unsigned int i = 0; i < dv->getNumberOfItems(); i++) { + Variable *elem = dv->getAt(i); + if(elem->inherits(VariableType::INTEGER_VAR)) { + value.append(((IntegerVar*)elem)->getValue()); + } + else + return Variable::ConvertResult::CONV_NOT_DEFINED; + } + } + return cr; +} + template <> Variable::ConvertResult convert(Variable *var, std::vector<int> &value, Variable *&converted_var) { - Variable::ConvertResult cr = var->convert(VariableType::DYN_VAR, converted_var); + + Variable::ConvertResult cr = var->convert(VariableType::DYNINTEGER_VAR, converted_var); + if(Variable::ConvertResult::OK == cr) { DynVar *dv = (DynVar*)converted_var; value.resize(dv->getNumberOfItems()); - for(unsigned int i = 0; dv->getNumberOfItems(); i++) { + for(unsigned int i = 0; i < dv->getNumberOfItems(); i++) { Variable *elem = dv->getAt(i); 
if(elem->inherits(VariableType::INTEGER_VAR)) { value[i] = ((IntegerVar*)elem)->getValue(); @@ -248,7 +273,6 @@ Variable::ConvertResult convert(Variable *var, std::vector<int> &value, Variable return cr; } - template <> Variable::ConvertResult convert(Variable *var, int &value, Variable *&converted_var) { @@ -331,6 +355,7 @@ bool WinCCManager::_get_datapoint(const std::string &name, Tval &value) return false; } + //below, a few strictly type methods for get_datapoint are defined //they just call the templated _get_datapoint, so why not just use the one and only templated method? //because with these strictly type methods, we force the compiler to define the methods for these types @@ -366,6 +391,11 @@ bool WinCCManager::get_datapoint(const std::string &name, struct tm &value) return _get_datapoint(name, value); } +bool WinCCManager::get_datapoint(const std::string &name, boost::python::list &value) +{ + return _get_datapoint(name, value); +} + bool WinCCManager::get_datapoint(const std::string &name, std::vector<int> &value) { return _get_datapoint(name, value); @@ -374,7 +404,7 @@ bool WinCCManager::get_datapoint(const std::string &name, std::vector<int> &valu bool WinCCManager::set_datapoint_validity(const std::string &name, bool validity, const Variable *value) { DpIdentifier dpId; - + if (Manager::getId(name.c_str(), dpId) == PVSS_FALSE) { // This name was unknown. 
@@ -416,10 +446,8 @@ bool WinCCManager::set_datapoint_validity(const std::string &name, bool validity //delete it, since it was a cloned variable created in get_datapoint_variable delete last_known_value; } - return true; } - return false; } diff --git a/LCS/WinCCWrapper/src/WinCCWrapper.cc b/LCS/WinCCWrapper/src/WinCCWrapper.cc index b3d62bcbf000560b2cdeeed96d810d66ebd6e862..ab6191dc8d62d0f793b0c91541d59e3e2ecd4986 100644 --- a/LCS/WinCCWrapper/src/WinCCWrapper.cc +++ b/LCS/WinCCWrapper/src/WinCCWrapper.cc @@ -23,6 +23,8 @@ #include <LongVar.hxx> #include <TimeVar.hxx> #include <DynVar.hxx> +#include <vector> +#include <Variable.hxx> #include <condition_variable> #include <mutex> @@ -32,6 +34,8 @@ #include "WinCCWrapper.h" #include "ConnectWaitForAnswer.h" +#include <boost/python.hpp> + namespace LOFAR { namespace WINCCWRAPPER { @@ -74,15 +78,31 @@ bool WinCCWrapper::set_datapoint(const std::string &name, int value, bool valid) return manager->set_datapoint(name + DP_SUFFIX, variable, valid); } -bool WinCCWrapper::set_datapoint(const std::string &name, std::vector<int> &value, bool valid) +bool WinCCWrapper::set_datapoint(const std::string &name, boost::python::list &value, bool valid) { DynVar variable(VariableType::INTEGER_VAR); + for(int i = 0; i < boost::python::len(value); i++) { + int boost_elem = boost::python::extract<int>(value[i]); + IntegerVar elem{boost_elem}; + variable.append(elem);} + return manager->set_datapoint(name + DP_SUFFIX, variable, valid); +} + +bool WinCCWrapper::set_datapoint(const std::string &name, boost::python::tuple &value, bool valid) +{ + // do a simple type conversion to a boost::python::list to access the 'append' method + boost::python::list temp_list = boost::python::list(value); + return set_datapoint(name, temp_list, valid); +} + +bool WinCCWrapper::set_datapoint(const std::string &name, std::vector<int> &value, bool valid) +{ + DynVar variable(VariableType::INTEGER_VAR); + for(auto iter = value.cbegin(); iter != 
value.cend(); iter++) { IntegerVar elem{*iter}; - variable.append(elem); - } - + variable.append(elem);} return manager->set_datapoint(name + DP_SUFFIX, variable, valid); } @@ -137,7 +157,15 @@ int WinCCWrapper::get_datapoint_int(const std::string &name) throw std::runtime_error("Could not get datapoint"); } -std::vector<int> WinCCWrapper::get_datapoint_int_vector(const std::string &name) +boost::python::list WinCCWrapper::get_datapoint_list(const std::string &name) +{ + boost::python::list value; + if(get_datapoint(name, value)) + return value; + throw std::runtime_error("Could not get datapoint"); +} + +std::vector<int> WinCCWrapper::get_datapoint_vector(const std::string &name) { std::vector<int> value; if(get_datapoint(name, value)) diff --git a/LCS/WinCCWrapper/src/WinCCWrapper_boost_python.cc b/LCS/WinCCWrapper/src/WinCCWrapper_boost_python.cc index 9e4818ab3fd65a4018be312212ccee031fe08a27..4eee1f9936c9b91cbc785b0385f971bee0d07182 100644 --- a/LCS/WinCCWrapper/src/WinCCWrapper_boost_python.cc +++ b/LCS/WinCCWrapper/src/WinCCWrapper_boost_python.cc @@ -7,7 +7,7 @@ //# The LOFAR Software Suite is free software: you can redistribute it and/or //# modify it under the terms of the GNU General Public License as published by //# the Free Software Foundation, either version 3 of the License, or (at your -//# option) any later version. 
+//# option) any later versions //# //# The LOFAR Software Suite is distributed in the hope that it will be //# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of @@ -19,6 +19,8 @@ #include <boost/python.hpp> #include "WinCCWrapper.h" +#include <vector> +#include <boost/python/suite/indexing/vector_indexing_suite.hpp> BOOST_PYTHON_MODULE(pywincc) { @@ -26,7 +28,10 @@ BOOST_PYTHON_MODULE(pywincc) using namespace LOFAR::WINCCWRAPPER; bool (WinCCWrapper::*set_datapoint_int)(const std::string&, int, bool) = &WinCCWrapper::set_datapoint; - bool (WinCCWrapper::*set_datapoint_int_vector)(const std::string&, std::vector<int>&, bool) = &WinCCWrapper::set_datapoint; + bool (WinCCWrapper::*set_datapoint_list)(const std::string&, boost::python::list&, bool) = &WinCCWrapper::set_datapoint; + bool (WinCCWrapper::*set_datapoint_tuple)(const std::string&, boost::python::tuple&, bool) = &WinCCWrapper::set_datapoint; + // the set_datapoint_vector method is used in the WinCCGet and WinCCSet tests + bool (WinCCWrapper::*set_datapoint_vector)(const std::string&, std::vector<int>&, bool) = &WinCCWrapper::set_datapoint; bool (WinCCWrapper::*set_datapoint_long)(const std::string&, long, bool) = &WinCCWrapper::set_datapoint; bool (WinCCWrapper::*set_datapoint_float)(const std::string&, float, bool) = &WinCCWrapper::set_datapoint; bool (WinCCWrapper::*set_datapoint_bool)(const std::string&, bool, bool) = &WinCCWrapper::set_datapoint; @@ -35,9 +40,17 @@ BOOST_PYTHON_MODULE(pywincc) bool (WinCCWrapper::*set_datapoint_valid)(const std::string&) = &WinCCWrapper::set_datapoint_valid; bool (WinCCWrapper::*set_datapoint_invalid)(const std::string&) = &WinCCWrapper::set_datapoint_invalid; + // define the conversion between std::vector<int> and boost::python::list + class_<std::vector<int>>("VectorIterable") + .def(vector_indexing_suite<std::vector<int>>()) + ; + class_<WinCCWrapper>("WinCCWrapper", init<const std::string&>()) .def("set_datapoint_int", set_datapoint_int) - 
.def("set_datapoint_int_vector", set_datapoint_int_vector) + .def("set_datapoint_list", set_datapoint_list) + // define the "set_datapoint_list" method once more to accept a tuple instead of a list + .def("set_datapoint_list", set_datapoint_tuple) + .def("set_datapoint_vector", set_datapoint_vector) .def("set_datapoint_long", set_datapoint_long) .def("set_datapoint_float", set_datapoint_float) .def("set_datapoint_bool", set_datapoint_bool) @@ -47,7 +60,8 @@ BOOST_PYTHON_MODULE(pywincc) .def("set_datapoint_valid", set_datapoint_valid) .def("set_datapoint_invalid", set_datapoint_invalid) .def("get_datapoint_int", &WinCCWrapper::get_datapoint_int) - .def("get_datapoint_int_vector", &WinCCWrapper::get_datapoint_int_vector) + .def("get_datapoint_list", &WinCCWrapper::get_datapoint_list) + .def("get_datapoint_vector", &WinCCWrapper::get_datapoint_vector) .def("get_datapoint_long", &WinCCWrapper::get_datapoint_long) .def("get_datapoint_float", &WinCCWrapper::get_datapoint_float) .def("get_datapoint_bool", &WinCCWrapper::get_datapoint_bool) diff --git a/LCS/WinCCWrapper/test/CMakeLists.txt b/LCS/WinCCWrapper/test/CMakeLists.txt index c0e25c5a523711a37a3deb95c9ddd43d87d35077..93e3e67b7f1dd434c11c41fd399d18972c55937f 100644 --- a/LCS/WinCCWrapper/test/CMakeLists.txt +++ b/LCS/WinCCWrapper/test/CMakeLists.txt @@ -1,8 +1,17 @@ include(LofarCTest) IF(BUILD_TESTING) - lofar_add_bin_program(WinCCSet WinCCSet.cc) - lofar_add_bin_program(WinCCGet WinCCGet.cc) + lofar_find_package(WINCC) - include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include) + IF(WINCC_FOUND) + include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include) + + lofar_add_bin_program(WinCCSet WinCCSet.cc) + lofar_add_bin_program(WinCCGet WinCCGet.cc) + ENDIF(WINCC_FOUND) ENDIF(BUILD_TESTING) + +# always add the mock.py module with the mocked WinCCWrapper, so it is available for development and testing, +# even without an active wincc instance. 
+include(PythonInstall) +python_install(mock.py DESTINATION lofar/common/wincc/test) diff --git a/LCS/WinCCWrapper/test/WinCCGet.cc b/LCS/WinCCWrapper/test/WinCCGet.cc index 0293c530d7da1ba9728308a0ff9ab3f7d6d815ed..5958d82e915ae681f7e11d206b062c0eebeb1983 100644 --- a/LCS/WinCCWrapper/test/WinCCGet.cc +++ b/LCS/WinCCWrapper/test/WinCCGet.cc @@ -1,18 +1,62 @@ #include <cstdlib> #include <string> #include <WinCCWrapper.h> +#include <vector> +#include <iostream> using namespace LOFAR::WINCCWRAPPER; +using namespace std; -int main(int, char * argv[]) +void get_help(){ + cout << "Usage:" << endl; + cout << "WinCCGet \"datapoint_name\" datapoint_type" << endl; + cout << "Accepted datapoint types:" << endl; + cout << " int, float, string, list (for int lists)" << endl; +} + +int main(int argc, char * argv[]) { - WinCCWrapper wrapper{""}; + bool asking_for_help = ((argc == 2) && (string(argv[1]) == "--help" || string(argv[1]) == "--h")); + bool invalid_args = (argc != 3); - std::string dpname{argv[1]}; - int value = 22; + if (asking_for_help || invalid_args){ + get_help(); + return 0; + } + + WinCCWrapper wrapper{""}; + string dpname{argv[1]}; - value = wrapper.get_datapoint_int(dpname); - std::cout << dpname << ": " << value << std::endl; + if (string(argv[2]) == "int") { + int value; + value = wrapper.get_datapoint_int(dpname); + cout << dpname << ": " << value << endl; + } + else if (string(argv[2]) == "float") { + float value; + value = wrapper.get_datapoint_float(dpname); + cout << dpname << ": " << value << endl; + } + else if (string(argv[2]) == "string") { + string value; + value = wrapper.get_datapoint_string(dpname); + cout << dpname << ": " << value << endl; + } + else if (string(argv[2]) == "list") { + // We use the argument 'list' for consistency with the python interface, + // even though we must pass a vector + std::vector<int> value; + value = wrapper.get_datapoint_vector(dpname); + cout << dpname << ": ["; + for (auto iter = value.cbegin(); iter != 
value.cend(); iter++) { + cout << *iter << ", "; + } + cout << "\b\b]" << endl; // remove the last ', ' from the end. + } + else { + cout << "Unknown datatype: " << string(argv[2]) << "\n" << endl; + get_help(); + } return 0; } diff --git a/LCS/WinCCWrapper/test/WinCCSet.cc b/LCS/WinCCWrapper/test/WinCCSet.cc index c7d05fd428fdb792d6e4660e6ffd6698cc6cf313..f6bfd1419400ab24ba7e7a39580d6f57998d70df 100644 --- a/LCS/WinCCWrapper/test/WinCCSet.cc +++ b/LCS/WinCCWrapper/test/WinCCSet.cc @@ -1,17 +1,58 @@ #include <cstdlib> #include <string> #include <WinCCWrapper.h> +#include <vector> using namespace LOFAR::WINCCWRAPPER; +using namespace std; -int main(int, char * argv[]) +void get_help(){ + cout << "Usage:" << endl; + cout << "WinCCSet \"datapoint_name\" datapoint_type new_value" << endl; + cout << "Accepted datapoint types:" << endl; + cout << " int, float, string, list (for int list)" << endl; +} + +int main(int argc, char * argv[]) { + bool asking_for_help = ((argc == 2) && (string(argv[1]) == "--help" || string(argv[1]) == "--h")); + bool invalid_args = (argc < 4); + + if (asking_for_help || invalid_args){ + get_help(); + return 0; + } WinCCWrapper wrapper{""}; + string dpname{argv[1]}; - std::string dpname{argv[1]}; - int value = atoi(argv[2]); + if (string(argv[2]) == "float") { + float value = atof(argv[3]); + wrapper.set_datapoint(dpname, value); + } + else if (string(argv[2]) == "int") { + int value = atoi(argv[3]); + wrapper.set_datapoint(dpname, value); + } + else if (string(argv[2]) == "string") { + string value{argv[3]}; + wrapper.set_datapoint(dpname, value); + } + else if (string(argv[2]) == "list") { + // we cannot append to lists made outside of a boost python module declaration, + // so we pass a vector instead. 
+ // We use the argument "list" for consistency with the python interface + vector<int> value(argc-3); - wrapper.set_datapoint(dpname, value); + for (int i = 3; i < argc; i++) { + value[i-3] = atoi(argv[i]); + } + wrapper.set_datapoint(dpname, value); + } + else { + cout << "Unknown datatype: " << string(argv[2]) << "\n" << endl; + get_help(); + } return 0; } + diff --git a/LCS/WinCCWrapper/test/mock.py b/LCS/WinCCWrapper/test/mock.py new file mode 100644 index 0000000000000000000000000000000000000000..e1feed7a75905a150a8aa2473c9be24e044b2cb9 --- /dev/null +++ b/LCS/WinCCWrapper/test/mock.py @@ -0,0 +1,99 @@ +# mock.py: mocked version of WinCCWrapper +# +# Copyright (C) 2016 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the APERTIF software suite. +# The APERTIF software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The APERTIF software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the APERTIF software suite. If not, see <http://www.gnu.org/licenses/>. 
+# +# $Id: $ + +""" +this module defines a mocked version of lofar.common.wincc.pywincc.WinCCWrapper""" + + +from apertif.common.astronLogger import get_logger +logger = get_logger('apertif') + +class MockWinCCWrapper: + '''mocked version of lofar.common.wincc.pywincc.WinCCWrapper with same API, + faking a database storage backend by storing all set values in a local dict + ''' + def __init__(self): + self.last_set_values = {} + self.last_set_valid_values = {} + + def set_datapoint(self, name, value, valid): + logger.info('MockWinCCWrapper: storing value=%s valid=%s for datapoint=%s', value, valid, name) + self.last_set_values[name] = value + self.last_set_valid_values[name] = valid + + def set_datapoint_int(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_long(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_bool(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_float(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_string(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_time(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_list(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def set_datapoint_vector(self, name, value, valid): + return self.set_datapoint(name, value, valid) + + def get_datapoint(self, name): + return self.last_set_values[name] + + def get_datapoint_int(self, name): + return self.get_datapoint(name) + + def get_datapoint_long(self, name): + return self.get_datapoint(name) + + def get_datapoint_float(self, name): + return self.get_datapoint(name) + + def get_datapoint_bool(self, name): + return self.get_datapoint(name) + + def get_datapoint_string(self, name): + return self.get_datapoint(name) + + def get_datapoint_time(self, name): + 
return self.get_datapoint(name) + + def get_datapoint_list(self, name): + return self.get_datapoint(name) + + def get_datapoint_vector(self, name): + return self.get_datapoint(name) + + def set_datapoint_valid(self, name): + self.last_set_valid_values[name] = True + + def set_datapoint_invalid(self, name): + self.last_set_valid_values[name] = False + diff --git a/LCS/pyparameterset/src/__init__.py b/LCS/pyparameterset/src/__init__.py index bd37643c8fbef2d3a51f14685632cd55cacd37ee..f84733f4f24f6da87d28f9eabac50e937f27f226 100755 --- a/LCS/pyparameterset/src/__init__.py +++ b/LCS/pyparameterset/src/__init__.py @@ -19,8 +19,8 @@ # # $Id$ -from _pyparameterset import PyParameterValue -from _pyparameterset import PyParameterSet +from ._pyparameterset import PyParameterValue +from ._pyparameterset import PyParameterSet class parametervalue(PyParameterValue): """ @@ -152,7 +152,7 @@ class parameterset(PyParameterSet): self.replace (kv[0], kv[1]) def adoptDict(self, parms): - for (k,v) in parms.iteritems(): + for (k,v) in parms.items(): # str(container) calls __repr__ on its items, which ends # badly for us for lists of unicode strings ([u"a"] -> ['ua']). # We thus stringify the items first. @@ -161,6 +161,16 @@ class parameterset(PyParameterSet): self.replace (str(k), str(v)) # k, v always type string + @staticmethod + def fromString(parset_string): + '''Create a parset from a plain text string. + Splits the string in lines, and parses each '=' seperated key/value pair. 
+ ''' + lines = [l.strip() for l in parset_string.split('\n')] + kv_pairs = [tuple(l.split('=')) for l in lines if '=' in l] + parset_dict = dict(kv_pairs) + return parameterset(parset_dict) + def get(self, key): # type: (object) -> object """Get the parametervalue object of a parameter.""" diff --git a/LCS/pyparameterset/test/tpyparameterset.py b/LCS/pyparameterset/test/tpyparameterset.py index 92e61e32fcd49ac0d7130cfab95c0192050a9562..8b65375425c81db69c3829c18b2eb2f2b5631fb7 100644 --- a/LCS/pyparameterset/test/tpyparameterset.py +++ b/LCS/pyparameterset/test/tpyparameterset.py @@ -1,71 +1,78 @@ +from __future__ import print_function + from lofar.parameterset import * +try: + import cPickle as pickle +except ImportError: + import pickle + def checkps (ps): - print ps.isDefined("key1") - print ps.isDefined("a.b") - print ps.get("a.b").get() - print ps.getString("a.b") - print ps.getString("a.b", "aa") - print ps.getString("aa.bb", "aa") + print(ps.isDefined("key1")) + print(ps.isDefined("a.b")) + print(ps.get("a.b").get()) + print(ps.getString("a.b")) + print(ps.getString("a.b", "aa")) + print(ps.getString("aa.bb", "aa")) - print ps.getString("a.b.lange_naam") + print(ps.getString("a.b.lange_naam")) - print ps.getBool(key="a.b.bool") - print ps.getBool("a.b.bool", False) - print ps.getBool("aa.bb", False) + print(ps.getBool(key="a.b.bool")) + print(ps.getBool("a.b.bool", False)) + print(ps.getBool("aa.bb", False)) - print ps.getInt("a.b") - print ps.getInt("a.b", 10) - print ps.getInt("aa.bb", 10) + print(ps.getInt("a.b")) + print(ps.getInt("a.b", 10)) + print(ps.getInt("aa.bb", 10)) - print ps.getFloat("a.b") - print ps.getFloat("a.b", 3.14) - print ps.getFloat("aa.bb", 3.14) - print ps.getDouble("a.b.double") + print("{:.1f}".format(ps.getFloat("a.b"))) + print("{:.1f}".format(ps.getFloat("a.b", 3.14))) + print("{:.10f}".format(ps.getFloat("aa.bb", 3.14))) + print("{:.7f}".format(ps.getDouble("a.b.double"))) - print ps.getBoolVector("vecbool") - print 
ps.getBoolVector("vecbool", (False,True)) - print ps.getBoolVector("aa.bb", [False,True]) + print(ps.getBoolVector("vecbool")) + print(ps.getBoolVector("vecbool", (False,True))) + print(ps.getBoolVector("aa.bb", [False,True])) - print ps.getIntVector("vec") - print ps.getIntVector("vec", (5,6)) - print ps.getIntVector("aa.bb", [5,6]) + print(ps.getIntVector("vec")) + print(ps.getIntVector("vec", (5,6))) + print(ps.getIntVector("aa.bb", [5,6])) - print ps.getFloatVector("vec") - print ps.getFloatVector("vec", (5,6)) - print ps.getFloatVector("aa.bb", [5,6]) + print(ps.getFloatVector("vec")) + print(ps.getFloatVector("vec", (5,6))) + print(ps.getFloatVector("aa.bb", [5,6])) - print ps.getDoubleVector("vec") - print ps.getDoubleVector("vec", (5,6)) - print ps.getDoubleVector("aa.bb", [5,6]) + print(ps.getDoubleVector("vec")) + print(ps.getDoubleVector("vec", (5,6))) + print(ps.getDoubleVector("aa.bb", [5,6])) - print ps.getStringVector("vec") - print ps.getStringVector("vec", ('5','6')) - print ps.getStringVector("aa.bb", ['5','6']) + print(ps.getStringVector("vec")) + print(ps.getStringVector("vec", ('5','6'))) + print(ps.getStringVector("aa.bb", ['5','6'])) - print ps.getIntVector("vecexp", True) - print ps.getIntVector("vecexp", [1,2], True) - print ps.getIntVector("aa.bb", [1,2], True) + print(ps.getIntVector("vecexp", True)) + print(ps.getIntVector("vecexp", [1,2], True)) + print(ps.getIntVector("aa.bb", [1,2], True)) pvs = ps["vecnest"] - print pvs.isVector() + print(pvs.isVector()) pvsvec = pvs.getVector() - print pvsvec[0].get() - print pvsvec[0].expand().getIntVector() - print pvsvec[1].expand().getIntVector() + print(pvsvec[0].get()) + print(pvsvec[0].expand().getIntVector()) + print(pvsvec[1].expand().getIntVector()) # Check using given parset file. checkps (parameterset("tpyparameterset.in")) -print "" +print("") # Create and check a new parset using same keys/values as in parset file. 
ps=parameterset() -print ">>>" -print ps.version("tree") -print ps.version("full") -print ps.version("top") -print ps.version() -print "<<<" +print(">>>") +print(ps.version("tree")) +print(ps.version("full")) +print(ps.version("top")) +print(ps.version()) +print("<<<") ps.add ("a.b", "7") ps.add ("a.b.lange_naam", "'dit \"is\" nu een andere naam geworden zonder extra spaties aan het einde want die gaan verloren bij wegschrijven + teruglezen'") ps.add ("a.b.c", "5") @@ -77,27 +84,26 @@ ps.add ("vecbool", "[true,false,true]") ps.add ("vec", "[1,2,3]") ps.add ("vecexp", "[1..3,5..10]") ps.add ("vecnest", "[[1..3,5*10],[5..10]]") -print ps.keys() +print(ps.keys()) checkps (ps) # Check if a subset can be made and its name can be read. pss = ps.makeSubset('a.') -print pss.keys() -print 'b.c =', pss.getString ('b.c') -print pss.makeSubset('b.', 'aa.bb.').keys() -print pss.makeSubset('b.').size() -print pss.makeSubset('cc').keys() # should be empty -print len(pss.makeSubset('cc')) +print(pss.keys()) +print('b.c =', pss.getString ('b.c')) +print(pss.makeSubset('b.', 'aa.bb.').keys()) +print(pss.makeSubset('b.').size()) +print(pss.makeSubset('cc').keys()) # should be empty +print(len(pss.makeSubset('cc'))) # Check the dict functionality. 
-print pss.dict() -print pss.dict(True) # remove quotes around strings +print(sorted(pss.dict().items())) +print(sorted(pss.dict(True).items())) # remove quotes around strings # Check str() -print str(ps) +print(str(ps)) # Check picking/unpickling -import cPickle as pickle s = pickle.dumps(ps) ps2 = pickle.loads(s) assert str(ps) == str(ps2) diff --git a/LCS/pyparameterset/test/tpyparameterset.stdout b/LCS/pyparameterset/test/tpyparameterset.stdout index 0fc009b824f13087fa6dca3c9db633f1afaa2ded..854ff79614fe997b77ac86c35ef8793cad8338c2 100644 --- a/LCS/pyparameterset/test/tpyparameterset.stdout +++ b/LCS/pyparameterset/test/tpyparameterset.stdout @@ -128,8 +128,8 @@ b.c = 5 4 [] 0 -{'b.c': '5', 'b': '7', 'b.lange_naam': '\'dit "is" nu een andere naam geworden zonder extra spaties aan het einde want die gaan verloren bij wegschrijven + teruglezen\'', 'b.bool': 'true', 'b.double': '3.1415926'} -{'b.c': '5', 'b': '7', 'b.lange_naam': 'dit "is" nu een andere naam geworden zonder extra spaties aan het einde want die gaan verloren bij wegschrijven + teruglezen', 'b.bool': 'true', 'b.double': '3.1415926'} +[('b', '7'), ('b.bool', 'true'), ('b.c', '5'), ('b.double', '3.1415926'), ('b.lange_naam', '\'dit "is" nu een andere naam geworden zonder extra spaties aan het einde want die gaan verloren bij wegschrijven + teruglezen\'')] +[('b', '7'), ('b.bool', 'true'), ('b.c', '5'), ('b.double', '3.1415926'), ('b.lange_naam', 'dit "is" nu een andere naam geworden zonder extra spaties aan het einde want die gaan verloren bij wegschrijven + teruglezen')] a.b=7 a.b.bool=true a.b.c=5 diff --git a/LCS/pytools/include/pytools/PycBasicData.h b/LCS/pytools/include/pytools/PycBasicData.h index 85ecd8efc1198b21d46587388a06f26fe6a69965..f2e12e55f72bad24bd39e8a016ea7f9ca0261524 100755 --- a/LCS/pytools/include/pytools/PycBasicData.h +++ b/LCS/pytools/include/pytools/PycBasicData.h @@ -231,11 +231,16 @@ namespace LOFAR { namespace pytools { incref(obj_ptr); // incr refcount, because ~object 
decrements it // Accept single values. if (PyBool_Check(obj_ptr) +#if PYTHON_VERSION_MAJOR < 3 || PyInt_Check(obj_ptr) + || PyString_Check(obj_ptr) +#else + || PyUnicode_Check(obj_ptr) +#endif || PyLong_Check(obj_ptr) || PyFloat_Check(obj_ptr) || PyComplex_Check(obj_ptr) - || PyString_Check(obj_ptr)) { + ) { extract<container_element_type> elem_proxy(py_obj); if (!elem_proxy.check()) return 0; return obj_ptr; @@ -277,12 +282,16 @@ namespace LOFAR { namespace pytools { data->convertible = storage; ContainerType& result = *((ContainerType*)storage); if (PyBool_Check(obj_ptr) +#if PYTHON_VERSION_MAJOR < 3 || PyInt_Check(obj_ptr) + || PyString_Check(obj_ptr) +#else + || PyUnicode_Check(obj_ptr) +#endif || PyLong_Check(obj_ptr) || PyFloat_Check(obj_ptr) || PyComplex_Check(obj_ptr) - || PyString_Check(obj_ptr)) { - /// || PyString_Check(obj_ptr) + ) { /// || PycArrayScalarCheck(obj_ptr)) { extract<container_element_type> elem_proxy(obj_ptr); ConversionPolicy::reserve(result, 1); diff --git a/LCS/pytools/test/tConvert.py b/LCS/pytools/test/tConvert.py index 551ebbe173854972cfcf7eb07be8e45704c574a2..3b245086108e20cb7e04248cbe5ca1e9c971b86c 100755 --- a/LCS/pytools/test/tConvert.py +++ b/LCS/pytools/test/tConvert.py @@ -1,30 +1,32 @@ #!/usr/bin/env python +from __future__ import print_function + from _tConvert import * def dotest(t): - print '' - print 'begin dotest' - print t.testbool (True); - print t.testbool (False); - print t.testint (-1); - print t.testint (10L); - print t.testint64 (-123456789013L); - print t.testint64 (123456789014L); - print t.testssize (-2); - print t.testssize (11); - print t.testfloat (3.14); - print t.testfloat (12); - print t.teststring ("this is a string"); + print('') + print('begin dotest') + print(t.testbool (True)) + print(t.testbool (False)) + print(t.testint (-1)) + print(t.testint (10)) + print(t.testint64 (-123456789013)) + print(t.testint64 (123456789014)) + print(t.testssize (-2)) + print(t.testssize (11)) + print(t.testfloat 
(3.14)) + print(t.testfloat (12)) + print(t.teststring ("this is a string")) - print t.testvecint ([1,2,3,4]); - print t.testvecint ([]); - print t.testvecint ((-1,-2,-3,-4)); - print t.testvecint (-10); - print t.testveccomplex ([1+2j, -1-3j, -1.5+2.5j]); - print t.testvecstr (["a1","a2","b1","b2"]) - print t.testvecstr (()) - print t.testvecstr ("sc1") + print(t.testvecint ([1,2,3,4])) + print(t.testvecint ([])) + print(t.testvecint ((-1,-2,-3,-4))) + print(t.testvecint (-10)) + print(t.testveccomplex ([1+2j, -1-3j, -1.5+2.5j])) + print(t.testvecstr (["a1","a2","b1","b2"])) + print(t.testvecstr (())) + print(t.testvecstr ("sc1")) t = tConvert(); diff --git a/LCU/StationTest/rspctlprobe.py b/LCU/StationTest/rspctlprobe.py new file mode 100755 index 0000000000000000000000000000000000000000..4b8d23ed1deae4d76e682f19292539c1e40c2674 --- /dev/null +++ b/LCU/StationTest/rspctlprobe.py @@ -0,0 +1,857 @@ +#!/usr/bin/env python + +import logging +import re +import subprocess + +import json +import os +import argparse + +import tempfile +import shutil +import time +import socket + +import traceback + +name = __name__ if __name__ != '__main__' else 'rspctlprobe' +logger = logging.getLogger(name) + + +# --------------------------------NICE PRINTOUT +def table_maxlength_per_column(column): + """ + Computes the width in character of a column made of strings + :param column: list of values [ row1, row2 ... 
] + :return: max value + """ + return reduce(max, map(len, column)) + + +def compute_table_width(data, margin=1): + """ + Compute the column width in characters + :param data: table made of a list of columns + :type data: list + :param margin: number of character to use as a margin for all the columns + :type margin: int + :return: a list of all the column sizes + """ + return map(lambda x: x + 2*margin, map(table_maxlength_per_column, data)) + + +def table_fix_string_length(string, length): + """ + Reformat each string to have the same character width + :param string: the string to reformact + :type string: str + :param length: the length of the final string + :type length: str + :return: a formatted string with the request character size + """ + return '{:^{width}}'.format(string, width=length) + + +def table_format_column(column, length): + """ + Given a column of values it formats them to have the requested character size + :param column: the column of data + :type column: list + :param length: the length you want to have for that column + :return: + """ + return map(lambda x: table_fix_string_length(x, length), column) + + +def table_transpose(table): + """ + Transpose a list of rows in a list of columns and viceversa + :param table: the table to format + :type table: a list of list of strings + :return: + """ + return list(zip(*table)) + + +def table_format(table, separator="|", margin_size=1): + """ + Format a table of values + :param table: table of values + :param separator: character used to separate the columns + :param margin_size: size of the margin in characters + :return: + """ + # compute the size needed taking into account also the margins of each column in the table + column_desired_size = compute_table_width(table, margin_size) + # format each column with the desired number of characters + formatted_columns = [table_format_column(column, size) for column, size in zip(table, column_desired_size)] + # transpose the list of columns in list of rows 
and concatenate the values to obtain rows using the separator + return [separator.join(row) for row in table_transpose(formatted_columns)] + + +def table_print_out_table(write_function, table): + """ + Calls the write function for each row in the new formatted table + :param write_function: the function to be called + :param table: the table to format + :return: None + """ + try: + for row in table_format(table): + write_function(row+"\n") + except Exception as e: + logger.error("Error formatting table: %s", e) + + +# ---------------------------------UTILITIES +def issue_rspctl_command(cmd): + """ + Issue the command over a shell and catches the output + :param cmd: a list of the arguments to be executed + :type cmd: list + :return: a tuple with the stdout and the sterr of the execution + :rtype: tuple + """ + cmd = ["rspctl"] + cmd + + try: + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = proc.communicate() + + if proc.returncode == 0: + return out, err + else: + raise Exception("Program failed with error: \n" + + "STDOUT: %s\n" % out + + "STDERR: %s\n" % err) + except OSError as e: + raise Exception("Error executing " + " ".join(cmd) + ":" + e.strerror) + + +def list_mode(l): + """ + Return the most frequent element in the list + :param l: input list + :return: the most frequent element + """ + return max(set(l), key=l.count) + + +# ----------------------------------COMMANDS +# -------Clock +def parse_clock_output(out, err): + """ + Parse the output of the rspctl --clock + + Output pattern: + "Sample frequency: clock=??? 
MHz" + :param: out stdout + :param: err stderr + :return: the int value of the clock in Mhz + :rtype: int + """ + match = re.search("\s*Sample frequency: clock=(\d{3})MHz\s*", out) + if match: + return int(match.group(1)) + else: + raise Exception("Couldn't query the clock: \n" + + "%s\n" % out + + "STDOUT: %s\n" % out + + "STDERR: %s\n" % err) + + +def query_clock(): + """ + Execute the command rspctl --clock and and parses the result + :return: the clock in Mhz + :rtype: int + """ + out, err = issue_rspctl_command(['--clock']) + return parse_clock_output(out, err) + + +class RCUBoard: + """ + This class describes the properties of a RCUBoard + """ + def __init__(self, + identifier=-1, + status=None, + mode=None, + delay=None, + attenuation=None, + sub_bands=None, + xcsub_bands=None): + + self.id = identifier + self.status = status + self.mode = mode + self.delay = delay + self.attenuation = attenuation + self.sub_bands = sub_bands + self.xcsub_bands = xcsub_bands + + def __str__(self): + return "RCU[%d] status:%s mode:%s delay:%s attenuation:%s sub_bands:%s xcsub_bands:%s" % ( + self.id, + self.status, + self.mode, + self.delay, + self.attenuation, + self.sub_bands, + self.xcsub_bands) + + def __getitem__(self, item): + return getattr(self, item) + + +# -------RCU mode +def parse_rcu_output(out, err): + """ + Parse the output of rspctl --rcu + Output pattern: + "RCU[ 0].control=0x10003000 => OFF, mode:0, delay=00, att=00 + RCU[ 1].control=0x10003000 => OFF, mode:0, delay=00, att=00 + RCU[ 2].control=0x10003000 => OFF, mode:0, delay=00, att=00 + RCU[ 3].control=0x10003000 => OFF, mode:0, delay=00, att=00" + :param: out stdout + :param: err stderr + :return: a dict indexed by the rcu board id and the properties parsed such as the status, the mode, + the delay and the attenuation + :rtype: dict + """ + rcu_values = filter(None, out.split('\n')) # It filters empty strings + rcu_by_id = {} # list of RCUs listed by ID + + for rcu_value in rcu_values: + match = 
re.search("RCU\[\s*(?P<RCU_id>\d+)\].control=" + # parsing id + "\d+x\w+\s=>\s*(?P<status>\w+)," + # parsing status + "\smode:(?P<mode>\-?\d)," + # parsing mode + "\sdelay=(?P<delay>\d+)," + # parsing delay + "\satt=(?P<attenuation>\d+)", rcu_value) # parsing attenuation + if match: + rcu_id = int(match.group('RCU_id')) + rcu_board = RCUBoard(identifier=rcu_id, + status=match.group('status'), + mode=match.group('mode'), + delay=match.group('delay'), + attenuation=match.group('attenuation') + ) + + rcu_by_id[rcu_id] = rcu_board + else: + raise Exception("Couldn't query the rcu: \n" + + "STDOUT: %s\n" % out + + "STDERR: %s\n" % err) + return rcu_by_id + + +def query_rcu_mode(): + """ + Execute the command rspctl --rcu and parses the result + :return: the properties per rcu board + :rtype: dict + """ + out, err = issue_rspctl_command(['--rcu']) + return parse_rcu_output(out, err) + + +# -------Subbands +def parse_subbands_output(out, err): + """ + + Parses the output of rspctl --subbands + + Output pattern: + "RCU[ 0].subbands=(0,1) x (0,243) + [ 142 144 146 148 150 152 154 156 158 160 162 164 166 168 170 172 174 176 178 180 182 184 186 188 190 192 194 196 198 200 202 204 206 208 210 212 214 216 218 220 222 224 226 228 230 232 234 236 238 240 242 244 246 248 250 252 254 256 258 260 262 264 266 268 270 272 274 276 278 280 282 284 286 288 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ] + + RCU[ 1].subbands=(0,1) x (0,243) + [ 143 145 147 149 151 153 155 157 159 161 163 165 167 169 171 173 175 177 179 181 183 185 187 189 191 193 195 197 199 201 203 205 207 209 211 213 215 217 219 221 223 225 227 229 231 233 235 237 239 241 243 245 247 249 251 253 255 257 259 261 263 265 267 269 271 273 275 277 279 281 283 285 287 289 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 + + :param: out stdout + :param: err stderr + :return: a dict indexed by the rcuboard id and the properties parsed such as the active state, the mode, + the delay and the attenuation + :rtype: dict + """ + + rcu_values = filter(None, out.split('\n'))[1:] # FILTERS empty strings + + rcu_by_id = {} + + i_row = 0 + while i_row < len(rcu_values): + value = rcu_values[i_row] + match = re.search("RCU\[\s*(?P<RCU_id>\d+)\]" + # parsing RCU id + ".subbands=\(\d+,(?P<n_rows>\d)\)\s+x\s+\(0," + # parsing the number of rows + "(?P<n_elements>\d+)\)\s*", # parsing the number of elements + value) + if match: + rcu_id = int(match.group('RCU_id')) + n_rows = int(match.group('n_rows')) + 1 + + else: + raise Exception("Couldn't query the subband: \n" + + "%s\n" % value + + "STDOUT: %s\n" % out + + "STDERR: %s\n" % err) + + sub_band_list = [] + for i in range(n_rows): + # Parsing the string [ 143 145 ... or ... 
122 123] into a list of integers + row = map(int, filter(None, rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' '))) + sub_band_list.append(row) + + i_row = i_row + n_rows + 1 # ADVANCE + + rcu_by_id[rcu_id] = sub_band_list + + return rcu_by_id + + +def query_sub_bands_mode(): + """ + Execute the command rspctl --subbands and parses the result + :return: the properties per rcu board + :rtype: dict + """ + out, err = issue_rspctl_command(['--subbands']) + return parse_subbands_output(out, err) + + +# -------XCSub bands +def parse_xcsub_bands_output(out, err): + """ + + Parses the output of rspctl --xcsubbands + + Output pattern: + "getsubbandsack.timestamp=1511262126 - Tue, 21 Nov 2017 11:02:06.000000 +0000 + RCU[ 0].xcsubbands=(0,1) x (0,3) + [ 0 0 0 0 + 0 0 0 0 ] + + RCU[ 1].xcsubbands=(0,1) x (0,3) + [ 0 0 0 0 + 0 0 0 0 ] + + RCU[ 2].xcsubbands=(0,1) x (0,3) + [ 0 0 0 0 + 0 0 0 0 ] + + RCU[ 3].xcsubbands=(0,1) x (0,3) + [ 0 0 0 0 + 0 0 0 0 ] + + RCU[ 4].xcsubbands=(0,1) x (0,3) + [ 0 0 0 0 + 0 0 0 0 ] + + RCU[ 5].xcsubbands=(0,1) x (0,3) + [ 0 0 0 0 + 0 0 0 0 ] + + :param: out stdout + :param: err stderr + :return: a dict indexed by the rcu board id containing the list of xcsub bands used + :rtype: dict + """ + + rcu_values = filter(None, out.split('\n'))[1:] # it filters empty strings + + rcu_by_id = {} + + i_row = 0 + while i_row < len(rcu_values): + value = rcu_values[i_row] + match = re.search("RCU\[\s*(?P<RCU_id>\d+)\]." + + "xcsubbands=\(\d+,(?P<n_rows>\d)\)\s+x\s+\(0,(?P<n_elements>\d+)\)\s*", value) + if match: + rcu_id = int(match.group('RCU_id')) + n_rows = int(match.group('n_rows'))+1 + else: + raise Exception("Couldn't query the subband: \n" + + "%s\n" % value + + "STDOUT: %s\n" % out + + "STDERR: %s\n" % err) + + xcsub_bands_list = [] + for i in range(n_rows): + # Parsing the string [ 143 145 ... or ... 
122 123] into a list of integers + row = map(int, filter(None, rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' '))) + xcsub_bands_list.append(row) + + i_row = i_row + n_rows + 1 # ADVANCE + # concatenates the two rows -> computes the max xcsub_band and returns the value + # [NOTE max accepts only a couple of values] + val = reduce(lambda x, a: max(x, a), reduce(lambda x, a: x+a, xcsub_bands_list)) + # The xcsub band index is expressed as the double of the actual sub band: + # even for the X polarization + # odd for the Y polarization + val = (val-1)/2 if rcu_id % 2 else val/2 + + rcu_by_id[rcu_id] = val + return rcu_by_id + + +def query_xcsub_bands_mode(): + """ + Execute the command rspctl --subbands and parses the result + :return: the properties per rcu board + :rtype: dict + """ + out, err = issue_rspctl_command(['--xcsubband']) + return parse_xcsub_bands_output(out, err) + + +# -------Spectral inversion +def parse_spinv_output(out, err): + """ + Parses the output of rspctl --spinv + + Output pattern: + "getSIack.timestamp=1507887895 - Fri, 13 Oct 2017 09:44:55.000000 +0000 + + Board[00]: . . . . . . . . + Board[01]: . . . . . . . . + Board[02]: . . . . . . . . + Board[03]: . . . . . . . . + Board[04]: . . . . . . . . + Board[05]: . . . . . . . . + Board[06]: . . . . . . . . + Board[07]: . . . . . . . . + Board[08]: . . . . . . . . + Board[09]: . . . . . . . . + Board[10]: . . . . . . . . + Board[11]: . . . . . . . . + Board[12]: . . . . . . . . + Board[13]: . . . . . . . . + Board[14]: . . . . . . . . + Board[15]: . . . . . . . . + Board[16]: . . . . . . . . + Board[17]: . . . . . . . . + Board[18]: . . . . . . . . + Board[19]: . . . . . . . . + Board[20]: . . . . . . . . + Board[21]: . . . . . . . . + Board[22]: . . . . . . . . + Board[23]: . . . . . . . . 
+ + + :param: out stdout + :param: err stderr + :return: a dict indexed by the rcuboard id and the properties parsed such as the active state, the mode, + the delay and the attenuation + :rtype: dict + """ + + board_values = filter(None, out.split('\n'))[1:] # FILTERS empty strings + rcu_by_id = {} + for board_value in board_values: + temp = board_value.split(":") + match = re.search("Board\[(\w+)\]", temp[0]) + + if match: + board_id = int(match.group(1)) + else: + raise Exception("Couldn't query the spinv: \n" + + "%s\n" % board_value + + "STDOUT: %s\n" % out + + "STDERR: %s\n" % err) + + match = re.findall("(\d+|\.)", temp[1]) + + spinv_values = map(lambda x: x if x != '.' else '', + match) + + # this is a delicate point since some antenna might have not changed the spec inv setting + # is not straightforward to define whether or not the spec inv is on + rcu_by_id[board_id] = {"spinv": spinv_values, "ispinv": '' not in spinv_values} + + return rcu_by_id + + +def query_spinv_mode(): + """ + Execute the command rspctl --spinv and parses the result + :return: the spectral inversion status + :rtype: dict + """ + out, err = issue_rspctl_command(['--specinv']) + return parse_spinv_output(out, err) + + +def execute_xcstatistics_mode(parameters): + """ + Execute the command rspclt --xcstatistics from a dict of parameters + :param parameters: The properties for the xcstatistics command + :type parameters: dict + :return: + :rtype: + """ + logger.info("Executing xcstatistics with these parameters %s", parameters) + cmd_list = [] + + if 'xcangle' in parameters: + cmd_list.append('--xcangle') + + cmd_list.append('--xcstatistics') + + if 'duration' in parameters: + cmd_list.append('--duration=%d' % parameters['duration']) + if 'integration' in parameters: + cmd_list.append('--integration=%d' % parameters['integration']) + if 'directory' in parameters: + cmd_list.append('--directory=%s' % parameters['directory']) + if 'select'in parameters: + cmd_list.append('--select=%s' % 
parameters['select']) + + issue_rspctl_command(cmd_list) + + +# ----------------------------------Merging information + +def query_status(): + """ + Query the status of the station in particular collect its statistics executing + + rspctl --clock to collect the clock + rspctl --subbands to see the sub band involved + rspctl --rcu to collect status mode delay and attenuation + rspctl --spinv to collect the status of the spectral inversion + """ + try: + sub_bands = query_sub_bands_mode() + except Exception as e: + logger.error("error querying sub band: %s", e) + raise Exception('Error querying sub band') + + try: + xcsub_bands = query_xcsub_bands_mode() + except Exception as e: + logger.error("error querying xcsub bands: %s", e.message) + raise Exception('Error querying xcsub band') + + try: + rcu = query_rcu_mode() + except Exception as e: + logger.error("error querying rcu status: %s", e.message) + raise Exception('Error querying rcu') + + try: + clock = query_clock() + except Exception as e: + logger.error("error querying clock: %s", e.message) + raise Exception('Error querying clock') + + try: + boards_spinv = query_spinv_mode() + except Exception as e: + logger.error("error querying spectral inversion: %s", e.message) + raise Exception('Error querying spectral inversion') + + for k in rcu.keys(): + rcu_i = rcu[k] + rcu_i.sub_bands = sub_bands[k] + rcu_i.xcsub_bands = xcsub_bands[k] + + res = {"rcus": rcu, "clock": clock, "boards-spinv": boards_spinv} + + rcus_mode = [rcu[i]["mode"] for i in rcu] + rcus_xcsub_band = [rcu[i]["xcsub_bands"] for i in rcu] + + res["mode"] = list_mode(rcus_mode) + res["xcsub_band"] = list_mode(rcus_xcsub_band) + + return res + + +def dump_info_file(path, res): + """ + Dump the information collected in json format into the directory specified in path + :param path: where to store the information file + :type path: str + :param res: result of the query + :type res: dict + """ + + file_path = os.path.join(path, "infos") + with 
open(file_path, 'w') as fout: + fout.write(json.dumps(res, indent=4, separators=(',', ': '))) + + +def query_xcstatistics(options): + """ + Perform the query of the status information and the xcstatistics with the given options + and afterwards dumps the information into the directory specified in the options + with the timestamp + + + :param options: options that involve the rspctl --xcstatistics + :type options: dict + """ + final_directory = os.path.join(options['directory']) + if not os.path.exists(final_directory): + os.makedirs(final_directory) + + res = query_status() + + subband = res["xcsub_band"] + mode = res["mode"] + + filename = "_mode_%s_xst_sb%0.3d.dat" % (mode, subband) + + temporary_output_directory = tempfile.mkdtemp(prefix="rspctlprobe_tmp") + + options['directory'] = temporary_output_directory + integration = options['integration'] + + duration = options['duration'] + + logger.info("query xcstatistics and storing them into directory %s", options['directory']) + + execute_xcstatistics_mode(options) + + # List all the file in the temporary directory + file_list = [f for f in os.listdir(temporary_output_directory) + if os.path.isfile(os.path.join(temporary_output_directory, f))][0] + timestamp = file_list.rstrip("_xst.dat") + + res["timestamp"] = timestamp + filename = timestamp + filename + + shutil.copy(os.path.join(temporary_output_directory, file_list), os.path.join(final_directory, filename)) + shutil.rmtree(temporary_output_directory) + + rcus = res["rcus"] + header = ["RCUID", "delay", "attenuation", "mode", "status", "xcsub_bands"] + ids = [[header[0]] + map(str, rcus.keys())] # Create the id column of the file + table = [[key] + [str(rcus[i][key]) for i in rcus] for key in header[1:]] + table = ids + table + + fileout = os.path.join(final_directory, "summary.info") + + with open(fileout, "a") as out: + out.write("\n") + out.write("timestamp = {} , mode = {} , xcsubband = {}, integration = {}, duration = {}\n".format( + res["timestamp"], + 
res["mode"], + res["xcsub_band"], + integration, + duration)) + table_print_out_table(out.write, table) + + return res + + +def query_most_common_mode(): + """ + Return the most frequent mode that the RCUs have + :return: the mode + """ + rcus_mode = query_rcu_mode() + rcus_mode = [rcus_mode[rcu] for rcu in rcus_mode] + return int(list_mode(map(lambda x: x['mode'], rcus_mode))) + + +def set_mode(mode): + """ + Set the mode on all the rsp boards + + :param mode: the mode to be set + :type mode: int + """ + + if mode == query_most_common_mode(): + return True + + logger.info('switching rcu mode to %d', mode) + issue_rspctl_command(["--mode={}".format(mode)]) + logger.info('mode change command issued') + + for i in range(10): + time.sleep(3) + outmode = query_most_common_mode() + logger.info('current rsp mode is {}'.format(outmode)) + if mode == outmode: + logger.info('mode changed correctly to {}'.format(outmode)) + return True + raise Exception('Cannot change rsp mode') + + +def set_xcsubband(subband): + """ + Set the crosslet subband from which collecting the statistics on all the rsp boards + + :param subband: the list of subband + :type subband: string + """ + logger.info('switching rcu xcsubband to %d', subband) + issue_rspctl_command(["--xcsubband={}".format(subband)]) + logger.debug('xcsubband change command issued') + for i in range(10): + time.sleep(1) + xcsub_bands = query_xcsub_bands_mode().values() + out_xcsubband = list_mode(xcsub_bands) + if subband == out_xcsubband: + logger.info('xcsubband changed correctly to %d', out_xcsubband) + return True + raise Exception('Cannot change rsp xcsubband to {}'.format(subband)) + + +def produce_xcstatistics(integration_time=1, duration=1, add_options=None, output_directory="./"): + """ + Execute the command to compute the xcstatistics with a given integration and duration. + It is also possible to specify an output directory and additional options. 
+ :param integration_time: integration time + :param duration: duration time + :param add_options: additional options as a dict{} + :param output_directory: + :return: + """ + if not add_options: + add_options = {} + + add_options["integration"] = integration_time + add_options["duration"] = duration + add_options["directory"] = output_directory + + res = query_xcstatistics(add_options) + return res + + +def batch_produce_xcstatistics(integration_time, + duration, + wait_time=None, + xcsub_bands=None, + mode=None, + add_options=None, + output_directory="./"): + """ + Produces the xcstatistics for a list of integration_times durations and wait_times on the given set of xcsubband + storing everything in the output directory. + :param integration_time: list of integration times + :param duration: list of duration of the single + :param wait_time: list of wait times + :param xcsub_bands: list of sub band where to compute the crosslet statistics + :param mode: mode of the array + :param add_options: additional options to pass to rspctl + :param output_directory: the output directory + :return: None + """ + + if not wait_time: + wait_time = [0] + + if not add_options: + add_options = {} + + if mode != -2: + set_mode(mode) + + for ind, (i, d, w) in enumerate(zip(integration_time, duration, wait_time)): + if not xcsub_bands: + produce_xcstatistics(i, d, add_options, output_directory) + else: + for xcsub_band in xcsub_bands: + set_xcsubband(xcsub_band) + produce_xcstatistics(i, d, add_options, output_directory) + + time.sleep(w) + + +# ----------------------------------MAIN CODE LOGIC +def setup_logging(): + """ + Setup the logging system + """ + logging.basicConfig( + format='%(asctime)s - %(name)s: %(message)s', + datefmt="%m/%d/%Y %I:%M:%S %p", + level=logging.DEBUG) + + +def init(): + """ + Init phase of the program + """ + setup_logging() + + +def setup_command_argument_parser(): + parser = argparse.ArgumentParser( + description="es: python 
/opt/stationtest/rspctlprobe.py --mode 3 --xcsubband 150:250:50 --xcstatistics --integration 1 --duration 5 --wait 10 --loops 2 --directory /localhome/data") + + parser.add_argument('--xcstatistics', action='store_true') + parser.add_argument('--integration', type=int, default=[1], nargs='+') + parser.add_argument('--duration', type=int, default=[1], nargs='+') + parser.add_argument('--xcangle', default='False') + parser.add_argument('--directory', default=os.getcwd()) + parser.add_argument('--wait', type=int, default=[0], nargs='+') + parser.add_argument('--xcsubband', type=str, default="") + parser.add_argument('--loops', type=int, default=1) + parser.add_argument('--mode', type=int, default=-2) + return parser + + +def parse_and_execute_command_arguments(): + """ + Parses the command line arguments and execute the procedure linked + :return: + :rtype: + """ + parser = setup_command_argument_parser() + program_arguments = parser.parse_args() + + if program_arguments.xcstatistics: + options = {} + if program_arguments.xcangle: + options['xcangle'] = True + + try: + if program_arguments.xcsubband: + if ":" in program_arguments.xcsubband: + start, end, step = map(int, program_arguments.xcsubband.split(":")) + xcsub_bands = [i for i in range(start, end+step, step)] + if "," in program_arguments.xcsubband: + xcsub_bands = [int(i) for i in program_arguments.xcsubband.split(",")] + else: + xcsub_bands = [int(program_arguments.xcsubband)] + + for i in range(program_arguments.loops): + batch_produce_xcstatistics(program_arguments.integration, + program_arguments.duration, + wait_time=program_arguments.wait, + xcsub_bands=xcsub_bands, + mode=program_arguments.mode, + add_options=options, + output_directory=program_arguments.directory) + + else: + for i in range(program_arguments.loops): + batch_produce_xcstatistics(program_arguments.integration, + program_arguments.duration, + wait_time=program_arguments.wait, + mode=program_arguments.mode, + add_options=options, + 
output_directory=program_arguments.directory) + except Exception as e: + logger.error('error executing rspctl : %s', e) + logger.error('traceback \n%s', traceback.format_exc()) + raise e + else: + parser.error('please specify a task') + + +def main(): + init() + logging.basicConfig(format='%(asctime)s ' + socket.gethostname() + ' %(levelname)s %(message)s', + level=logging.INFO) + parse_and_execute_command_arguments() + + +if __name__ == '__main__': + main() diff --git a/LCU/checkhardware/CMakeLists.txt b/LCU/checkhardware/CMakeLists.txt index a94754b9dcd79d8ac79b0748e765220cc2d76098..2bddc75a90d43b66a6d55ce8d89da0d463cf4aaf 100644 --- a/LCU/checkhardware/CMakeLists.txt +++ b/LCU/checkhardware/CMakeLists.txt @@ -1,6 +1,15 @@ # $Id$ -lofar_package(checkhardware 1.0) +lofar_package(checkhardware 1.0 DEPENDS PyCommon) +include(PythonInstall) + +# install for testing in cmake +set(_py_files + check_hardware.py +) + +python_install(${_py_files} DESTINATION lofar/lcu/checkhardware) + # Install files matching regex pattern in current directory and below install(DIRECTORY . 
@@ -14,3 +23,9 @@ install(DIRECTORY config/ USE_SOURCE_PERMISSIONS FILES_MATCHING REGEX "(\\.conf)$" PATTERN ".svn" EXCLUDE) + + +add_subdirectory(test) +add_subdirectory(checkhardware_lib) + + diff --git a/LCU/checkhardware/check_hardware.py b/LCU/checkhardware/check_hardware.py index 9dc57811d14e6ab9b14886dde096c0d544f5bf00..06d18f2dc8e0897e5f088fd29a003909414f71f6 100755 --- a/LCU/checkhardware/check_hardware.py +++ b/LCU/checkhardware/check_hardware.py @@ -51,6 +51,13 @@ from time import sleep import datetime from socket import gethostname import logging +from signal import SIGABRT, SIGINT, SIGTERM, signal +import atexit +from subprocess import Popen, check_call, CalledProcessError, STDOUT, check_output +from functools import partial + +# FIXME: There is _way_ too much going on here outside a function, including things that might fail (like path checks) +# FIXME: removing hard dependencies on station environment os.umask(001) @@ -283,6 +290,148 @@ def wait_for_start(start_datetime): return +def stop_test_signal(cmd): + logger.info("Stopping test signal.") + + # try to execute command to stop test signal + try: + check_call(cmd, shell=True) + except CalledProcessError as ex: + logger.error(("Could not stop the test signal! Non-zero return code from start_cmd (%s)." % cmd), ex) + raise + +def stop_test_signal_and_exit(cmd, *optargs): + """ + Signal handler that exits with the return code of a passed POSIX signal after executing the provided command. 
+ + :param cmd: The command to stop the test signal + :param optargs: The intercepted POSIX signal + """ + stop_test_signal(cmd) + exit_without_triggering_handler(cmd, *optargs) + + +def exit_without_triggering_handler(cmd, *optargs): + """ + :param cmd: The command to stop the test signal + :param optargs: The intercepted POSIX signal + """ + + # try to get correct return code + logger.info('Now exiting.') + try: + ret_code = int(optargs[0]) # use POSIX signal code + os._exit(ret_code) # sys.exit() won't work here, we don't want to trigger our handler again + # (hm, we could actually call sys.exit and just trigger the atexit handler, but this is more explicit and keeps + # things operational independently.) + except: + os._exit(1) + + +def register_exit_handler(cmd): + """ + execute stop_cmd when script exits normally or with Exception + :param cmd: the command to execute + """ + # execute stop_cmd when script exits normally + atexit.register(stop_test_signal, cmd) + + +def register_signal_handlers(cmd): + """ + execute stop_cmd when script is terminated externally + :param cmd: the command to execute + """ + # execute stop_cmd when script exits normally + atexit.register(stop_test_signal, cmd) + + # handle POSIX signals + for sig in (SIGABRT, SIGINT, SIGTERM): + signal(sig, partial(stop_test_signal_and_exit, cmd)) + + +def start_watchdog_daemon(pid, cmd): + """ + Start a daemon that sits and waits for this script to terminate and then execute the provided command. + We cannot handle SIGKILL / kill -9 from inside the script, so we have to handle that case this way. This may be + a bit --wait for it-- overkill (hah!) and I don't see why this would be needed under normal circumstances, but + nonetheless, since this was requested on the ticket, here we go. 
+ :param cmd: command as shell-executable string + daemon_cmd = 'while ps -p %s > /dev/null; do sleep 1; done; %s' % (pid, cmd) + Popen(daemon_cmd, stdout=open('/dev/null', 'w'), stderr=STDOUT, shell=True, preexec_fn=os.setpgrp) + + +def safely_start_test_signal(start_cmd, stop_cmd): + """ + This will start start_cmd and set things up in a way that stop_cmd is executed in case the check_hardware script + either exits regularly or gets killed for some reason by a POSIX signal. stop_cmd might be executed repeatedly + under circumstances. + :param start_cmd: the command to start as shell-executable string + :param stop_cmd: the command to stop on exit as shell-executable string + """ + + # set things up so signal is stopped when check_hardware terminates + register_signal_handlers(stop_cmd) + register_exit_handler(stop_cmd) + start_watchdog_daemon(os.getpid(), stop_cmd) # this alone would actually be sufficient + + # start signal + try: + check_call(start_cmd, shell=True) + except CalledProcessError as ex: + logger.error("Could not start the test signal! Non-zero return code from start_cmd (%s)." % start_cmd, ex) + raise + + +def safely_start_test_signal_from_ParameterSet(settings): + ''' + :param settings: A settings.ParameterSet (e.g. obtained through TestSettings.group) + ''' + try: + start_cmd = settings.parset['testsignal']['start-cmd'] + stop_cmd = settings.parset['testsignal']['stop-cmd'] + logger.info('Test signal start/stop settings found. (%s // %s)' % (start_cmd, stop_cmd)) + + # start signal: + safely_start_test_signal(start_cmd, stop_cmd) + + try: + status_cmd = settings.parset['testsignal']['status-cmd'] + ok_status = settings.parset['testsignal']['ok-status'] + logger.info('Test signal status settings found. 
(%s // %s)' % (status_cmd, ok_status)) + + # wait for signal status to be ok: + wait_for_test_signal_status(status_cmd, ok_status) + + except KeyError: + logger.info('No test signal status settings found.') + + except KeyError: + logger.info('No test signal settings found.') + + +def wait_for_test_signal_status(status_cmd, status, retry_limit=30): + """ + :param status_cmd: command to get test signal status + :param status: the command output to wait for + :param retry_limit: raise RunTimeError after this many status_cmd that did not return status + """ + logger.info("Waiting for '%s' to return '%s'" % (status_cmd, status)) + out = None + for _ in range(retry_limit): + out = check_output(status_cmd, shell=True) + out = out.strip() + if out == status: + logger.info("Status ok.") + return status + else: + logger.info('Wrong status: %s != %s. Try again...'% (out, status)) + time.sleep(1) + + raise RuntimeError("Timed out. Last response was '%s'" % out) + + def main(): global station_name get_arguments() @@ -501,6 +650,7 @@ def main(): lbh.check_rf_power(mode=mode, parset=settings) for mode in (5, 6, 7): + # do all rcumode 5, 6, 7 tests hba = HBA(db) tile_settings = conf.group('rcumode.%d.tile' % mode) @@ -528,6 +678,7 @@ def main(): # if 'RCU%d' % mode in args or 'S%d' % mode in args: if 'S%d' % mode in args: + safely_start_test_signal_from_ParameterSet(tile_settings) hba.check_rf_power(mode=mode, parset=tile_settings) runtime = (time.time() - runstart) @@ -538,12 +689,12 @@ def main(): recordtime = 4 else: recordtime = int(args.get('E%d' % mode)) - + safely_start_test_signal_from_ParameterSet(elem_settings) hba.check_elements(mode=mode, record_time=recordtime, parset=elem_settings) # stop test if driver stopped db.rsp_driver_down = not check_active_rspdriver() - if db.rsp_driver_down and (restarts > 0): + if db.rsp_driver_down and (restarts > 0): # FIXME 'restarts' is undefined at this point?! 
restarts -= 1 reset_48_volt() time.sleep(30.0) @@ -634,6 +785,7 @@ def main(): return 0 - if __name__ == '__main__': sys.exit(main()) + + diff --git a/LCU/checkhardware/checkhardware_lib/CMakeLists.txt b/LCU/checkhardware/checkhardware_lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..e5ca32db72ca295e7ab1b50ca18625a046363936 --- /dev/null +++ b/LCU/checkhardware/checkhardware_lib/CMakeLists.txt @@ -0,0 +1,21 @@ +# $Id: CMakeLists.txt $ + +set(_py_files + __init__.py + data.py + db.py + general.py + hardware_tests.py + hba.py + lba.py + lofar.py + reporting.py + rsp.py + settings.py + spu.py + tbb.py +) + +python_install(${_py_files} DESTINATION lofar/lcu/checkhardware/checkhardware_lib) + +add_subdirectory(spectrum_checks) \ No newline at end of file diff --git a/LCU/checkhardware/checkhardware_lib/__init__.py b/LCU/checkhardware/checkhardware_lib/__init__.py index 837db9972b7db5382bdac83aedf7ae111f0a03dc..2a187de83473ab54a4bdf06f0d8c95801b081094 100644 --- a/LCU/checkhardware/checkhardware_lib/__init__.py +++ b/LCU/checkhardware/checkhardware_lib/__init__.py @@ -3,7 +3,7 @@ from general import * from lofar import * -from settings import TestSettings +from settings import TestSettings, ParameterSet from db import DB, db_version from reporting import make_report from spu import SPU diff --git a/LCU/checkhardware/checkhardware_lib/spectrum_checks/CMakeLists.txt b/LCU/checkhardware/checkhardware_lib/spectrum_checks/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..291219377eeb9b62b637452e6851cdff46e7942a --- /dev/null +++ b/LCU/checkhardware/checkhardware_lib/spectrum_checks/CMakeLists.txt @@ -0,0 +1,19 @@ +# $Id: CMakeLists.txt $ + +set(_py_files + __init__.py + cable_reflection.py + down.py + down_old.py + flat.py + noise.py + oscillation.py + peakslib.py + rf_power.py + short.py + spurious.py + summator_noise.py + tools.py +) + +python_install(${_py_files} DESTINATION 
lofar/lcu/checkhardware/checkhardware_lib/spectrum_checks) diff --git a/LCU/checkhardware/config/FR606-check_hardware.conf b/LCU/checkhardware/config/FR606-check_hardware.conf index f72e209b6823125c736aa0d727c6712495c40913..045a901b766ecbdbec7488484f28d86e559aa2fb 100755 --- a/LCU/checkhardware/config/FR606-check_hardware.conf +++ b/LCU/checkhardware/config/FR606-check_hardware.conf @@ -29,8 +29,8 @@ station= FR606C always= RV, TV, RBC, TBC list.0= list.1= SPU,TM,RCU3,RCU5 -list.2= SPU,TM,RCU3,M5,SN5,O5,N5,SP5,S7,E7 -list.3= S3 +list.2= SPU,TM,RCU3,M5,SN5,O5,N5,SP5,S5,E5 +list.3= S5,E5 [spu] temperature.min= 10.0 @@ -131,10 +131,10 @@ short.mean-pwr.min= 55.0 short.mean-pwr.max= 61.0 flat.mean-pwr.min= 61.0 flat.mean-pwr.max= 64.5 -rf.subbands= 105 +rf.subbands= 256 rf.min-sb-pwr= 65.0 -rf.negative-deviation= -24.0 -rf.positive-deviation= 12.0 +rf.negative-deviation= -28.0 +rf.positive-deviation= 16.0 noise.negative-deviation= -3.0 noise.positive-deviation= 1.5 noise.max-difference= 1.5 @@ -147,12 +147,16 @@ oscillation.min-peak-pwr= 6.0 oscillation.passband= 1:511 spurious.min-peak-pwr= 3.0 spurious.passband= 1:511 +testsignal.start-cmd= echo set_config 150.0 -10 | nc ncu 8093 +testsignal.stop-cmd= echo bye | nc ncu 8093 +testsignal.status-cmd= echo get_config | nc ncu 8093 | grep Frequency +testsignal.ok-status= Frequency: 150 MHz Power level: -10 dBm RF: ON [rcumode.5.element] -rf.subbands= 105 +rf.subbands= 256 rf.min-sb-pwr= 65.0 -rf.negative-deviation= -24.0 -rf.positive-deviation= 12.0 +rf.negative-deviation= -30.0 +rf.positive-deviation= 16.0 noise.negative-deviation= -3.0 noise.positive-deviation= 1.5 noise.max-difference= 1.5 @@ -161,6 +165,10 @@ oscillation.min-peak-pwr= 6.0 oscillation.passband= 1:511 spurious.min-peak-pwr= 3.0 spurious.passband= 1:511 +testsignal.start-cmd= echo set_config 150.0 0 | nc ncu 8093 +testsignal.stop-cmd= echo bye | nc ncu 8093 +testsignal.status-cmd= echo get_config | nc ncu 8093 | grep Frequency +testsignal.ok-status= 
Frequency: 150 MHz Power level: 0 dBm RF: ON [rcumode.6.tile] short.mean-pwr.min= 55.0 diff --git a/LCU/checkhardware/test/CMakeLists.txt b/LCU/checkhardware/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..acc3d69163f87e951e5c5ec2da0e9e7405a45bcb --- /dev/null +++ b/LCU/checkhardware/test/CMakeLists.txt @@ -0,0 +1,5 @@ +# $Id: CMakeLists.txt 33404 2016-01-27 10:21:14Z jkuensem $ + +include(LofarCTest) + +lofar_add_test(t_check_hardware) diff --git a/LCU/checkhardware/test/t_check_hardware.py b/LCU/checkhardware/test/t_check_hardware.py new file mode 100644 index 0000000000000000000000000000000000000000..54e5d8c78d1d8b0d5824826cb44200b0f929e1bf --- /dev/null +++ b/LCU/checkhardware/test/t_check_hardware.py @@ -0,0 +1,429 @@ +import unittest +import os +from mock import MagicMock, patch, call +import sys +import logging +import subprocess +import signal +import atexit +import time + +logger = logging.getLogger(__name__) + +# mock out check for existing log directory on script import as module +os.access = MagicMock(return_value=True) + +# mock out modules with relative imports (that only work with the namespace when executed as a script) +# FIXME: make sure that absolute imports are ok and don't break things in production. +# FIXME: ...Then fix the implementation and remove this mock so we can test those modules. 
+with patch.dict('sys.modules', **{ + 'checkhardware_lib': MagicMock(), + 'checkhardware_lib.data': MagicMock(), + 'checkhardware_lib.rsp': MagicMock(), + 'cable_reflection': MagicMock(), + 'logging': MagicMock(), +}): + # import these here so we can before mock out checks on station environment + import lofar.lcu.checkhardware.check_hardware as check_hardware + from lofar.lcu.checkhardware.checkhardware_lib import TestSettings, ParameterSet + check_hardware.logger = logger # override logger to handle logging output here + + +class TestCheckHardware(unittest.TestCase): + + def setUp(self): + logger.info(">>>---> %s <---<<<" % self._testMethodName) + + # mock exit call to not actually exit the test + os._exit = MagicMock() + # we don't want to actually call anything + check_hardware.check_call = MagicMock() + + self.elem_settings_no_testsignal = ParameterSet() + self.elem_settings_no_testsignal.parset = {'spurious': {'min-peak-pwr': '3.0', + 'passband': '1:511'}, + 'rf': {'negative-deviation': '-24.0', + 'subbands': '105', + 'min-sb-pwr': '65.0', + 'positive-deviation': '12.0'}, + 'noise': {'negative-deviation': '-3.0', + 'max-difference': '1.5', + 'positive-deviation': '1.5', + 'passband': '1:511'}, + 'oscillation': {'min-peak-pwr': '6.0', + 'passband': '1:511'}} + + self.elem_settings_testsignal = ParameterSet() + self.elem_settings_testsignal.parset = {'spurious': {'min-peak-pwr': '3.0', + 'passband': '1:511'}, + 'rf': {'negative-deviation': '-24.0', + 'subbands': '105', + 'min-sb-pwr': '65.0', + 'positive-deviation': '12.0'}, + 'noise': {'negative-deviation': '-3.0', + 'max-difference': '1.5', + 'positive-deviation': '1.5', + 'passband': '1:511'}, + 'testsignal': {'start-cmd': 'echo set config 56.0 -10 | nc ncu 8093', + 'stop-cmd': 'echo bye | nc ncu 8093'}, + 'oscillation': {'min-peak-pwr': '6.0', + 'passband': '1:511'}} + + self.elem_settings_testsignal_with_status = ParameterSet() + self.elem_settings_testsignal_with_status.parset = {'spurious': 
{'min-peak-pwr': '3.0', + 'passband': '1:511'}, + 'rf': {'negative-deviation': '-24.0', + 'subbands': '105', + 'min-sb-pwr': '65.0', + 'positive-deviation': '12.0'}, + 'noise': {'negative-deviation': '-3.0', + 'max-difference': '1.5', + 'positive-deviation': '1.5', + 'passband': '1:511'}, + 'testsignal': {'start-cmd': 'echo set config 56.0 -10 | nc ncu 8093', + 'stop-cmd': 'echo bye | nc ncu 8093', + 'status-cmd': "echo 'Frequency: 56 MHz Power level: -10 dBm RF: ON'", + 'ok-status': "Frequency: 56 MHz Power level: -10 dBm RF: ON"}, + 'oscillation': {'min-peak-pwr': '6.0', + 'passband': '1:511'}} + + + def test_safely_start_test_signal(self): + """ Verify that the provided command is executed and handlers are registered correctly""" + + # test value + start_cmd = 'echo "Start the signal!"' + stop_cmd = 'echo "Stop the signal!"' + + # setup test + with patch.object(check_hardware, 'register_exit_handler'), \ + patch.object(check_hardware, 'register_signal_handlers'), \ + patch.object(check_hardware, 'start_watchdog_daemon'): + + # trigger action + check_hardware.safely_start_test_signal(start_cmd, stop_cmd) + + # assert correct behavior + check_hardware.check_call.assert_called_with(start_cmd, shell=True) + check_hardware.register_exit_handler.assert_called_with(stop_cmd) + check_hardware.register_signal_handlers.assert_called_with(stop_cmd) + check_hardware.start_watchdog_daemon.assert_called_with(os.getpid(), stop_cmd) + + def test_safely_start_test_signal_logs_and_reraises_CalledProcessError(self): + """ Verify that the provided command is executed and handlers are registered correctly""" + + # test value + start_cmd = 'echo "Start the signal!"' + stop_cmd = 'echo "Stop the signal!"' + + # setup test + with patch.object(check_hardware, 'register_exit_handler'), \ + patch.object(check_hardware, 'register_signal_handlers'), \ + patch.object(check_hardware, 'start_watchdog_daemon'), \ + patch.object(check_hardware, 'check_call', 
MagicMock(side_effect=subprocess.CalledProcessError('', ''))), \ + patch.object(check_hardware.logger, 'error'): + + with self.assertRaises(subprocess.CalledProcessError): + # trigger action + check_hardware.safely_start_test_signal(start_cmd, stop_cmd) + + # assert correct behavior + check_hardware.logger.error.assert_called() + + def test_safely_start_test_signal_from_ParameterSet_turns_signal_and_waits_for_status_correctly(self): + """ Verify that the commands from ParameterSet are passed on to safely_start_test_signal and wait_for_test_signal_status""" + + # test value + start_cmd = 'echo set config 56.0 -10 | nc ncu 8093' + stop_cmd = 'echo bye | nc ncu 8093' + expected_status_cmd = "echo 'Frequency: 56 MHz Power level: -10 dBm RF: ON'" + expected_ok_status = "Frequency: 56 MHz Power level: -10 dBm RF: ON" + + # setup test + with patch.object(check_hardware, 'safely_start_test_signal'), \ + patch.object(check_hardware, 'wait_for_test_signal_status'): + + # trigger action + check_hardware.safely_start_test_signal_from_ParameterSet(self.elem_settings_testsignal_with_status) + + # assert correct behavior + check_hardware.safely_start_test_signal.assert_called_with(start_cmd, stop_cmd) + check_hardware.wait_for_test_signal_status.assert_called_with(expected_status_cmd, expected_ok_status) + + def test_safely_start_test_signal_from_ParameterSet_does_nothing_when_no_stationsignal_keys_in_ParameterSet(self): + """ Verify that the commands from ParameterSet are passed on to safely_start_test_signal and wait_for_test_signal_status is not called""" + + # setup test + with patch.object(check_hardware, 'safely_start_test_signal'), \ + patch.object(check_hardware, 'wait_for_test_signal_status'): + + # trigger action + check_hardware.safely_start_test_signal_from_ParameterSet(self.elem_settings_no_testsignal) + + # assert correct behavior + check_hardware.safely_start_test_signal.assert_not_called() + check_hardware.wait_for_test_signal_status.assert_not_called() + + def 
test_safely_start_test_signal_from_ParameterSet_only_starts_signal_when_no_status_keys_in_ParameterSet(self): + """ Verify that safely_start_test_signal and wait_for_test_signal_status are not called""" + + # test value + start_cmd = 'echo set config 56.0 -10 | nc ncu 8093' + stop_cmd = 'echo bye | nc ncu 8093' + + # setup test + with patch.object(check_hardware, 'safely_start_test_signal'), \ + patch.object(check_hardware, 'wait_for_test_signal_status'): + # trigger action + check_hardware.safely_start_test_signal_from_ParameterSet(self.elem_settings_testsignal) + + # assert correct behavior + check_hardware.safely_start_test_signal.assert_called_with(start_cmd, stop_cmd) + check_hardware.wait_for_test_signal_status.assert_not_called() + + def test_stop_test_signal(self): + """ Verify that the provided command is executed """ + + # test value + cmd = 'echo "Stop the signal! 1"' + + # trigger action + check_hardware.stop_test_signal(cmd) + + # assert correct behavior + os._exit.assert_not_called() + check_hardware.check_call.assert_called_with(cmd, shell=True) # command is executed + + def test_stop_test_signal_and_exit_defaults_to_code_1(self): + """ Verify that the provided command is executed and os._exit is called with correct return code """ + + # test value + cmd = 'echo "Stop the signal! 2"' + + # trigger action + check_hardware.stop_test_signal_and_exit(cmd) + + # assert correct behavior + os._exit.assert_called_with(1) # exit code correct + check_hardware.check_call.assert_called_with(cmd, shell=True) # command is executed + + def test_stop_test_signal_and_exit_handles_signal_correctly(self): + """ Verify that the provided command is executed and os._exit is called with correct return code """ + + # test value + cmd = 'echo "Stop the signal! 
2"' + signal_code = 42 + + # trigger action + check_hardware.stop_test_signal_and_exit(cmd, signal_code, KeyboardInterrupt()) + + # assert correct behavior + os._exit.assert_called_with(signal_code) # exit code correct + check_hardware.check_call.assert_called_with(cmd, shell=True) # command is executed + + def test_wait_for_test_signal_status_waits_for_correct_status(self): + """ Verify that the provided command is executed and os._exit is called with correct return code """ + + # test value + status_cmd = 'mockme' + responses = ['ne', 'ja\n', 'ne'] + waitfor = 'ja' + + with patch.object(check_hardware, 'check_output', MagicMock(side_effect=responses)),\ + patch('time.sleep'): + + # trigger action + check_hardware.wait_for_test_signal_status(status_cmd, waitfor) + + # assert correct behavior + check_hardware.check_output.called_with(status_cmd, shell=True) # command is executed + self.assertEqual(check_hardware.check_output.call_count, 2) + + def test_wait_for_test_signal_status_raises_RuntimeError_when_retry_limit_reached(self): + """ Verify that the provided command is executed and os._exit is called with correct return code """ + + # test value + limit=15 + status_cmd = 'mockme' + responses = ['ne'] * limit # only 30 are read + responses.append('ja') + waitfor = 'ja' + + with patch.object(check_hardware, 'check_output', MagicMock(side_effect=responses)),\ + patch('time.sleep'): + + # trigger action + + with self.assertRaises(RuntimeError): + check_hardware.wait_for_test_signal_status(status_cmd, waitfor, retry_limit=limit) + + # assert correct behavior + check_hardware.check_output.called_with(status_cmd, shell=True) # command is executed + self.assertEqual(check_hardware.check_output.call_count, limit) + + + def test_register_signal_handlers_stops_test_signal_on_POSIX_signal(self): + """ Verify that the provided command is executed and os._exit is called with correct return code """ + + # test value + cmd = 'echo "Stop the signal! 
3"' + + # register handlers we want to test + check_hardware.register_signal_handlers(cmd) + + # trigger action: + pid = os.getpid() + os.kill(pid, signal.SIGINT) # code 2 + os.kill(pid, signal.SIGABRT) # code 6 + os.kill(pid, signal.SIGTERM) # code 15 + + # assert correct behavior + os._exit.assert_has_calls([call(2), call(6), call(15)]) # all signal error codes correct + check_hardware.check_call.assert_called_with(cmd, shell=True) # command is executed + + def test_register_exit_handler_stops_test_signal_on_normal_exit(self): + """ Verify that the provided command is executed and os._exit is called with correct return code """ + + # This test turned out nastier than expected. + # The problem is that we cannot catch the SystemExit within the test, because the atexit hooks only fire after + # the test exits (even after tearDownClass), so we will get a stacktrace printed, but cmake won't count the + # assert failures as failure of the test. + # Note: As long as we use the watchdog, this is redundant anyway and we could also change the implementation + # to explicitly turn the test signal off before it exits and test for that instead. + # But who wants the easy way out, right? ;) + # FIXME: Find a way to make sure this test fails if the assertion fails or find a smarter way to test this. + + # test value + cmd = 'echo "Stop the signal! 4"' + + # assert correct behavior + def assert_on_exit(): + logger.info('>>>----> Asserting on exit!') + check_hardware.check_call.assert_called_with(cmd, shell=True) # command is executed + + # register a handler to trigger the assert. 
+ atexit.register(assert_on_exit) + + # register handlers we want to test + check_hardware.register_exit_handler(cmd) + + # The test will now regularly exit with code 0, hopefully triggering all these hooks + + + def test_start_watchdog_daemon_stops_test_signal_when_provided_pid_is_killed(self): + """ Verify that the provided command is executed when watched process dies """ + + tmpfile = "/tmp/t_checkhardware.%s" % time.time() + + # test value + good_message = 'Stop the signal! 5' + cmd = 'echo "%s" > %s' % (good_message, tmpfile) + + # start dummy process + p = subprocess.Popen(['sleep', '120']) + + # start watchdog for dummy process + check_hardware.start_watchdog_daemon(p.pid, cmd) + + # kill dummy + os.kill(p.pid, signal.SIGKILL) + os.wait() + + # check temporary file to confirm the watchdog command has been executed + for i in range (30): + if os.path.isfile(tmpfile): + break + time.sleep(1) + self.assertTrue(os.path.isfile(tmpfile)) + with open(tmpfile) as f: + lines = f.read().split('\n') + self.assertTrue(good_message in lines) # cmd has been executed + + os.remove(tmpfile) + + + # FIXME: Move this to t_settings once that exists + def test_settings_parset_raises_KeyError_when_accessing_missing_key(self): + + # assert KeyError if setting not there + with self.assertRaises(KeyError): + logger.info(self.elem_settings_no_testsignal.parset['testsignal']['status-cmd']) + + + # FIXME: Move this to t_settings once that exists + def test_settings_contains_testsignal_commands_from_config_file(self): + + # test_values + expected_start_cmd = "echo set config 56.0 -10 | nc ncu 8093" + expected_stop_cmd = "echo bye | nc ncu 8093" + expected_status_cmd = "echo 'Frequency: 56 MHz Power level: -10 dBm RF: ON'" + expected_ok_status = "Frequency: 56 MHz Power level: -10 dBm RF: ON" + + # read settings + f = os.environ.get('srcdir')+'/test-check_hardware.conf' + settings = TestSettings(filename=f) + elem_settings = settings.group('rcumode.5.element') + start_cmd = 
elem_settings.parset['testsignal']['start-cmd'] + stop_cmd = elem_settings.parset['testsignal']['stop-cmd'] + status_cmd = elem_settings.parset['testsignal']['status-cmd'] + ok_status = elem_settings.parset['testsignal']['ok-status'] + + # assert correct values + self.assertEqual(start_cmd, expected_start_cmd) + self.assertEqual(stop_cmd, expected_stop_cmd) + self.assertEqual(status_cmd, expected_status_cmd) + self.assertEqual(ok_status, expected_ok_status) + + #@unittest.skip('disabled due to fork bomb behavior') + def test_main_turns_signal_with_commands_from_settings(self): + + # test values + expected_start_cmd = "echo set config 56.0 -10 | nc ncu 8093" + expected_stop_cmd = "echo bye | nc ncu 8093" + expected_status_cmd = "echo 'Frequency: 56 MHz Power level: -10 dBm RF: ON'" + expected_ok_status = "Frequency: 56 MHz Power level: -10 dBm RF: ON" + + # setup tests + # todo: mock the ParameterSet instead, once the imports are resolved and this can be done straight-forward + check_hardware.conf_file = r'test-check_hardware.conf' + check_hardware.confpath = os.environ.get('srcdir')+'/' + + # pretend to be a station + # FIXME: correct behavior of mocked-out parts should be covered by additional tests + # FIXME: why is all this actually necessary when I only run an element test? 
+ with patch.object(check_hardware, 'read_station_config', MagicMock(return_value=(1, 1, 1, 1, 1, 1, 1))), \ + patch.object(check_hardware, 'safely_start_test_signal'), \ + patch.object(check_hardware, 'wait_for_test_signal_status'), \ + patch.object(check_hardware, 'swlevel', MagicMock(return_value=(5, None))), \ + patch.object(check_hardware, 'rspctl'), \ + patch.object(check_hardware, 'RSP'), \ + patch.object(check_hardware, 'check_active_boards', MagicMock(return_value=(1, 1))), \ + patch.object(check_hardware, 'check_active_tbbdriver', MagicMock(return_value=True)), \ + patch.object(check_hardware, 'check_active_rspdriver', MagicMock(return_value=True)), \ + patch.object(check_hardware, 'reset_rsp_settings'), \ + patch.object(check_hardware, 'HBA'), \ + patch.object(check_hardware, 'reset_48_volt'), \ + patch.object(check_hardware, 'tbbctl'), \ + patch.object(os, 'listdir'), \ + patch.object(os, 'remove'): # I'm scared... + + # patch arguments: pretend script was started with these. + # -TST (test mode) + # -e5: (element test in mode 5) + # Names optimized for disk space + testargs = ["check_hardware.py", '-TST', '-e5', '-s5'] + with patch.object(sys, 'argv', testargs): + # trigger action + check_hardware.main() # Warning: Something acts as a fork bomb when mocks are not setup properly! 
+ + check_hardware.safely_start_test_signal.assert_called_with(expected_start_cmd, expected_stop_cmd) + check_hardware.wait_for_test_signal_status.assert_called_with(expected_status_cmd, expected_ok_status) + self.assertEqual(check_hardware.safely_start_test_signal.call_count, 2) + self.assertEqual(check_hardware.wait_for_test_signal_status.call_count, 2) + + +if __name__ == "__main__": + logger.level = logging.DEBUG + stream_handler = logging.StreamHandler(sys.stdout) + logger.addHandler(stream_handler) + unittest.main() diff --git a/LCU/checkhardware/test/t_check_hardware.run b/LCU/checkhardware/test/t_check_hardware.run new file mode 100755 index 0000000000000000000000000000000000000000..3a348cbba93a2ac84832d7c0579197fa21577f4e --- /dev/null +++ b/LCU/checkhardware/test/t_check_hardware.run @@ -0,0 +1,5 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "checkhardware*" t_check_hardware.py diff --git a/LCU/checkhardware/test/t_check_hardware.sh b/LCU/checkhardware/test/t_check_hardware.sh new file mode 100755 index 0000000000000000000000000000000000000000..43193a4253494dc8d06e0dac425a066d4a5f80b4 --- /dev/null +++ b/LCU/checkhardware/test/t_check_hardware.sh @@ -0,0 +1,2 @@ +#!/bin/sh +./runctest.sh t_check_hardware diff --git a/LCU/checkhardware/test/test-check_hardware.conf b/LCU/checkhardware/test/test-check_hardware.conf new file mode 100644 index 0000000000000000000000000000000000000000..4b7c43ad06d8ed9d560c4c77dc641de6a4a2c085 --- /dev/null +++ b/LCU/checkhardware/test/test-check_hardware.conf @@ -0,0 +1,255 @@ +# +# configuration file for check_hardware.py +# + +[configuration] +version= 00.01 +station= FR606C + +# checks to do if '-l=x' argument is given, all checks behind list.x are executed +# always checks will be done always +# +# S(rcumode) : signal check for rcumode (also down and flat-check in rcumode 1..4). +# O(rcumode) : oscillation check for rcumode. +# SP(rcumode) : spurious check for rcumode. 
+# N(rcumode)[= 300]: noise check for rcumode, optional data time in seconds +# default data time= 120 sec. +# E(rcumode)[= 60] : do all RCU5 element tests, optional data time in seconds. +# default data time= 10 sec. +# M(rcumode) : do modem +# SN(rcumode) : do summator noise +# +# RCU(mode) : do all rcu checks for given mode, no element tests done. +# +# RBC : RSP voltage/temperature check +# TBC : TBB voltage/temperature check +# SPU : SPU voltage +# TM : TBB memory +[check] +always= +list.0= +list.1= SPU,TM,RCU3,RCU5 +list.2= SPU,TM,RCU3,M5,SN5,O5,N5,SP5,S7,E7 +list.3= S3 + +[spu] +temperature.min= 10.0 +temperature.max= 35.0 +voltage.3_3.min= 3.1 +voltage.3_3.max= 3.4 +voltage.3_3.max-drop= 0.3 +voltage.5_0.min= 4.5 +voltage.5_0.max= 5.0 +voltage.5_0.max-drop= 0.3 +voltage.8_0.min= 7.4 +voltage.8_0.max= 8.0 +voltage.8_0.max-drop= 0.3 +voltage.48_0.min= 43.0 +voltage.48_0.max= 48.0 +voltage.48_0.max-drop= 2.0 + +[tbb] +version.tp= 2.4 +version.mp= 3.0 +temperature.min= 10.0 +temperature.max= 45.0 +temperature.tp.min= 10.0 +temperature.tp.max= 75.0 +temperature.tp.max_delta= 10.0 +temperature.mp.min= 10.0 +temperature.mp.max= 75.0 +temperature.mp.max_delta= 10.0 +voltage.1_2.min= 1.1 +voltage.1_2.max= 1.3 +voltage.2_5.min= 2.4 +voltage.2_5.max= 2.6 +voltage.3_3.min= 3.1 +voltage.3_3.max= 3.4 + +[rsp] +version.ap= 8.2 +version.bp= 8.2 +temperature.min= 10.0 +temperature.max= 50.0 +temperature.ap.min= 10.0 +temperature.ap.max= 80.0 +temperature.ap.max_delta= 10.0 +temperature.bp.min= 10.0 +temperature.bp.max= 80.0 +temperature.bp.max_delta= 10.0 +voltage.1_2.min= 1.1 +voltage.1_2.max= 1.3 +voltage.2_5.min= 2.4 +voltage.2_5.max= 2.6 +voltage.3_3.min= 3.1 +voltage.3_3.max= 3.4 + +[rcumode.1-3] +short.mean-pwr.min= 55.0 +short.mean-pwr.max= 61.0 +flat.mean-pwr.min= 61.0 +flat.mean-pwr.max= 64.5 +rf.subbands= 301 +rf.min-sb-pwr= 75.0 +rf.negative-deviation= -3.0 +rf.positive-deviation= 3.0 +noise.negative-deviation= -2.5 +noise.positive-deviation= 2.5 
+noise.max-difference= 1.5 +noise.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +cable-reflection.min-peak-pwr= 0.8 +cable-reflection.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 +down.passband= 231:371 + +[rcumode.2-4] +short.mean-pwr.min= 55.0 +short.mean-pwr.max= 61.0 +flat.mean-pwr.min= 61.0 +flat.mean-pwr.max= 64.5 +rf.subbands= 301 +rf.min-sb-pwr= 75.0 +rf.negative-deviation= -3.0 +rf.positive-deviation= 3.0 +noise.negative-deviation= -2.5 +noise.positive-deviation= 2.5 +noise.max-difference= 1.5 +noise.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +cable-reflection.min-peak-pwr= 0.8 +cable-reflection.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 +down.passband= 231:371 + +[rcumode.5.tile] +short.mean-pwr.min= 55.0 +short.mean-pwr.max= 61.0 +flat.mean-pwr.min= 61.0 +flat.mean-pwr.max= 64.5 +rf.subbands= 105 +rf.min-sb-pwr= 65.0 +rf.negative-deviation= -24.0 +rf.positive-deviation= 12.0 +noise.negative-deviation= -3.0 +noise.positive-deviation= 1.5 +noise.max-difference= 1.5 +noise.passband= 1:511 +summator-noise.min-peak-pwr= 1.2 +summator-noise.passband= 45:135,200:270 +cable-reflection.min-peak-pwr= 0.8 +cable-reflection.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 +testsignal.start-cmd= echo set config 56.0 -10 | nc ncu 8093 +testsignal.stop-cmd= echo bye | nc ncu 8093 +testsignal.status-cmd= echo 'Frequency: 56 MHz Power level: -10 dBm RF: ON' +testsignal.ok-status= Frequency: 56 MHz Power level: -10 dBm RF: ON + +[rcumode.5.element] +rf.subbands= 105 +rf.min-sb-pwr= 65.0 +rf.negative-deviation= -24.0 +rf.positive-deviation= 12.0 +noise.negative-deviation= -3.0 +noise.positive-deviation= 1.5 +noise.max-difference= 1.5 +noise.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 
+testsignal.start-cmd= echo set config 56.0 -10 | nc ncu 8093 +testsignal.stop-cmd= echo bye | nc ncu 8093 +testsignal.status-cmd= echo 'Frequency: 56 MHz Power level: -10 dBm RF: ON' +testsignal.ok-status= Frequency: 56 MHz Power level: -10 dBm RF: ON + +[rcumode.6.tile] +short.mean-pwr.min= 55.0 +short.mean-pwr.max= 61.0 +flat.mean-pwr.min= 61.0 +flat.mean-pwr.max= 64.5 +rf.subbands= 105 +rf.min-sb-pwr= 65.0 +rf.negative-deviation= -24.0 +rf.positive-deviation= 12.0 +noise.negative-deviation= -3.0 +noise.positive-deviation= 1.5 +noise.max-difference= 1.5 +noise.passband= 1:511 +summator-noise.min-peak-pwr= 1.2 +summator-noise.passband= 45:135 +cable-reflection.min-peak-pwr= 0.8 +cable-reflection.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 + +[rcumode.6.element] +rf.subbands= 105 +rf.min-sb-pwr= 65.0 +rf.negative-deviation= -24.0 +rf.positive-deviation= 12.0 +noise.negative-deviation= -3.0 +noise.positive-deviation= 1.5 +noise.max-difference= 1.5 +noise.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 +testsignal.start-cmd= echo set config 127.5 0 | nc ncu 8093 +testsignal.stop-cmd= echo bye | nc ncu 8093 + +[rcumode.7.tile] +short.mean-pwr.min= 55.0 +short.mean-pwr.max= 61.0 +flat.mean-pwr.min= 61.0 +flat.mean-pwr.max= 64.5 +rf.subbands= 105 +rf.min-sb-pwr= 65.0 +rf.negative-deviation= -24.0 +rf.positive-deviation= 12.0 +noise.negative-deviation= -3.0 +noise.positive-deviation= 1.5 +noise.max-difference= 1.5 +noise.passband= 1:511 +summator-noise.min-peak-pwr= 1.2 +summator-noise.passband= 45:135 +cable-reflection.min-peak-pwr= 0.8 +cable-reflection.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 + +[rcumode.7.element] +rf.subbands= 105 +rf.min-sb-pwr= 65.0 +rf.negative-deviation= -24.0 +rf.positive-deviation= 12.0 
+noise.negative-deviation= -3.0 +noise.positive-deviation= 3.0 +noise.max-difference= 1.5 +noise.passband= 1:511 +oscillation.min-peak-pwr= 6.0 +oscillation.passband= 1:511 +spurious.min-peak-pwr= 3.0 +spurious.passband= 1:511 + +# General settings +[paths] +global-data= /globalhome/log/stationtest +local-data= /opt/stationtest/data +local-report-dir= /localhome/stationtest/data +global-report-dir= /globalhome/log/stationtest + +[files] +bad-antenna-list= /localhome/stationtest/data/bad_antenna_list.txt diff --git a/LTA/CMakeLists.txt b/LTA/CMakeLists.txt index f5d0396b660d02d5f8a81fe01c7612e4e9a350e5..665a9d4c3068e927f416d910ebafdb317f990c55 100644 --- a/LTA/CMakeLists.txt +++ b/LTA/CMakeLists.txt @@ -1,5 +1,6 @@ # $Id$ +lofar_add_package(LTACommon) lofar_add_package(LTAIngest) lofar_add_package(ltastorageoverview) lofar_add_package(sip) diff --git a/LTA/LTACommon/CMakeLists.txt b/LTA/LTACommon/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..590e81909a57a76557c620d7509923ac90238782 --- /dev/null +++ b/LTA/LTACommon/CMakeLists.txt @@ -0,0 +1,5 @@ +lofar_package(LTACommon 1.0) + +set(etc_files LTA-SIP.xsd) +lofar_add_sysconf_files(${etc_files} DESTINATION lta) + diff --git a/LTA/sip/lib/LTA-SIP.xsd b/LTA/LTACommon/LTA-SIP.xsd similarity index 98% rename from LTA/sip/lib/LTA-SIP.xsd rename to LTA/LTACommon/LTA-SIP.xsd index edc4f488ea661ee54955ae7e10fa13d2e7cc12bc..4b7d975a255839cf769492087d65e7bffb35b3ae 100644 --- a/LTA/sip/lib/LTA-SIP.xsd +++ b/LTA/LTACommon/LTA-SIP.xsd @@ -1,5 +1,5 @@ <?xml version="1.0" encoding="UTF-8"?> -<xs:schema targetNamespace="http://www.astron.nl/SIP-Lofar" version="2.7.0" xmlns="http://www.astron.nl/SIP-Lofar" xmlns:xs="http://www.w3.org/2001/XMLSchema"> +<xs:schema targetNamespace="http://www.astron.nl/SIP-Lofar" version="2.7.1" xmlns="http://www.astron.nl/SIP-Lofar" xmlns:xs="http://www.w3.org/2001/XMLSchema"> <xs:annotation> <xs:documentation> XML Schema for data model Submission Information 
Package LOFAR Long Term Archive @@ -748,6 +748,15 @@ <xs:enumeration value="PREFACTOR"/> <xs:enumeration value="UNDOCUMENTED"/> </xs:restriction> + </xs:simpleType> + <xs:simpleType name="StorageWriterType"> + <xs:restriction base="xs:string"> + <xs:enumeration value="LofarStorageManager"/> + <xs:enumeration value="CasaStorageManagers"/> <!--encompasses all standard default casacode storagemanagers--> + <xs:enumeration value="DyscoStorageManager"/> + <xs:enumeration value="HDF5Default"/> + <xs:enumeration value="Unknown"/> + </xs:restriction> </xs:simpleType> <xs:annotation> <xs:documentation>This very well defined yet! type probably needs to be an enumeration</xs:documentation> @@ -794,6 +803,8 @@ <xs:element maxOccurs="unbounded" minOccurs="0" name="checksum" type="ChecksumType"/> <xs:element name="fileName" type="xs:string"/> <xs:element name="fileFormat" type="FileFormatType"/> + <xs:element name="storageWriter" type="StorageWriterType"/> + <xs:element name="storageWriterVersion" type="xs:string"/> <xs:element name="processIdentifier" type="IdentifierType"/> </xs:sequence> </xs:complexType> diff --git a/LTA/LTAIngest/LTAIngestClient/lib/ingestbuslistener.py b/LTA/LTAIngest/LTAIngestClient/lib/ingestbuslistener.py index 6a9824fcfc292c0639ba5d3db7a48c0291fce24a..aefdb6600d8bb10157b3a0ea226d02231742ab8b 100644 --- a/LTA/LTAIngest/LTAIngestClient/lib/ingestbuslistener.py +++ b/LTA/LTAIngest/LTAIngestClient/lib/ingestbuslistener.py @@ -156,6 +156,9 @@ class IngestBusListener(AbstractBusListener): if job_dict.get('average_speed') != None: msg += ' avg speed: %s' % humanreadablesize(job_dict.get('average_speed'), 'Bps') + if job_dict.get('srm_url'): + msg += ' srm_url: %s' % job_dict.get('srm_url') + if job_dict.get('message'): msg += ' message: %s' % job_dict.get('message') diff --git a/LTA/LTAIngest/LTAIngestCommon/CMakeLists.txt b/LTA/LTAIngest/LTAIngestCommon/CMakeLists.txt index 9d033933e689531b95e446ca860de70624a5d0b3..9ef3200635ef72f40ba4c4833ca8915094349c41 
100644 --- a/LTA/LTAIngest/LTAIngestCommon/CMakeLists.txt +++ b/LTA/LTAIngest/LTAIngestCommon/CMakeLists.txt @@ -2,6 +2,7 @@ lofar_package(LTAIngestCommon 2.0 DEPENDS PyMessaging PyCommon) python_install(config.py job.py + srm.py DESTINATION lofar/lta/ingest/common) add_subdirectory(test) diff --git a/LTA/LTAIngest/LTAIngestCommon/config.py b/LTA/LTAIngest/LTAIngestCommon/config.py index 3ffeb428fdb5d8783904753125d704621fcec464..8b741c7cf66a261851f70f3faf8ff30bf9eeaaa7 100644 --- a/LTA/LTAIngest/LTAIngestCommon/config.py +++ b/LTA/LTAIngest/LTAIngestCommon/config.py @@ -11,7 +11,7 @@ DEFAULT_INGEST_NOTIFICATION_BUSNAME = adaptNameToEnvironment('lofar.lta.ingest.n DEFAULT_INGEST_NOTIFICATION_PREFIX = 'LTAIngest.' DEFAULT_INGEST_NOTIFICATION_SUBJECTS=DEFAULT_INGEST_NOTIFICATION_PREFIX+'*' -DEFAULT_BROKER = '10.178.1.3' if isProductionEnvironment() else 'localhost' +DEFAULT_BROKER = 'lexar003' if isProductionEnvironment() else 'localhost' def hostnameToIp(hostname): if 'lexar001' in hostname: diff --git a/LTA/LTAIngest/LTAIngestCommon/srm.py b/LTA/LTAIngest/LTAIngestCommon/srm.py new file mode 100755 index 0000000000000000000000000000000000000000..abed17b82dbaf09bc4a94647f5243cbd1076c28e --- /dev/null +++ b/LTA/LTAIngest/LTAIngestCommon/srm.py @@ -0,0 +1,264 @@ +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id: $ + +from subprocess import Popen, PIPE +import socket +import os +import time +import re +from datetime import datetime, timedelta + +import logging + +logger = logging.getLogger(__name__) + +""" +This srm module provides python methods for the most used srm calls like srmls, srmrm, etc. +Furthermore, this module provides methods for surl (srm-url) and turl (transfer-url) manipulation. +""" + + +class SrmException(Exception): + """ Generic exception for srm errors""" + pass + + +def srmrm(surl, log_prefix='', timeout=-1): + """ remove file from srm + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (stdout, stderr, returncode) tuple with the results of the system call to srm. + """ + logger.info('%s removing surl: %s', log_prefix, surl) + return __execute(['/bin/bash', '-c', 'srmrm %s' % (surl,)], log_prefix, timeout) + + +def srmrmdir(surl, log_prefix='', timeout=-1): + """ remove (empty) directory from srm + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (stdout, stderr, returncode) tuple with the results of the system call to srm. 
+ """ + return __execute(['/bin/bash', '-c', 'srmrmdir %s' % (surl,)], log_prefix, timeout) + + +def srmmkdir(surl, log_prefix='', timeout=-1): + """ create directory in srm + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (stdout, stderr, returncode) tuple with the results of the system call to srm. + """ + return __execute(['/bin/bash', '-c', 'srmmkdir -retry_num=0 %s' % (surl,)], log_prefix, timeout) + + +def srmls(surl, log_prefix='', timeout=-1): + """ get listing in directory + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (stdout, stderr, returncode) tuple with the results of the system call to srm. + """ + return __execute(['/bin/bash', '-c', 'srmls %s' % (surl,)], log_prefix, timeout) + + +def srmll(surl, log_prefix='', timeout=-1): + """ get detailed listing of a surl (directory or file) + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (stdout, stderr, returncode) tuple with the results of the system call to srm. + """ + return __execute(['/bin/bash', '-c', 'srmls -l %s' % (surl,)], log_prefix, timeout) + + +def __execute(cmd, log_prefix='', timeout=-1): + """ helper method, wrapper around subprocess. 
+ execute command and return (stdout, stderr, returncode) tuple + :param cmd: a subprocess Popen cmd like list + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (stdout, stderr, returncode) tuple + """ + if log_prefix: + if not isinstance(log_prefix, basestring): + log_prefix = str(log_prefix) + if log_prefix[-1] != ' ': + log_prefix += ' ' + + logger.info('%sexecuting: %s', log_prefix, ' '.join(cmd)) + p_cmd = Popen(cmd, stdout=PIPE, stderr=PIPE) + + if timeout > 0: + timeout = timedelta(seconds=timeout) + logger.debug('%swaiting at most %s for command to finish...', log_prefix, timeout) + start_wait = datetime.now() + while datetime.now() - start_wait < timeout: + if p_cmd.poll() is not None: + break + time.sleep(1) + + if p_cmd.poll() is None: + raise SrmException('%s%s did not finish within %s.' % (log_prefix, cmd, timeout)) + + stdout, stderr = p_cmd.communicate() + return stdout, stderr, p_cmd.returncode + + +def get_srm_size_and_a32_checksum(surl, log_prefix='', timeout=-1): + """ get file size and checksum from srm via srmll + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + :param log_prefix: an optional prefix for all log lines (can be used to provide a unique identifier to filter log lines in a file) + :param timeout: optional timeout in seconds + :return: (success, file_size, a32_checksum) tuple. 
+ """ + try: + output, errors, code = srmll(surl, log_prefix, timeout) + logger.debug(output) + + if code != 0: + return False, None, None + + path_line = output.strip() + path_line_items = [x.strip() for x in path_line.split()] + + if len(path_line_items) < 2: + # path line shorter than expected + return False, None, None + + file_size = int(path_line_items[0]) + + if 'Checksum type:' not in output: + return False, None, None + + if 'Checksum type:' in output: + cstype = output.split('Checksum type:')[1].split()[0].strip() + if cstype.lower() != 'adler32': + return False, None, None + + if 'Checksum value:' in output: + a32_value = output.split('Checksum value:')[1].lstrip().split()[0] + return True, file_size, a32_value + + except Exception as e: + logger.error(e) + + return False, None, None + + +def create_missing_directories(surl): + """ recursively checks for presence of parent directory and created the missing part of a tree + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :return: exit-code of srmmkdir of final dir + """ + parent, child = os.path.split(surl) + missing = [] + + # determine missing dirs + while parent: + logger.info('checking path: %s' % parent) + o, e, code = srmls(parent) + if code == 0: + logger.info('srmls returned successfully, so this path apparently exists: %s' % parent) + break + else: + parent, child = os.path.split(parent) + missing.append(child) + + # recreate missing dirs + while len(missing) > 0: + parent = parent + '/' + missing.pop() + code = srmmkdir(parent)[2] + if code != 0: + logger.info('failed to create missing directory: %s' % parent) + return code + + logger.info('successfully created parent directory: %s' % parent) + return 0 + + +def convert_surl_to_turl(surl): + """ converts given srm url of an LTA site into a transport url as needed by gridftp. + """ + if 'grid.sara.nl' in surl: + # sara provides dynamic hostnames via a round-robin dns. 
Get a random/dynamic host as provided by them. + dyn_hostname = socket.getfqdn(socket.gethostbyname('gridftp.grid.sara.nl')) + return re.sub('srm://srm\.grid\.sara\.nl:?\d*', 'gsiftp://%s:2811' % (dyn_hostname,), surl) + + if 'lta-head.lofar.psnc.pl' in surl: + # poznan provides dynamic hostnames via a round-robin dns. Get a random/dynamic host as provided by them. + dyn_hostname = socket.getfqdn(socket.gethostbyname('gridftp.lofar.psnc.pl')) + return re.sub('srm://lta-head\.lofar\.psnc\.pl:?\d*', 'gsiftp://%s:2811' % (dyn_hostname,), surl) + + if 'lofar-srm.fz-juelich.de' in surl: + # juelich provides dynamic hostnames via a round-robin dns. Get a random/dynamic host as provided by them. + dyn_hostname = socket.getfqdn(socket.gethostbyname('lofar-gridftp.fz-juelich.de')) + return re.sub('srm://lofar-srm\.fz-juelich\.de:?\d*', 'gsiftp://%s:2811' % (dyn_hostname,), surl) + + raise SrmException('Cannot convert surl to turl. Unknown destination in surl: \'%s\'.' % surl) + + +def get_site_surl(surl): + """ + extract the site surl from a given surl. + for example srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + becomes: srm://lofar-srm.fz-juelich.de:8443 + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :return: the 'site-part' of the surl, like: srm://lofar-srm.fz-juelich.de:8443 + """ + if not surl.startswith('srm://'): + raise SrmException('invalid srm_url: %s' % surl) + + return 'srm://' + surl[6:].split('/')[0] + + +def get_path_in_site(surl): + """ + cut the site 'prefix' of the srm url and returns the path. 
+ for example srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + becomes: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + :return: the 'path-part' of the surl, like: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + """ + site_surl = get_site_surl(surl) + return surl[len(site_surl):].rstrip('/') + + +def get_dir_path_in_site(surl): + """ + cut the site 'prefix' of the srm url and cut an optional file 'postfix' and return the directory path. + It is assumed that a filename contains a '.' + for example (1) srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + becomes: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + for example (2) srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + becomes: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :param surl: an srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + :return: the 'dir-path-part' of the surl, like: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + """ + path = get_path_in_site(surl) + parts = path.split('/') + if '.' in parts[-1]: + # last part is a filename, because it contains a '.' 
+ # return only dir-parts + return '/'.join(parts[:-1]) + + # path contains no filename, just return it + return path diff --git a/LTA/LTAIngest/LTAIngestCommon/test/CMakeLists.txt b/LTA/LTAIngest/LTAIngestCommon/test/CMakeLists.txt index a028c7174cde232159349f0b0f04662330621def..fbd25d400ee9ae5e6d134564ba5a479666600dc3 100644 --- a/LTA/LTAIngest/LTAIngestCommon/test/CMakeLists.txt +++ b/LTA/LTAIngest/LTAIngestCommon/test/CMakeLists.txt @@ -1,5 +1,6 @@ include(LofarCTest) lofar_add_test(t_job) +lofar_add_test(t_srm) diff --git a/LTA/LTAIngest/LTAIngestCommon/test/t_srm.py b/LTA/LTAIngest/LTAIngestCommon/test/t_srm.py new file mode 100755 index 0000000000000000000000000000000000000000..d8b3f79beb8c13fff80adab5228bb479ff98b804 --- /dev/null +++ b/LTA/LTAIngest/LTAIngestCommon/test/t_srm.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python + +import unittest +from lofar.lta.ingest.common.srm import * + +import logging +logger = logging.getLogger(__name__) + +class TestSrm(unittest.TestCase): + """ + Test various methods from the srm module. + Unfortunately, we cannot do unittests on actual srm calls, as we need real srm sites and certificates for that. 
+ """ + + def test_get_site_surl(self): + self.assertEqual('srm://srm.grid.sara.nl:8443', + get_site_surl('srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346/L658346_SB019_uv.MS_8190b749.tar')) + + self.assertEqual('srm://lofar-srm.fz-juelich.de:8443', + get_site_surl('srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884')) + + self.assertEqual('srm://lta-head.lofar.psnc.pl:8443', + get_site_surl('srm://lta-head.lofar.psnc.pl:8443/lofar/ops/projects/lt10_004/658456/L658456_SAP000_B000_P012_bf_03c23eb1.tar')) + + with self.assertRaises(SrmException) as context: + get_site_surl('http://nu.nl') + self.assertTrue('invalid srm_url' in context.exception.message) + + def test_path_in_site(self): + self.assertEqual('/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346/L658346_SB019_uv.MS_8190b749.tar', + get_path_in_site('srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346/L658346_SB019_uv.MS_8190b749.tar')) + + self.assertEqual('/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884', + get_path_in_site('srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884')) + + self.assertEqual('/lofar/ops/projects/lt10_004/658456/L658456_SAP000_B000_P012_bf_03c23eb1.tar', + get_path_in_site('srm://lta-head.lofar.psnc.pl:8443/lofar/ops/projects/lt10_004/658456/L658456_SAP000_B000_P012_bf_03c23eb1.tar')) + + # check if tailing '/' is removed + self.assertEqual('/foo/bar', + get_path_in_site('srm://lta-head.lofar.psnc.pl:8443/foo/bar/')) + + with self.assertRaises(SrmException) as context: + get_path_in_site('http://nu.nl') + self.assertTrue('invalid srm_url' in context.exception.message) + + def test_dir_path_in_site(self): + self.assertEqual('/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346', + get_dir_path_in_site('srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346')) + + 
self.assertEqual('/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346', + get_dir_path_in_site('srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346/')) + + self.assertEqual('/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346', + get_dir_path_in_site('srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/lc10_010/658346/L658346_SB019_uv.MS_8190b749.tar')) + + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG) + unittest.main() diff --git a/LTA/LTAIngest/LTAIngestCommon/test/t_srm.run b/LTA/LTAIngest/LTAIngestCommon/test/t_srm.run new file mode 100755 index 0000000000000000000000000000000000000000..1f133d359b9c2d2e3e1c71112190fd35480d7e20 --- /dev/null +++ b/LTA/LTAIngest/LTAIngestCommon/test/t_srm.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*srm*" t_srm.py + diff --git a/LTA/LTAIngest/LTAIngestCommon/test/t_srm.sh b/LTA/LTAIngest/LTAIngestCommon/test/t_srm.sh new file mode 100755 index 0000000000000000000000000000000000000000..88d8d4090d12421bc8a74d4921e4ddd080a0b572 --- /dev/null +++ b/LTA/LTAIngest/LTAIngestCommon/test/t_srm.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_srm diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py index 77b2743f01026d5e1503fbe2e53b2027b02338e5..3c9ee04fe2f5b335d9097d086b71dd1a363f6ec2 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py @@ -445,7 +445,6 @@ class IngestJobManager: job_admin_dict['runs'][job_admin_dict.get('retry_attempt', 0)]['started_at'] = datetime.utcnow() if new_status == JobProduced or new_status == JobTransferFailed: - 
job_admin_dict['runs'][job_admin_dict.get('retry_attempt', 0)] = {} job_admin_dict['runs'][job_admin_dict.get('retry_attempt', 0)]['finished_at'] = datetime.utcnow() if lta_site: diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestmomadapter.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestmomadapter.py index c3f960515bfc98f8482808627afa70147205c848..9f9117a575051ecfcdc33c9b739e05e59a6e2af4 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestmomadapter.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestmomadapter.py @@ -56,6 +56,7 @@ class IngestBusListenerForMomAdapter(IngestBusListener): self._busname = busname self._broker = broker self._momrpc = momrpc + self._removed_export_ids = set() # keep track of which export_id's were removed, so we don't have to remove them again super(IngestBusListenerForMomAdapter, self).__init__(busname=busname, subjects=subjects, broker=broker, **kwargs) @@ -73,10 +74,16 @@ class IngestBusListenerForMomAdapter(IngestBusListener): export_id = int(job_id.split('_')[1]) if export_id and export_id not in self._momrpc.getObjectDetails(export_id): - logger.warn('Export job %s cannot be found (anymore) in mom. Removing export job from ingest queue', export_id) + if export_id not in self._removed_export_ids: + logger.warn('Export job %s cannot be found (anymore) in mom. 
Removing export job from ingest queue', export_id) + + # keep track of which export_id's were removed, so we don't have to remove them again + # this keeps stuff flowing faster + self._removed_export_ids.add(export_id) + + with IngestRPC(broker=self._broker) as ingest_rpc: + ingest_rpc.removeExportJob(export_id) - with IngestRPC(broker=self._broker) as ingest_rpc: - ingest_rpc.removeExportJob(export_id) return with ToBus(self._busname, broker=self._broker) as tobus: @@ -247,6 +254,17 @@ class IngestMomAdapter: if key in job_dict: job_dict2[key] = job_dict[key] + if 'srm_url' in job_dict: + try: + # try to parse the srm_url and get the observation dir name from the dataproduct srmurl + # example url: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + # should become: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + srm_url = job_dict['srm_url'] + srm_dir_url = '/'.join(srm_url.split('/')[:-2]) + job_dict2['srm_url'] = srm_dir_url + except Exception as e: + logger.error("could not derive srm_dir_url from %s. 
error=%s", srm_url, e) + message = 'Ingested %s/%s (%.1f%%) dataproducts for otdb_id %s mom2id %s' % (len(ingested_dps), len(dps), 100.0*len(ingested_dps)/len(dps), diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py index 71837dd8fd5c3f4f24803b4daa7c2290156b5f86..c8ebbf5499f200211ee9f61e3baba38aa0142164 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/test/t_ingestjobmanagementserver.py @@ -97,6 +97,8 @@ try: if percentage_done: content['percentage_done'] = percentage_done event_msg = EventMessage(context=config.DEFAULT_INGEST_NOTIFICATION_PREFIX + event, content=content) + logger.info('sending test event message on %s subject=%s content=%s', + test_notifier.address, event_msg.subject, event_msg.content) test_notifier.send(event_msg) def receiveJobForTransfer(): @@ -145,7 +147,7 @@ try: #just finish normally sendNotification('JobFinished', job1['JobId']) - time.sleep(0.5) + time.sleep(1.0) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #check report @@ -162,7 +164,7 @@ try: assert job2['JobId'] == 'A_999999999_777777778_L888888888_SB001_uv.MS', 'unexpected job %s' % job2['JobId'] sendNotification('JobStarted', job2['JobId']) - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #check report @@ -177,7 +179,7 @@ try: # let job2 fail sendNotification('JobTransferFailed', job2['JobId'], message='something went wrong') - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() 
== 2, 'expected 2 jobs unfinished' #check report @@ -196,7 +198,7 @@ try: assert job3['JobId'] == 'A_999999999_777777779_L888888888_SB002_uv.MS', 'unexpected job %s' % job3['JobId'] sendNotification('JobStarted', job3['JobId']) - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #check report @@ -214,7 +216,7 @@ try: #3rd job will fail all the time sendNotification('JobTransferFailed', job3['JobId'], message='something went wrong') - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #check report @@ -236,7 +238,7 @@ try: assert job2['JobId'] == 'A_999999999_777777778_L888888888_SB001_uv.MS', 'unexpected job %s' % job2['JobId'] sendNotification('JobStarted', job2['JobId']) - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #keep job2 running while we process job3 @@ -259,7 +261,7 @@ try: assert job3['JobId'] == 'A_999999999_777777779_L888888888_SB002_uv.MS', 'unexpected job %s' % job3['JobId'] sendNotification('JobStarted', job3['JobId']) - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #check report @@ -279,7 +281,7 @@ try: #3rd job will fail again sendNotification('JobTransferFailed', job3['JobId'], message='something went wrong') - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 2, 'expected 2 jobs unfinished' #check report @@ -302,7 +304,7 @@ try: sendNotification('JobFinished', 
job2['JobId']) #one job to go - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 1, 'expected 1 job unfinished' #check report @@ -328,7 +330,7 @@ try: assert job3['JobId'] == 'A_999999999_777777779_L888888888_SB002_uv.MS', 'unexpected job %s' % job3['JobId'] sendNotification('JobStarted', job3['JobId']) - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 1, 'expected 1 job unfinished' #check report @@ -354,14 +356,14 @@ try: #3rd job should have failed after 3 retries #no more jobs to go - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout assert manager.nrOfUnfinishedJobs() == 0, 'expected 0 jobs unfinished' #there should be no more reports, cause the job group 999999999 is finished as a whole #and is removed from the manager at this point reports = manager.getStatusReportDict() assert 0 == len(reports), 'expected 0 reports' - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout jobgroup_999999999_failed_dir = os.path.join(config.JOBS_DIR, 'failed', 'MoM_999999999') failed_jobgroup_999999999_files = [os.path.join(jobgroup_999999999_failed_dir, f) for f in @@ -385,14 +387,14 @@ try: sendNotification('JobStarted', job3['JobId']) sendNotification('JobFinished', job3['JobId']) - time.sleep(0.5) + time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout #there should be no more reports, cause the job group 999999999 is finished as a whole #and is removed from the manager at this point reports = manager.getStatusReportDict() assert 0 == len(reports), 'expected 0 reports' assert manager.nrOfUnfinishedJobs() == 0, 'expected 0 jobs unfinished' - time.sleep(0.5) + 
time.sleep(1.5) #TODO: should not wait fixed amount of time, but poll for expected output with a timeout manager.quit() manager_thread.join() diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestServerCommon/config.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestServerCommon/config.py index b3094795ab86243e2e5781988b2d8fa1d50f190c..36670b04aeb13fa9c1e789534d7835769a79d7be 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestServerCommon/config.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestServerCommon/config.py @@ -26,7 +26,9 @@ DEFAULT_JOB_PRIORITY = 4 MAX_NR_OF_JOBS=40 MAX_USED_BANDWITH_TO_START_NEW_JOBS=9.9e9 #Gbps -NET_IF_TO_MONITOR='p2p1' +NET_IF_TO_MONITOR=['p2p1.2030', # outgoing traffic to Juelich + 'p2p1.2033', # outgoing traffic to Poznan + 'p2p1.992'] # outgoing traffic to SARA GLOBUS_TIMEOUT = 1800 diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/CMakeLists.txt b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/CMakeLists.txt index ba2deb04d2594ccb482087a7409ec8e1c8231596..76bf2d12ba7ff71f10127bffea976f1d31937124 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/CMakeLists.txt +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/CMakeLists.txt @@ -1,8 +1,7 @@ -lofar_package(LTAIngestTransferServer 2.0 DEPENDS LTAIngestCommon LTAIngestServerCommon PyMessaging PyCommon) +lofar_package(LTAIngestTransferServer 2.0 DEPENDS LTACommon LTAIngestCommon LTAIngestServerCommon PyMessaging PyCommon) include(PythonInstall) add_subdirectory(lib) add_subdirectory(bin) add_subdirectory(test) -add_subdirectory(etc) diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/CMakeLists.txt b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/CMakeLists.txt deleted file mode 100644 index 1709d8bf1686108026532773d182a71506473867..0000000000000000000000000000000000000000 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ -set(etc_files LTA-SIP.xsd) - 
-install_files(/etc/lta FILES ${etc_files}) - diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/LTA-SIP.xsd b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/LTA-SIP.xsd deleted file mode 100644 index edc4f488ea661ee54955ae7e10fa13d2e7cc12bc..0000000000000000000000000000000000000000 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/etc/LTA-SIP.xsd +++ /dev/null @@ -1,1219 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<xs:schema targetNamespace="http://www.astron.nl/SIP-Lofar" version="2.7.0" xmlns="http://www.astron.nl/SIP-Lofar" xmlns:xs="http://www.w3.org/2001/XMLSchema"> - <xs:annotation> - <xs:documentation> - XML Schema for data model Submission Information Package LOFAR Long Term Archive - - The basic layout of a SIP is to have the Project that owns the data described, the DataProduct - that is being ingested, and the process (Observation or PipelineRun) that generated it. - If the generating process is a PipelineRun, then it will usually have input DataProducts. These - will recursively be described in the relatedDataProduct entries, with the processes that generated - them. These relatedDataProducts do not need to have been ingested into the archive themselves. It is - sufficient that there are described, with their related Obsrvation/Pipelines in this document to - be able to recreate the full provenance of the DataProduct. - </xs:documentation> - </xs:annotation> - <xs:annotation> - <xs:documentation>============================Generic Types============================ - - Below are generic types that are used at various places within the document. It basically contains descriptions - of units like Frequency, Length, Time, astronomical entities like Equinox, Pointing and Angle - and some useful container types like lists and indentifiers. 
- </xs:documentation> - </xs:annotation> - <xs:simpleType name="FrequencyUnit"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Hz"/> - <xs:enumeration value="kHz"/> - <xs:enumeration value="MHz"/> - <xs:enumeration value="GHz"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="Frequency"> - <xs:simpleContent> - <xs:extension base="xs:double"> - <xs:attribute name="units" type="FrequencyUnit" use="required"/> - </xs:extension> - </xs:simpleContent> - </xs:complexType> - <xs:simpleType name="LengthUnit"> - <xs:restriction base="xs:string"> - <xs:enumeration value="m"/> - <xs:enumeration value="km"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="Length"> - <xs:simpleContent> - <xs:extension base="xs:double"> - <xs:attribute name="units" type="LengthUnit" use="required"/> - </xs:extension> - </xs:simpleContent> - </xs:complexType> - <xs:simpleType name="TimeUnit"> - <xs:restriction base="xs:string"> - <xs:enumeration value="s"/> - <xs:enumeration value="ms"/> - <xs:enumeration value="us"/> - <xs:enumeration value="ns"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="Time"> - <xs:simpleContent> - <xs:extension base="xs:double"> - <xs:attribute name="units" type="TimeUnit" use="required"/> - </xs:extension> - </xs:simpleContent> - </xs:complexType> - <xs:simpleType name="AngleUnit"> - <xs:restriction base="xs:string"> - <xs:enumeration value="radians"/> - <xs:enumeration value="degrees"/> - <xs:enumeration value="arcsec"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="Angle"> - <xs:simpleContent> - <xs:extension base="xs:double"> - <xs:attribute name="units" type="AngleUnit" use="required"/> - </xs:extension> - </xs:simpleContent> - </xs:complexType> - <xs:simpleType name="PixelUnit"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Jy/beam"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="Pixel"> - <xs:simpleContent> - <xs:extension base="xs:double"> - <xs:attribute 
name="units" type="PixelUnit" use="required"/> - </xs:extension> - </xs:simpleContent> - </xs:complexType> - <xs:simpleType name="ListOfDouble"> - <xs:list itemType="xs:double"/> - </xs:simpleType> - <xs:simpleType name="ListOfString"> - <xs:list itemType="xs:string"/> - </xs:simpleType> - <xs:simpleType name="ListOfSubbands"> - <xs:list itemType="xs:unsignedShort"/> - </xs:simpleType> - <xs:complexType name="ListOfFrequencies"> - <xs:sequence> - <xs:element name="frequencies" type="ListOfDouble"/> - <xs:element name="unit" type="FrequencyUnit"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>Generic identifier type. Currently two sources are supported: MoM and SAS - </xs:documentation> - </xs:annotation> - <xs:complexType name="IdentifierType"> - <xs:sequence> - <xs:element name="source" type="xs:string"/> - <xs:element name="identifier" type="xs:nonNegativeInteger"/> - <xs:element name="name" minOccurs="0" type="xs:string"/> - <xs:element name="label" minOccurs="0" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:simpleType name="EquinoxType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="B1950"/> - <xs:enumeration value="J2000"/> - <xs:enumeration value="SUN"/> - <xs:enumeration value="JUPITER"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>Pointing, either RA/DEC or AZ/EL, can't be coded as a choice due to Eclipse parser limitations.</xs:documentation> - </xs:annotation> - <xs:complexType name="Pointing"> - <xs:sequence> - <xs:choice> - <xs:element name="rightAscension" type="Angle"/> - <xs:element name="azimuth" type="Angle"/> - </xs:choice> - <xs:choice> - <xs:element name="declination" type="Angle"/> - <xs:element name="altitude" type="Angle"/> - </xs:choice> - <xs:element name="equinox" type="EquinoxType"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================Stations============================ - - Below is information 
related to Stations and AntennaFields. Stations have one or more AntennaFields, each of which is - a single sensing element within the array for most observation types. AntennaFields are the end points for baselines in - Interferometry mode, the array elements in Beam Formed mode, etc. Only Transient Buffer Mode reads the individual antennas. - </xs:documentation> - </xs:annotation> - <xs:annotation> - <xs:documentation>Usually the coordinates of a station will by in x,y,z in ITRF, but we also have the option to use coordinates on a sphere.</xs:documentation> - </xs:annotation> - <xs:complexType name="Coordinates"> - <xs:sequence> - <xs:element name="coordinateSystem"> - <xs:simpleType> - <xs:restriction base="xs:string"> - <xs:enumeration value="WGS84"/> - <xs:enumeration value="ITRF2000"/> - <xs:enumeration value="ITRF2005"/> - </xs:restriction> - </xs:simpleType> - </xs:element> - <xs:choice> - <xs:sequence> - <xs:element name="x" type="Length"/> - <xs:element name="y" type="Length"/> - <xs:element name="z" type="Length"/> - </xs:sequence> - <xs:sequence> - <xs:element name="radius" type="Length"/> - <xs:element name="longitude" type="Angle"/> - <xs:element name="latitude" type="Angle"/> - </xs:sequence> - </xs:choice> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation> - AntennaFields per station are currently either 2 (HBA/LBA) or 3 for core stations (HBA0/HBA1/LBA). - When the signals from HBA0 and HBA1 are combined, they show up as HBA in the SIP, not HBA0+HBA1. 
- </xs:documentation> - </xs:annotation> - <xs:simpleType name="AntennaFieldType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="HBA0"/> - <xs:enumeration value="HBA1"/> - <xs:enumeration value="HBA"/> - <xs:enumeration value="LBA"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>Currently Superterp is not a separate type here.</xs:documentation> - </xs:annotation> - <xs:simpleType name="StationTypeType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Core"/> - <xs:enumeration value="Remote"/> - <xs:enumeration value="International"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="AntennaField"> - <xs:sequence> - <xs:element name="name" type="AntennaFieldType"/> - <xs:element name="location" type="Coordinates"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="Stations"> - <xs:sequence> - <xs:element maxOccurs="unbounded" name="station" type="Station"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>Currently only one (LBA/HBA/HBA0/HBA1) or two (HBA0+HBA1) antennafields can be active at the same time.</xs:documentation> - </xs:annotation> - <xs:complexType name="Station"> - <xs:sequence> - <xs:element name="name" type="xs:string"/> - <xs:element name="stationType" type="StationTypeType"/> - <xs:element minOccurs="1" maxOccurs="2" name="antennaField" type="AntennaField"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================Process============================ - - Below is the generic Process class. Currently it has two subclasses: Observation and PipelineRun. - All processes are currently run in Tier0, specified in MoM/Scheduler and controlled by SAS/MAC. 
- processIdentifier: MoM Id - observationId: SAS VIC Tree Id (Note that it's not called observationId-entifier) - strategyName/Description: Strategy template for observing/processing, known within SAS as the default template - </xs:documentation> - </xs:annotation> - <xs:annotation> - <xs:documentation>Currently only one relation type is defined (GroupID), but others are likely to be added, like Target-Calibrator, Slice. - Please note that this also can apply to Sub-Array Pointings.</xs:documentation> - </xs:annotation> - <xs:simpleType name="ProcessRelationType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="GroupID"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="ProcessRelation"> - <xs:sequence> - <xs:element name="relationType" type="ProcessRelationType"/> - <xs:element name="identifier" type="IdentifierType"/> - <xs:element minOccurs="0" name="name" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="ProcessRelations"> - <xs:sequence> - <xs:element minOccurs="0" maxOccurs="unbounded" name="relation" type="ProcessRelation"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="Process"> - <xs:sequence> - <xs:element name="processIdentifier" type="IdentifierType"/> - <xs:element name="observationId" type="IdentifierType"/> - <xs:element minOccurs="0" name="parset" type="IdentifierType"/> - <xs:element name="strategyName" type="xs:string"/> - <xs:element name="strategyDescription" type="xs:string"/> - <xs:element name="startTime" type="xs:dateTime"/> - <xs:element name="duration" type="xs:duration"/> - <xs:element name="relations" type="ProcessRelations"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================Observation============================ - - Below is information related to the Observation process. 
- </xs:documentation> - </xs:annotation> - <xs:simpleType name="FilterSelectionType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="10-70 MHz"/> - <xs:enumeration value="10-90 MHz"/> - <xs:enumeration value="30-70 MHz"/> - <xs:enumeration value="30-90 MHz"/> - <xs:enumeration value="110-190 MHz"/> - <xs:enumeration value="170-230 MHz"/> - <xs:enumeration value="210-250 MHz"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="ClockType"> - <xs:simpleContent> - <xs:restriction base="Frequency"> - <xs:enumeration value="160"/> - <xs:enumeration value="200"/> - <xs:attribute fixed="MHz" name="units" type="FrequencyUnit" use="required"/> - </xs:restriction> - </xs:simpleContent> - </xs:complexType> - <xs:simpleType name="AntennaSetType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="HBA Zero"/> - <xs:enumeration value="HBA One"/> - <xs:enumeration value="HBA Dual"/> - <xs:enumeration value="HBA Joined"/> - <xs:enumeration value="LBA Outer"/> - <xs:enumeration value="LBA Inner"/> - <xs:enumeration value="LBA Sparse Even"/> - <xs:enumeration value="LBA Sparse Odd"/> - <xs:enumeration value="LBA X"/> - <xs:enumeration value="LBA Y"/> - <xs:enumeration value="HBA Zero Inner"/> - <xs:enumeration value="HBA One Inner"/> - <xs:enumeration value="HBA Dual Inner"/> - <xs:enumeration value="HBA Joined Inner"/> - </xs:restriction> - </xs:simpleType> - <xs:simpleType name="StationSelectionType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Single"/> - <xs:enumeration value="Core"/> - <xs:enumeration value="Dutch"/> - <xs:enumeration value="International"/> - <xs:enumeration value="Custom"/> - </xs:restriction> - </xs:simpleType> - <xs:simpleType name="ObservingModeType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Interferometer"/> - <xs:enumeration value="Beam Observation"/> - <xs:enumeration value="TBB (standalone)"/> - <xs:enumeration value="TBB (piggyback)"/> - <xs:enumeration value="Direct Data 
Storage"/> - <xs:enumeration value="Non Standard"/> - <xs:enumeration value="Unknown"/> - </xs:restriction> - </xs:simpleType> - <xs:simpleType name="TimeSystemType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="UTC"/> - <xs:enumeration value="LST"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>Observation is one of the core classes of the SIP. It describes one of the main datagenerating processes. - The big difference with a PipelineRun process is in that an Observation has no input dataproducts as it is a - direct measurement of the physical process. - - Notes:Is unsignedShort enough for numberOftransientBufferBoardEvents? - Backward compatibility with the BlueGene: With old BG observations channelWidth and channelsPerSubband are set at Observation level and not at RealTimeProcess level. - For BG frequencyDownsamplingFactor and numberOfCollapsedChannels are set. - For Cobalt observations the reverse is true: channelWidth and channelsPerSubband are only set at RealTimeProcess level and not at Observation level. - For Cobalt frequencyDownsamplingFactor en numberOfCollapsedChannels are not set. 
- </xs:documentation> - </xs:annotation> - <xs:complexType name="Observation"> - <xs:complexContent> - <xs:extension base="Process"> - <xs:sequence> - <xs:element name="observingMode" type="ObservingModeType"/> - <xs:element minOccurs="0" name="observationDescription" type="xs:string"/> - <xs:element name="instrumentFilter" type="FilterSelectionType"/> - <xs:element name="clock" type="ClockType"/> - <xs:element name="stationSelection" type="StationSelectionType"/> - <xs:element name="antennaSet" type="AntennaSetType"/> - <xs:element name="timeSystem" type="TimeSystemType"/> - <xs:element minOccurs="0" name="channelWidth" type="Frequency"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="channelsPerSubband" type="xs:unsignedShort"/><!--BlueGene compatibility--> - <xs:element name="numberOfStations" type="xs:unsignedByte"/> - <xs:element name="stations" type="Stations"/> - <xs:element name="numberOfSubArrayPointings" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="subArrayPointings" type="SubArrayPointings"/> - <xs:element name="numberOftransientBufferBoardEvents" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="transientBufferBoardEvents" type="TransientBufferBoardEvents"/> - <xs:element name="numberOfCorrelatedDataProducts" type="xs:unsignedShort"/> - <xs:element name="numberOfBeamFormedDataProducts" type="xs:unsignedShort"/> - <xs:element name="numberOfBitsPerSample" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="DirectDataMeasurement"> - <xs:complexContent> - <xs:extension base="Process"> - <xs:sequence> - <xs:element name="observingMode" type="ObservingModeType"/> - <xs:element name="station" type="Station"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>===================Generic/Unspecified====================== - - Please note that the difference between Generic and 
Unspecified is that the first describes a non standard process, while the second describes an unknown - process. The latter is mostly used when there are partial errors during the ingest of data into the archive. - </xs:documentation> - </xs:annotation> - <xs:complexType name="GenericMeasurement"> - <xs:complexContent> - <xs:extension base="Process"> - <xs:sequence> - <xs:element name="observingMode" type="ObservingModeType"/> - <xs:element name="description" type="xs:string"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="UnspecifiedProcess"> - <xs:complexContent> - <xs:extension base="Process"> - <xs:sequence> - <xs:element name="observingMode" type="ObservingModeType"/> - <xs:element name="description" type="xs:string"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================Online Processing============================ - - This describes the various types of realtime/online processing that can happen after the data is sent from the stations to - the central processing. It's still part of the Observation. 
- </xs:documentation> - </xs:annotation> - <xs:simpleType name="ProcessingType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Correlator"/> - <xs:enumeration value="Coherent Stokes"/> - <xs:enumeration value="Incoherent Stokes"/> - <xs:enumeration value="Fly's Eye"/> - <xs:enumeration value="Non Standard"/> - </xs:restriction> - </xs:simpleType> - <xs:simpleType name="MeasurementType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Test"/> - <xs:enumeration value="Tune Up"/> - <xs:enumeration value="Calibration"/> - <xs:enumeration value="Target"/> - <xs:enumeration value="All Sky"/> - <xs:enumeration value="Miscellaneous"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="Processing"> - <xs:sequence> - <xs:element minOccurs="0" name="correlator" type="Correlator"/> - <xs:element minOccurs="0" name="coherentStokes" type="CoherentStokes"/> - <xs:element minOccurs="0" name="incoherentStokes" type="IncoherentStokes"/> - <xs:element minOccurs="0" name="flysEye" type="FlysEye"/> - <xs:element minOccurs="0" name="nonStandard" type="NonStandard"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="RealTimeProcess"> - <xs:sequence> - <xs:element name="processingType" type="ProcessingType"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="Correlator"> - <xs:complexContent> - <xs:extension base="RealTimeProcess"> - <xs:sequence> - <xs:element name="integrationInterval" type="Time"/> - <xs:element minOccurs="0" name="channelWidth" type="Frequency"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="channelsPerSubband" type="xs:unsignedShort"/><!--BlueGene compatibility--> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>The CoherentStokes and IncoherentStokes do further processing on the data after the polyphase filter - on the BlueGene. 
The numberOfColapsedChannels is what is actually written to disk, the frequencyDownsamplingFactor is thus - Observation:channelsPerSubband divided by the numberOfcolapsedChannels. - There is also downsampling in time from the rawSamplingTime coming out of the polyphasefilter, usually in nanoseconds, using the timeDownsamplingFactor to - get to the samplingTime. The timeDownsamplingFactor can be quite large, with the resulting samplingtime in the miliseconds. - - Also note that within the same Observation, these settings can be different for CoherentStokes and IncoherentStokes. if both types are being generated. - </xs:documentation> - </xs:annotation> - <xs:complexType name="CoherentStokes"> - <xs:complexContent> - <xs:extension base="RealTimeProcess"> - <xs:sequence> - <xs:element name="rawSamplingTime" type="Time"/> - <xs:element name="timeDownsamplingFactor" type="xs:unsignedInt"/> - <xs:element name="samplingTime" type="Time"/> - <xs:element minOccurs="0" name="frequencyDownsamplingFactor" type="xs:unsignedShort"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="numberOfCollapsedChannels" type="xs:unsignedShort"/><!--BlueGene compatibility--> - <xs:element name="stokes" type="PolarizationType" maxOccurs="4"/> - <xs:element name="numberOfStations" type="xs:unsignedByte"/> - <xs:element name="stations" type="Stations"/> - <xs:element minOccurs="0" name="channelWidth" type="Frequency"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="channelsPerSubband" type="xs:unsignedShort"/><!--BlueGene compatibility--> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="IncoherentStokes"> - <xs:complexContent> - <xs:extension base="RealTimeProcess"> - <xs:sequence> - <xs:element name="rawSamplingTime" type="Time"/> - <xs:element name="timeDownsamplingFactor" type="xs:unsignedInt"/> - <xs:element name="samplingTime" type="Time"/> - <xs:element minOccurs="0" name="frequencyDownsamplingFactor" 
type="xs:unsignedShort"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="numberOfCollapsedChannels" type="xs:unsignedShort"/><!--BlueGene compatibility--> - <xs:element name="stokes" type="PolarizationType" maxOccurs="4"/> - <xs:element name="numberOfStations" type="xs:unsignedByte"/> - <xs:element name="stations" type="Stations"/> - <xs:element minOccurs="0" name="channelWidth" type="Frequency"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="channelsPerSubband" type="xs:unsignedShort"/><!--BlueGene compatibility--> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="FlysEye"> - <xs:complexContent> - <xs:extension base="RealTimeProcess"> - <xs:sequence> - <xs:element name="rawSamplingTime" type="Time"/> - <xs:element name="timeDownsamplingFactor" type="xs:unsignedInt"/> - <xs:element name="samplingTime" type="Time"/> - <xs:element name="stokes" type="PolarizationType" maxOccurs="4"/> - <xs:element minOccurs="0" name="channelWidth" type="Frequency"/><!--BlueGene compatibility--> - <xs:element minOccurs="0" name="channelsPerSubband" type="xs:unsignedShort"/><!--BlueGene compatibility--> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="NonStandard"> - <xs:complexContent> - <xs:extension base="RealTimeProcess"> - <xs:sequence> - <xs:element name="channelWidth" type="Frequency"/> - <xs:element name="channelsPerSubband" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="TransientBufferBoardEvents"> - <xs:sequence> - <xs:element maxOccurs="unbounded" name="transientBufferBoardEvent" type="TransientBufferBoardEvent"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="TransientBufferBoardEvent"> - <xs:sequence> - <xs:element name="eventSource" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="SubArrayPointings"> - 
<xs:sequence> - <xs:element maxOccurs="unbounded" name="subArrayPointing" type="SubArrayPointing"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>SubArrayPointing is one of the core classes of the SIP. It contains important information on - what direction the telescope is pointing and what object was the target as well as the length of time the patch - of sky was was observed. - - See the XML standard for the format of xs::duration. - </xs:documentation> - </xs:annotation> - <xs:complexType name="SubArrayPointing"> - <xs:sequence> - <xs:element name="pointing" type="Pointing"/> - <xs:element name="beamNumber" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="measurementDescription" type="xs:string"/> - <xs:element name="subArrayPointingIdentifier" type="IdentifierType"/> - <xs:element name="measurementType" type="MeasurementType"/> - <xs:element name="targetName" type="xs:string"/> - <xs:element name="startTime" type="xs:dateTime"/> - <xs:element name="duration" type="xs:duration"/> - <xs:element name="numberOfProcessing" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="processing" type="Processing"/> - <xs:element name="numberOfCorrelatedDataProducts" type="xs:unsignedShort"/> - <xs:element name="numberOfBeamFormedDataProducts" type="xs:unsignedShort"/> - <xs:element name="relations" type="ProcessRelations"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="DataSources"> - <xs:annotation> - <xs:documentation>============================Pipeline============================ - - This section describes the various pipelines. 
- </xs:documentation> - </xs:annotation> - <xs:sequence> - <xs:element name="dataProductIdentifier" type="IdentifierType" maxOccurs="unbounded"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="PipelineRun"> - <xs:complexContent> - <xs:extension base="Process"> - <xs:sequence> - <xs:element name="pipelineName" type="xs:string"/> - <xs:element name="pipelineVersion" type="xs:string"/> - <xs:element name="sourceData" type="DataSources"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>This definition might not be entirely finished as the ImagingPipeline is still being worked on.</xs:documentation> - </xs:annotation> - <xs:complexType name="ImagingPipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"> - <xs:sequence> - <xs:element minOccurs="0" name="frequencyIntegrationStep" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="timeIntegrationStep" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="skyModelDatabase" type="xs:string"/> - <xs:element minOccurs="0" name="demixing" type="xs:boolean"/> - <xs:element name="imagerIntegrationTime" type="Time"/> - <xs:element name="numberOfMajorCycles" type="xs:unsignedShort"/> - <xs:element name="numberOfInstrumentModels" type="xs:unsignedShort"/> - <xs:element name="numberOfCorrelatedDataProducts" type="xs:unsignedShort"/> - <xs:element name="numberOfSkyImages" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="CalibrationPipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"> - <xs:sequence> - <xs:element minOccurs="0" name="frequencyIntegrationStep" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="timeIntegrationStep" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="flagAutoCorrelations" type="xs:boolean"/> - <xs:element minOccurs="0" name="demixing" type="xs:boolean"/> - <xs:element 
name="skyModelDatabase" type="xs:string"/> - <xs:element name="numberOfInstrumentModels" type="xs:unsignedShort"/> - <xs:element name="numberOfCorrelatedDataProducts" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="AveragingPipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"> - <xs:sequence> - <xs:element name="frequencyIntegrationStep" type="xs:unsignedShort"/> - <xs:element name="timeIntegrationStep" type="xs:unsignedShort"/> - <xs:element name="flagAutoCorrelations" type="xs:boolean"/> - <xs:element name="demixing" type="xs:boolean"/> - <xs:element name="numberOfCorrelatedDataProducts" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>Pulsar pipeline. Which pulsars are selected for processing is a complex system</xs:documentation> - </xs:annotation> - <xs:simpleType name="PulsarSelectionType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Pulsars in observation specs, file or SAP"/><!--Default--> - <xs:enumeration value="Pulsars in observation specs"/><!--parset--> - <xs:enumeration value="Pulsar specified in dataproduct"/><!--meta--> - <xs:enumeration value="Brightest known pulsar in SAP"/><!--sapfind--> - <xs:enumeration value="Three brightest known pulsars in SAP"/><!--sapfind3--> - <xs:enumeration value="Brightest known pulsar in TAB"/><!--tabfind--> - <xs:enumeration value="Pulsars in observation specs, file and brightest in SAP and TAB"/><!--tabfind+--> - <xs:enumeration value="Specified pulsar list"/><!--given list, comma separated--> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="PulsarPipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"> - <xs:sequence> - <xs:element name="pulsarSelection" type="PulsarSelectionType"/> - <xs:element name="pulsars" type="ListOfString"/> - <xs:element name="doSinglePulseAnalysis" 
type="xs:boolean"/><!--single-pulse--> - <xs:element name="convertRawTo8bit" type="xs:boolean"/><!--raw-to-8bit--> - <xs:element name="subintegrationLength" type="Time"/><!--tsubint--> - <xs:element name="skipRFIExcision" type="xs:boolean"/><!--norfi--> - <xs:element name="skipDataFolding" type="xs:boolean"/><!--nofold--> - <xs:element name="skipOptimizePulsarProfile" type="xs:boolean"/><!--nopdmp--> - <xs:element name="skipConvertRawIntoFoldedPSRFITS" type="xs:boolean"/><!--skip-dspsr--> - <xs:element name="runRotationalRAdioTransientsAnalysis" type="xs:boolean"/><!--rrats RRAT capitals on purpose--> - <xs:element name="skipDynamicSpectrum" type="xs:boolean"/><!--skip-dynamic-spectrum--> - <xs:element name="skipPreFold" type="xs:boolean"/><!--skip-prefold--> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="CosmicRayPipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"/> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="LongBaselinePipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"> - <xs:sequence> - <xs:element name="subbandsPerSubbandGroup" type="xs:unsignedShort"/> - <xs:element name="subbandGroupsPerMS" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="GenericPipeline"> - <xs:complexContent> - <xs:extension base="PipelineRun"/> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================DataProduct============================ - - This section describes the dataproducts. 
- </xs:documentation> - </xs:annotation> - <xs:simpleType name="DataProductType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Correlator data"/> - <xs:enumeration value="Beam Formed data"/> - <xs:enumeration value="Transient Buffer Board data"/> - <xs:enumeration value="Sky Image"/> - <xs:enumeration value="Pixel Map"/> - <xs:enumeration value="Direct Data Storage data"/> - <xs:enumeration value="Dynamic Spectra data"/> - <xs:enumeration value="Instrument Model"/> - <xs:enumeration value="Sky Model"/> - <xs:enumeration value="Pulsar pipeline output"/> - <xs:enumeration value="Pulsar pipeline summary output"/> - <xs:enumeration value="Non Standard"/> - <xs:enumeration value="Unknown"/> - </xs:restriction> - </xs:simpleType> - <xs:simpleType name="ChecksumAlgorithm"> - <xs:restriction base="xs:string"> - <xs:enumeration value="MD5"/> - <xs:enumeration value="Adler32"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="ChecksumType"> - <xs:sequence> - <xs:element name="algorithm" type="ChecksumAlgorithm"/> - <xs:element name="value" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>We currently have three well documented fileformats in the LTA: - FITS, CASA and HDF5. There are LOFAR ICDs describing the various types (Correlated, BeamFormed, TBB, InstrumentModel, ...) - and how they are stored in one or more of these file formats. - We also have PULP, which currently isn't well documented, and expect PREFACTOR in 2017, which might get documented. - Next to this we have the UNDOCUMENTED option for data that is in a format that will never be documented. 
- </xs:documentation> - </xs:annotation> - <xs:simpleType name="FileFormatType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="FITS"/> - <xs:enumeration value="AIPS++/CASA"/> - <xs:enumeration value="HDF5"/> - <xs:enumeration value="PULP"/> - <xs:enumeration value="PREFACTOR"/> - <xs:enumeration value="UNDOCUMENTED"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>This very well defined yet! type probably needs to be an enumeration</xs:documentation> - </xs:annotation> - <xs:complexType name="TBBTrigger"> - <xs:sequence> - <xs:element name="type" type="xs:string"/> - <xs:element name="value" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>From AIPS++/CASA: None=0,I=1,Q=2,U=3,V=4,RR=5,RL=6,LR=7,LL=8,XX=9,XY=10,YX=11,YY=12</xs:documentation> - </xs:annotation> - <xs:simpleType name="PolarizationType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="None"/> - <xs:enumeration value="I"/> - <xs:enumeration value="Q"/> - <xs:enumeration value="U"/> - <xs:enumeration value="V"/> - <xs:enumeration value="RR"/> - <xs:enumeration value="RL"/> - <xs:enumeration value="LR"/> - <xs:enumeration value="LL"/> - <xs:enumeration value="XX"/> - <xs:enumeration value="XY"/> - <xs:enumeration value="YX"/> - <xs:enumeration value="YY"/> - <xs:enumeration value="Xre"/> - <xs:enumeration value="Xim"/> - <xs:enumeration value="Yre"/> - <xs:enumeration value="Yim"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>Base class of the various DataProducts</xs:documentation> - </xs:annotation> - <xs:complexType name="DataProduct"> - <xs:sequence> - <xs:element name="dataProductType" type="DataProductType"/> - <xs:element name="dataProductIdentifier" type="IdentifierType"/> - <xs:element minOccurs="0" name="storageTicket" type="xs:string"/> - <xs:element name="size" type="xs:unsignedLong"/><!--Bytes--> - <xs:element maxOccurs="unbounded" minOccurs="0" 
name="checksum" type="ChecksumType"/> - <xs:element name="fileName" type="xs:string"/> - <xs:element name="fileFormat" type="FileFormatType"/> - <xs:element name="processIdentifier" type="IdentifierType"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>======================Interferometer=========================== - - subArrayPointingIdentifier (also knows as MeasurementIdentifier within MoM) is not an optional parameter as the - LTA catalog needs it because it can't otherwise find this information because of limits in the PipelineRun model. - See the XML standard for the format of xs::duration. - </xs:documentation> - </xs:annotation> - <xs:complexType name="CorrelatedDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"> - <xs:sequence> - <xs:element name="subArrayPointingIdentifier" type="IdentifierType"/> - <xs:element name="subband" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="stationSubband" type="xs:unsignedShort"/> - <xs:element name="startTime" type="xs:dateTime"/> - <xs:element name="duration" type="xs:duration"/> - <xs:element name="integrationInterval" type="Time"/> - <xs:element name="centralFrequency" type="Frequency"/> - <xs:element name="channelWidth" type="Frequency"/> - <xs:element name="channelsPerSubband" type="xs:unsignedShort"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>This currently describes the ParmDB. No fields are defined because in the model is functions - as a link between Calibrator and Target observations and doesn't contain any searchable metadata at the moment. - </xs:documentation> - </xs:annotation> - <xs:complexType name="InstrumentModelDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"/> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>This currently describes the SourceDB. 
No fields are defined because in the model is functions - as a link between Calibrator and Target observations and doesn't contain any searchable metadata at the moment. - </xs:documentation> - </xs:annotation> - <xs:complexType name="SkyModelDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"/> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>=====================TransientBufferBoard===================== - - The timeStamp contains the exact number of seconds from Observation::startTime - </xs:documentation> - </xs:annotation> - <xs:complexType name="TransientBufferBoardDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"> - <xs:sequence> - <xs:element name="numberOfSamples" type="xs:unsignedInt"/> - <xs:element name="timeStamp" type="xs:unsignedInt"/> - <xs:element name="triggerParameters" type="TBBTrigger"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>=====================BeamFormed===================== - - This section describes BeamFormed dataproducts. The important part is that this is where all the individual ArrayBeams are - described, the actual BeamFormedDataProduct is basically just a container. 
- </xs:documentation> - </xs:annotation> - <xs:complexType name="ArrayBeams"> - <xs:sequence> - <xs:element maxOccurs="unbounded" name="arrayBeam" type="ArrayBeam"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>SamplingTime is the duration of a single sample usually in ms or ns.</xs:documentation> - </xs:annotation> - <xs:complexType name="ArrayBeam"> - <xs:sequence> - <xs:element name="subArrayPointingIdentifier" type="IdentifierType"/> - <xs:element name="beamNumber" type="xs:unsignedShort"/> - <xs:element name="dispersionMeasure" type="xs:double"/> - <xs:element name="numberOfSubbands" type="xs:unsignedShort"/> - <xs:element name="stationSubbands" type="ListOfSubbands"/> - <xs:element name="samplingTime" type="Time"/> - <xs:element name="centralFrequencies" type="ListOfFrequencies"/> - <xs:element name="channelWidth" type="Frequency"/> - <xs:element name="channelsPerSubband" type="xs:unsignedShort"/> - <xs:element name="stokes" type="PolarizationType" maxOccurs="4"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>The pointing is the actual pointing of the ArrayBeam - The offset is the difference in the pointing of the ArrayBeam and the SubArrayPointing - </xs:documentation> - </xs:annotation> - <xs:complexType name="CoherentStokesBeam"> - <xs:complexContent> - <xs:extension base="ArrayBeam"> - <xs:sequence> - <xs:element name="pointing" type="Pointing"/> - <xs:element name="offset" type="Pointing"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="IncoherentStokesBeam"> - <xs:complexContent> - <xs:extension base="ArrayBeam"> - <xs:sequence/> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="FlysEyeBeam"> - <xs:complexContent> - <xs:extension base="ArrayBeam"> - <xs:sequence> - <xs:element name="station" type="Station"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType 
name="BeamFormedDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"> - <xs:sequence> - <xs:element name="numberOfBeams" type="xs:unsignedShort"/> - <xs:element minOccurs="0" name="beams" type="ArrayBeams"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>=====================Pulsar Pipeline Dataproducts===================== - - PULP is a name for the output of the pulsar pipeline, it contains a list of small files. - It also contains pointers back to the observation/SAP/beam that the raw data came out of. - </xs:documentation> - </xs:annotation> - <xs:simpleType name="PulsarPipelineDataType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="CoherentStokes"/> - <xs:enumeration value="IncoherentStokes"/> - <xs:enumeration value="ComplexVoltages"/> - <xs:enumeration value="SummaryCoherentStokes"/> - <xs:enumeration value="SummaryIncoherentStokes"/> - <xs:enumeration value="SummaryComplexVoltages"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="PulpSummaryDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"> - <xs:sequence> - <xs:element name="fileContent" type="ListOfString"/> - <xs:element name="dataType" type="PulsarPipelineDataType"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="PulpDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"> - <xs:sequence> - <xs:element name="fileContent" type="ListOfString"/> - <xs:element name="dataType" type="PulsarPipelineDataType"/> - <xs:element name="arrayBeam" type="ArrayBeam"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>===================Generic/Unspecified====================== - - Please note that the difference between Generic and Unspecified is that the first describes a non standard dataproduct, while the second describes an unknown - 
dataproduct. The latter is mostly used when there are partial errors during the ingest of data into the archive. - </xs:documentation> - </xs:annotation> - <xs:complexType name="GenericDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"/> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="UnspecifiedDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"/> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>===================Images====================== - - Below are structures related to image type dataproducts. The main dataproduct here is the SkyImage, which contains three levels - of data. At the top there is the general information, below that there are coordinate types and those have one or more axes. - </xs:documentation> - </xs:annotation> - <xs:complexType name="Axis"> - <xs:sequence> - <xs:element name="number" type="xs:unsignedShort"/> - <xs:element name="name" type="xs:string"/> - <xs:element name="units" type="xs:string"/> - <xs:element name="length" type="xs:unsignedInt"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="LinearAxis"> - <xs:complexContent> - <xs:extension base="Axis"> - <xs:sequence> - <xs:element name="increment" type="xs:double"/> - <xs:element name="referencePixel" type="xs:double"/> - <xs:element name="referenceValue" type="xs:double"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="TabularAxis"> - <xs:complexContent> - <xs:extension base="Axis"> - <xs:sequence/> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="Coordinate"> - <xs:sequence/> - </xs:complexType> - <xs:simpleType name="RaDecSystem"> - <xs:restriction base="xs:string"> - <xs:enumeration value="ICRS"/> - <xs:enumeration value="FK5"/> - <xs:enumeration value="FK4"/> - <xs:enumeration value="FK4-NO-E"/> - <xs:enumeration value="GAPPT"/> - </xs:restriction> - </xs:simpleType> - <xs:simpleType 
name="LocationFrame"> - <xs:restriction base="xs:string"> - <xs:enumeration value="GEOCENTER"/> - <xs:enumeration value="BARYCENTER"/> - <xs:enumeration value="HELIOCENTER"/> - <xs:enumeration value="TOPOCENTER"/> - <xs:enumeration value="LSRK"/> - <xs:enumeration value="LSRD"/> - <xs:enumeration value="GALACTIC"/> - <xs:enumeration value="LOCAL_GROUP"/> - <xs:enumeration value="RELOCATABLE"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>The DirectionCoordinate defines the RA and DEC axes and their projection on the celestial sphere.</xs:documentation> - </xs:annotation> - <xs:complexType name="DirectionCoordinate"> - <xs:complexContent> - <xs:extension base="Coordinate"> - <xs:sequence> - <xs:element minOccurs="2" maxOccurs="2" name="directionLinearAxis" type="LinearAxis"/> - <xs:element name="PC0_0" type="xs:double"/> - <xs:element name="PC0_1" type="xs:double"/> - <xs:element name="PC1_0" type="xs:double"/> - <xs:element name="PC1_1" type="xs:double"/> - <xs:element name="equinox" type="xs:string"/> - <xs:element name="raDecSystem" type="RaDecSystem"/> - <xs:element name="projection" type="xs:string"/> - <xs:element name="projectionParameters" type="ListOfDouble"/> - <xs:element name="longitudePole" type="Angle"/> - <xs:element name="latitudePole" type="Angle"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:simpleType name="SpectralQuantityType"> - <xs:restriction base="xs:string"> - <xs:enumeration value="Frequency"/> - <xs:enumeration value="Energy"/> - <xs:enumeration value="Wavenumber"/> - <xs:enumeration value="VelocityRadio"/> - <xs:enumeration value="VelocityOptical"/> - <xs:enumeration value="VelocityAppRadial"/> - <xs:enumeration value="Redshift"/> - <xs:enumeration value="WaveLengthVacuum"/> - <xs:enumeration value="WaveLengthAir"/> - <xs:enumeration value="BetaFactor"/> - </xs:restriction> - </xs:simpleType> - <xs:complexType name="SpectralQuantity"> - <xs:sequence> - <xs:element 
name="type" type="SpectralQuantityType"/> - <xs:element name="value" type="xs:double"/> - </xs:sequence> - </xs:complexType> - <xs:complexType name="SpectralCoordinate"> - <xs:complexContent> - <xs:extension base="Coordinate"> - <xs:sequence> - <xs:choice> - <xs:element name="spectralLinearAxis" type="LinearAxis"/> - <xs:element name="spectralTabularAxis" type="TabularAxis"/> - </xs:choice> - <xs:element name="spectralQuantity" type="SpectralQuantity"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="TimeCoordinate"> - <xs:complexContent> - <xs:extension base="Coordinate"> - <xs:sequence> - <xs:choice> - <xs:element name="timeLinearAxis" type="LinearAxis"/> - <xs:element name="timeTabularAxis" type="TabularAxis"/> - </xs:choice> - <xs:element name="equinox" type="EquinoxType"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:complexType name="PolarizationCoordinate"> - <xs:complexContent> - <xs:extension base="Coordinate"> - <xs:sequence> - <xs:element name="polarizationTabularAxis" type="TabularAxis"/> - <xs:element maxOccurs="4" name="polarization" type="PolarizationType"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>PixelMap is a generic base class. Currently only SkyImage is derived from it, but in the future others will be, lik DynamicSpectra - The limit of 999 is based on the limits in the FITS standard. In practice it's not likely to be reached. 
- </xs:documentation> - </xs:annotation> - <xs:complexType name="PixelMapDataProduct"> - <xs:complexContent> - <xs:extension base="DataProduct"> - <xs:sequence> - <xs:element name="numberOfAxes" type="xs:unsignedShort"/> - <xs:element name="numberOfCoordinates" type="xs:unsignedShort"/> - <xs:element maxOccurs="999" name="coordinate" type="Coordinate"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>The SkyImage has two pointings: One for the actual image, encoded in the axes of - the DirectionCoordinate, and an observationPointing, which is the direction the telescope was actually facing. This need not - be the same although it often will be. - - Usually a SkyImage will have one DirectionCoordinate, one PolarizationCoordinate and one SpectralCoordinate. - </xs:documentation> - </xs:annotation> - <xs:complexType name="SkyImageDataProduct"> - <xs:complexContent> - <xs:extension base="PixelMapDataProduct"> - <xs:sequence> - <xs:element name="locationFrame" type="LocationFrame"/> - <xs:element name="timeFrame" type="xs:string"/> - <xs:element name="observationPointing" type="Pointing"/> - <xs:element name="restoringBeamMajor" type="Angle"/> - <xs:element name="restoringBeamMinor" type="Angle"/> - <xs:element name="rmsNoise" type="Pixel"/> - </xs:sequence> - </xs:extension> - </xs:complexContent> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================Parset============================ - - This section describes the Parset. It's an optional section, given that not all processes that will be run to generate data might have - been run from SAS/MAC in the future. At the moment practically all processes should have a parset as we only run stuff on CEP/Tier 0. 
- </xs:documentation> - </xs:annotation> - <xs:complexType name="Parset"> - <xs:sequence> - <xs:element name="identifier" type="IdentifierType"/> - <xs:element name="contents" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================Project============================ - - This section describes Project information. - </xs:documentation> - </xs:annotation> - <xs:simpleType name="Telescope"> - <xs:restriction base="xs:string"> - <xs:enumeration value="LOFAR"/> - </xs:restriction> - </xs:simpleType> - <xs:annotation> - <xs:documentation>The assumption is that all processes leading to the dataproduct are in the same project or public. - A dataproduct that would be created from non-public data from different projects is not modelled in the archive.</xs:documentation> - </xs:annotation> - <xs:complexType name="Project"> - <xs:sequence> - <xs:element name="projectCode" type="xs:string"/> - <xs:element name="primaryInvestigator" type="xs:string"/> - <xs:element name="coInvestigator" type="xs:string" minOccurs="0" maxOccurs="unbounded"/> - <xs:element name="contactAuthor" type="xs:string"/> - <xs:element name="telescope" type="Telescope"/> - <xs:element name="projectDescription" type="xs:string"/> - </xs:sequence> - </xs:complexType> - <xs:annotation> - <xs:documentation>============================LTASip root element============================ - - This is the root of the LTA SIP. It should have at least one Observation or PipelineRun, describing the process that generated the - dataProduct. 
- </xs:documentation> - </xs:annotation> - <xs:element name="ltaSip" type="LTASip"/> - <xs:complexType name="LTASip"> - <xs:sequence> - <xs:element name="sipGeneratorVersion" type="xs:string"/> - <xs:element name="project" type="Project"/> - <xs:element name="dataProduct" type="DataProduct"/> - <xs:element maxOccurs="unbounded" minOccurs="0" name="observation" type="Observation"/> - <xs:element maxOccurs="unbounded" minOccurs="0" name="pipelineRun" type="PipelineRun"/> - <xs:element maxOccurs="unbounded" minOccurs="0" name="unspecifiedProcess" type="UnspecifiedProcess"/> - <xs:element name="relatedDataProduct" type="DataProduct" minOccurs="0" maxOccurs="unbounded"/> - <xs:element maxOccurs="unbounded" minOccurs="0" name="parset" type="Parset"/> - </xs:sequence> - </xs:complexType> -</xs:schema> diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingestpipeline.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingestpipeline.py index 8f73bfabd8c1dfb6e00e5c3fa37afe7d698078b7..d6894e0f743abbb22aaad5be8ca6d63e3a15c986 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingestpipeline.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingestpipeline.py @@ -371,7 +371,8 @@ class IngestPipeline(): 'project': self.Project, 'type': self.Type, 'ingest_server': self.hostname, - 'dataproduct': self.DataProduct } + 'dataproduct': self.DataProduct, + 'srm_url': self.PrimaryUri } if 'ObservationId' in self.job: contentDict['otdb_id'] = self.job['ObservationId'] diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingesttransferserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingesttransferserver.py index a10fc9c0f54ea86d4540cb2b052227c4fe00bb0c..9754f8549d04047bcd140944a0a7946f1f3aec36 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingesttransferserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ingesttransferserver.py @@ -57,7 
+57,8 @@ logger = logging.getLogger(__name__) def _getBytesSent(): try: - return psutil.net_io_counters(True).get(NET_IF_TO_MONITOR, psutil.net_io_counters(False)).bytes_sent + # return the sum of all traffic over the given interfaces + return sum(psutil.net_io_counters(True).get(interface, psutil.net_io_counters(False)).bytes_sent for interface in NET_IF_TO_MONITOR) except Exception: return 0 @@ -161,7 +162,8 @@ class IngestTransferServer: # note that this is a 'soft' limit. # we cannot control the actual bandwith used by the running transfers # we can only not start new jobs if we already exceed the MAX_USED_BANDWITH_TO_START_NEW_JOBS - if used_bandwidth > MAX_USED_BANDWITH_TO_START_NEW_JOBS: + # allow an excption if we are running less then 2 jobs (then some other process is stealing our bandwith and we don't care) + if used_bandwidth > MAX_USED_BANDWITH_TO_START_NEW_JOBS and len(self.__running_jobs) > 2: log_recource_warning('not enough bandwith available to start new jobs, using %s, max %s' % (humanreadablesize(used_bandwidth, 'bps'), humanreadablesize(MAX_USED_BANDWITH_TO_START_NEW_JOBS, 'bps'))) diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ltacp.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ltacp.py index f10960ce76712ca8bdd4d0434363ffadc98f2983..0c1090e918bbaf3ec9eef238fde45ff5e91e70e1 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ltacp.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/ltacp.py @@ -20,9 +20,10 @@ import atexit from datetime import datetime, timedelta from lofar.common.util import humanreadablesize from lofar.common.datetimeutils import totalSeconds -from lofar.common.subprocess import PipeReader +from lofar.common.subprocess_utils import PipeReader from lofar.lta.ingest.common.config import hostnameToIp from lofar.lta.ingest.server.config import GLOBUS_TIMEOUT +from lofar.lta.ingest.common.srm import * logger = logging.getLogger() @@ -39,27 +40,6 @@ class 
LtacpDestinationExistsException(LtacpException): def getLocalIPAddress(): return hostnameToIp(socket.gethostname()) -def convert_surl_to_turl(surl): - ''' converts given srm url of an LTA site into a transport url as needed by gridftp. - ''' - if 'grid.sara.nl' in surl: - # sara provides dynamic hostnames via a round-robin dns. Get a random/dynamic host as provided by them. - dyn_hostname = socket.getfqdn(socket.gethostbyname('gridftp.grid.sara.nl')) - return re.sub('srm://srm\.grid\.sara\.nl:?\d*', 'gsiftp://%s:2811' % (dyn_hostname,), surl) - - if 'lta-head.lofar.psnc.pl' in surl: - # poznan provides dynamic hostnames via a round-robin dns. Get a random/dynamic host as provided by them. - dyn_hostname = socket.getfqdn(socket.gethostbyname('gridftp.lofar.psnc.pl')) - return re.sub('srm://lta-head\.lofar\.psnc\.pl:?\d*', 'gsiftp://%s:2811' % (dyn_hostname,), surl) - - if 'lofar-srm.fz-juelich.de' in surl: - # juelich provides dynamic hostnames via a round-robin dns. Get a random/dynamic host as provided by them. - dyn_hostname = socket.getfqdn(socket.gethostbyname('lofar-gridftp.fz-juelich.de')) - return re.sub('srm://lofar-srm\.fz-juelich\.de:?\d*', 'gsiftp://%s:2811' % (dyn_hostname,), surl) - - raise LtacpException('Cannot convert surl to turl. Unknown destination in surl: \'%s\'.' 
% surl) - - def createNetCatCmd(listener, user=None, host=None): '''helper method to determine the proper call syntax for netcat on host''' @@ -654,119 +634,6 @@ class LtaCp: logger.debug('ltacp %s: finished cleaning up' % (self.logId)) - - -# execute command and return (stdout, stderr, returncode) tuple -def execute(cmd, log_prefix='', timeout=-1): - if log_prefix: - if not isinstance(log_prefix, basestring): - log_prefix = str(log_prefix) - if log_prefix[-1] != ' ': - log_prefix += ' ' - - logger.info('%sexecuting: %s', log_prefix, ' '.join(cmd)) - p_cmd = Popen(cmd, stdout=PIPE, stderr=PIPE) - - if timeout > 0: - timeout = timedelta(seconds=timeout) - logger.debug('%swaiting at most %s for command to finish...', log_prefix, timeout) - start_wait = datetime.now() - while datetime.now() - start_wait < timeout: - if p_cmd.poll() is not None: - break; - time.sleep(1) - - if p_cmd.poll() is None: - raise Exception('%s%s did not finish within %s.' % (log_prefix, cmd, timeout)) - - stdout, stderr = p_cmd.communicate() - return (stdout, stderr, p_cmd.returncode) - -# remove file from srm -def srmrm(surl, log_prefix='', timeout=-1): - logger.info('%s removing surl: %s', log_prefix, surl) - return execute(['/bin/bash', '-c', 'srmrm %s' % (surl,)], log_prefix, timeout) - -# remove (empty) directory from srm -def srmrmdir(surl, log_prefix='', timeout=-1): - return execute(['/bin/bash', '-c', 'srmrmdir %s' % (surl,)], log_prefix, timeout) - -# create directory in srm -def srmmkdir(surl, log_prefix='', timeout=-1): - return execute(['/bin/bash', '-c', 'srmmkdir -retry_num=0 %s' % (surl,)], log_prefix, timeout) - -# detailed listing -def srmls(surl, log_prefix='', timeout=-1): - return execute(['/bin/bash', '-c', 'srmls %s' % (surl,)], log_prefix, timeout) - -# detailed listing -def srmll(surl, log_prefix='', timeout=-1): - return execute(['/bin/bash', '-c', 'srmls -l %s' % (surl,)], log_prefix, timeout) - -# get file size and checksum from srm via srmll -def 
get_srm_size_and_a32_checksum(surl, log_prefix='', timeout=-1): - try: - output, errors, code = srmll(surl, log_prefix, timeout) - logger.debug(output) - - if code != 0: - return (False, None, None) - - pathLine = output.strip() - pathLineItems = [x.strip() for x in pathLine.split()] - - if len(pathLineItems) < 2: - #path line shorter than expected - return (False, None, None) - - file_size = int(pathLineItems[0]) - - if not 'Checksum type:' in output: - return False - - if 'Checksum type:' in output: - cstype = output.split('Checksum type:')[1].split()[0].strip() - if cstype.lower() != 'adler32': - return (False, None, None) - - if 'Checksum value:' in output: - a32_value = output.split('Checksum value:')[1].lstrip().split()[0] - return (True, file_size, a32_value) - - except Exception as e: - logger.error(e) - - return (False, None, None) - -#recursively checks for presence of parent directory and created the missing part of a tree -def create_missing_directories(surl): - - parent, child = os.path.split(surl) - missing = [] - - # determine missing dirs - while parent: - logger.info('checking path: %s' % parent) - o, e, code = srmls(parent) - if code == 0: - logger.info('srmls returned successfully, so this path apparently exists: %s' % parent) - break; - else: - parent, child = os.path.split(parent) - missing.append(child) - - # recreate missing dirs - while len(missing) > 0: - parent = parent + '/' + missing.pop() - code = srmmkdir(parent)[2] - if code != 0: - logger.info('failed to create missing directory: %s' % parent) - return code - - logger.info('successfully created parent directory: %s' % parent) - return 0 - - # limited standalone mode for testing: # usage: ltacp.py <remote-host> <remote-path> <surl> def main(): diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/unspecifiedSIP.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/unspecifiedSIP.py index 
8915e017807bc55d4f3b936514c5020a424b01b7..195537cb1cdc019c729400e60302e2d8f796f36b 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/unspecifiedSIP.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/unspecifiedSIP.py @@ -31,6 +31,8 @@ genericSIP = '''<?xml version="1.0" encoding="UTF-8"?> </checksum> <fileName>%s</fileName> <fileFormat>%s</fileFormat> + <storageWriter>%s</storageWriter> + <storageWriterVersion>%s</storageWriterVersion> <processIdentifier> <source>SAS</source> <identifier>%s</identifier> @@ -59,7 +61,8 @@ genericSIP = '''<?xml version="1.0" encoding="UTF-8"?> ''' -def makeSIP(Project, ObsId, ArchiveId, ticket, FileName, FileSize, MD5Checksum, Adler32Checksum, Type): +def makeSIP(Project, ObsId, ArchiveId, ticket, FileName, FileSize, MD5Checksum, Adler32Checksum, Type, + storageWriter='Unknown', storageWriterVersion='Unknown'): if FileName[-3:] == '.MS': fileFormat = 'AIPS++/CASA' elif FileName[-3:].lower() == '.h5': @@ -68,7 +71,10 @@ def makeSIP(Project, ObsId, ArchiveId, ticket, FileName, FileSize, MD5Checksum, fileFormat = 'FITS' else: ## Maybe we need an 'Unknown' in the future? 
fileFormat = 'PULP' - return genericSIP % (Project, Type, ArchiveId, FileName, ticket, FileSize, MD5Checksum, Adler32Checksum, FileName, fileFormat, ObsId, ObsId, ObsId, ObsId, ObsId) + return genericSIP % (Project, Type, ArchiveId, FileName, ticket, FileSize, MD5Checksum, Adler32Checksum, + FileName, fileFormat, + storageWriter, storageWriterVersion, + ObsId, ObsId, ObsId, ObsId, ObsId) ## Stand alone execution code ------------------------------------------ if __name__ == '__main__': diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/CMakeLists.txt b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/CMakeLists.txt index 8831546a33cca20d24cc64575f04e72f3624d573..1e5ba317b0f2901500023b6f048ad2f22e1458fd 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/CMakeLists.txt +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/CMakeLists.txt @@ -1,5 +1,6 @@ include(LofarCTest) +lofar_add_test(t_sip) lofar_add_test(t_ltacp) lofar_add_test(t_ingestpipeline) diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/ltastubs.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/ltastubs.py index b8ecfcd5225c9d093fdc2b583db6bd0d7a3d2ae3..4f75e11d1890e694d285f129a35184020e7e15ac 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/ltastubs.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/ltastubs.py @@ -25,7 +25,7 @@ def stub(): if 'globus-url-copy' in ' '.join(args): dppath = [x for x in args[2].split() if 'file://' in x][0] - dest_path = [x for x in args[2].split() if 'srm://' in x][0] + dest_path = [x for x in args[2].split() if 'gsiftp://' in x][0] dest_filename = os.path.basename(dest_path) global _local_globus_file_path _local_globus_file_path = '/tmp/globus_output_%s/%s' % (uuid.uuid1(), dest_filename) @@ -68,8 +68,8 @@ def stub(): return '\n'.join(lines), '', 0 - lofar.lta.ingest.server.ltacp.srmll_org = lofar.lta.ingest.server.ltacp.srmll - 
lofar.lta.ingest.server.ltacp.srmll = stub_srmll + lofar.lta.ingest.common.srm.srmll_org = lofar.lta.ingest.common.srm.srmll + lofar.lta.ingest.common.srm.srmll = stub_srmll def un_stub(): global _local_globus_file_path @@ -78,7 +78,7 @@ def un_stub(): subprocess.Popen.__init__ = subprocess.Popen.__init__org logger.info('un-stubbing srmll command') - lofar.lta.ingest.server.ltacp.srmll = lofar.lta.ingest.server.ltacp.srmll_org + lofar.lta.ingest.common.srm.srmll = lofar.lta.ingest.common.srm.srmll_org if _local_globus_file_path and os.path.exists(_local_globus_file_path): logger.info('removing _local_globus_file_path: %s', _local_globus_file_path) diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ingestpipeline.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ingestpipeline.py index c59de5adcde3087291aa2f9429db666e944e7fbc..3d4c972e58ce26076e988c9959d11ae63529ab0b 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ingestpipeline.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ingestpipeline.py @@ -23,6 +23,12 @@ except ImportError: print 'Please install MagicMock: pip install mock' exit(3) +from subprocess import call +if call(['ssh', '-o', 'PasswordAuthentication=no', '-o', 'PubkeyAuthentication=yes', '-o', 'ConnectTimeout=1', 'localhost', 'true']) != 0: + print 'this test depends on keybased ssh login to localhost, which is not setup correctly. skipping test...' 
+ exit(3) + + connection = None broker = None @@ -56,244 +62,249 @@ try: # modify the return values of the various MoMClient methods with pre-cooked answers mommock.setStatus.return_value = True - from lofar.lta.ingest.common.job import createJobXml, parseJobXml - from lofar.lta.ingest.server.ltaclient import LTAClient # <-- thanks to magick mock, we get the mocked ltaclient - from lofar.lta.ingest.server.momclient import MoMClient # <-- thanks to magick mock, we get the mocked momclient - from lofar.lta.ingest.server.ingestpipeline import * - import ltastubs - - logger = logging.getLogger() - - class TestIngestPipeline(unittest.TestCase): - def setUp(self): - ltastubs.stub() - self.ltaclient = LTAClient() - self.momclient = MoMClient() - - def tearDown(self): - ltastubs.un_stub() - - def test_single_file(self): - try: - project_name = 'test-project' - obs_id = 987654321 - dpname = 'L%s_SAP000_SB000_im.h5' % obs_id - test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) - - def stub_GetStorageTicket(project, filename, filesize, archive_id, job_id, obs_id, check_mom_id=True, id_source='MoM'): - return { 'primary_uri_rnd': 'srm://some.site.name:8443/some/path/data/lofar/ops/projects/%s/%s/%s' % (project, obs_id, dpname), - 'result': 'ok', - 'error': '', - 'ticket': '3E0A47ED860D6339E053B316A9C3BEE2'} - ltamock.GetStorageTicket.side_effect = stub_GetStorageTicket - - def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): - #return unpecified sip with proper details - from lofar.lta.ingest.server.unspecifiedSIP import makeSIP - return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') - mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP - - os.makedirs(test_dir_path) - test_file_path = os.path.join(test_dir_path, dpname) - with open(test_file_path, 'w') as file: - file.write(4096*'a') - - job_xml 
= createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_file_path) - logger.info('job xml: %s', job_xml) - job = parseJobXml(job_xml) - - pl = IngestPipeline(job, self.momclient, self.ltaclient) - pl.run() - - except Exception as e: - self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) - finally: - # the 'stub-transfered' file ended up in out local stub lta - # with the path: ltastubs._local_globus_file_path - #check extension - self.assertEqual(os.path.splitext(test_file_path)[-1], os.path.splitext(ltastubs._local_globus_file_path)[-1]) - - #compare with original - with open(test_file_path) as input, open(ltastubs._local_globus_file_path) as output: - self.assertEqual(input.read(), output.read()) - - for f in os.listdir(test_dir_path): - os.remove(os.path.join(test_dir_path, f)) - os.removedirs(test_dir_path) - - def test_h5_plus_raw_file(self): - #beam formed h5 files are always accompanied by a raw file - #these should be tarred togheter - try: - project_name = 'test-project' - obs_id = 987654321 - dpname = 'L%s_SAP000_SB000_bf.h5' % obs_id - rawname = dpname.replace('.h5', '.raw') - test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) - - def stub_GetStorageTicket(project, filename, filesize, archive_id, job_id, obs_id, check_mom_id=True, id_source='MoM'): - return { 'primary_uri_rnd': 'srm://some.site.name:8443/some/path/data/lofar/ops/projects/%s/%s/%s.tar' % (project, obs_id, dpname), - 'result': 'ok', - 'error': '', - 'ticket': '3E0A47ED860D6339E053B316A9C3BEE2'} - ltamock.GetStorageTicket.side_effect = stub_GetStorageTicket - - def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): - #return unpecified sip with proper details - from lofar.lta.ingest.server.unspecifiedSIP import makeSIP - return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') - 
mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP - - os.makedirs(test_dir_path) - test_file_path = os.path.join(test_dir_path, dpname) - with open(test_file_path, 'w') as file: - file.write(4096*'a') - raw_test_file_path = os.path.join(test_dir_path, dpname.replace('.h5', '.raw')) - with open(raw_test_file_path, 'w') as file: - file.write(4096*'b') - - job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_file_path) - logger.info('job xml: %s', job_xml) - job = parseJobXml(job_xml) - - pl = IngestPipeline(job, self.momclient, self.ltaclient) - pl.run() - - except Exception as e: - self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) - finally: - # the 'stub-transfered' file ended up in out local stub lta - # with the path: ltastubs._local_globus_file_path - #check extension - self.assertEqual('.tar', os.path.splitext(ltastubs._local_globus_file_path)[-1]) - - #check tar contents - tar = subprocess.Popen(['tar', '--list', '-f', ltastubs._local_globus_file_path], stdout=subprocess.PIPE) - tar_file_list, err = tar.communicate() - self.assertEqual(tar.returncode, 0) - logger.info('file list in tar:\n%s', tar_file_list) - - self.assertTrue(os.path.basename(test_file_path) in tar_file_list) - self.assertTrue(os.path.basename(raw_test_file_path) in tar_file_list) - logger.info('all expected source files are in tar!') - - os.remove(test_file_path) - os.remove(raw_test_file_path) - os.removedirs(test_dir_path) - - - def test_directory(self): - try: - project_name = 'test-project' - obs_id = 987654321 - dpname = 'L%s_SAP000_SB000_uv.MS' % obs_id - test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1(), dpname) - - def stub_GetStorageTicket(project, filename, filesize, archive_id, job_id, obs_id, check_mom_id=True, id_source='MoM'): - return { 'primary_uri_rnd': 'srm://some.site.name:8443/some/path/data/lofar/ops/projects/%s/%s/%s.tar' % (project, obs_id, dpname), - 'result': 'ok', - 
'error': '', - 'ticket': '3E0A47ED860D6339E053B316A9C3BEE2'} - ltamock.GetStorageTicket.side_effect = stub_GetStorageTicket - - def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): - #return unpecified sip with proper details - from lofar.lta.ingest.server.unspecifiedSIP import makeSIP - return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') - mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP - - os.makedirs(test_dir_path) - test_file_paths = [] - for i in range(10): - test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) - test_file_paths.append(test_file_path) + # patch (mock) the convert_surl_to_turl method during these tests. + with patch('lofar.lta.ingest.server.ltacp.convert_surl_to_turl') as mock_convert_surl_to_turl: + mock_convert_surl_to_turl.side_effect = lambda surl: surl.replace('srm', 'gsiftp') + + from lofar.lta.ingest.common.job import createJobXml, parseJobXml + from lofar.lta.ingest.server.ltaclient import LTAClient # <-- thanks to magick mock, we get the mocked ltaclient + from lofar.lta.ingest.server.momclient import MoMClient # <-- thanks to magick mock, we get the mocked momclient + from lofar.lta.ingest.server.ingestpipeline import * + import ltastubs + + logger = logging.getLogger() + + class TestIngestPipeline(unittest.TestCase): + def setUp(self): + ltastubs.stub() + self.ltaclient = LTAClient() + self.momclient = MoMClient() + + def tearDown(self): + ltastubs.un_stub() + + def test_single_file(self): + try: + project_name = 'test-project' + obs_id = 987654321 + dpname = 'L%s_SAP000_SB000_im.h5' % obs_id + test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) + + def stub_GetStorageTicket(project, filename, filesize, archive_id, job_id, obs_id, check_mom_id=True, id_source='MoM'): + return { 'primary_uri_rnd': 
'srm://some.site.name:8443/some/path/data/lofar/ops/projects/%s/%s/%s' % (project, obs_id, dpname), + 'result': 'ok', + 'error': '', + 'ticket': '3E0A47ED860D6339E053B316A9C3BEE2'} + ltamock.GetStorageTicket.side_effect = stub_GetStorageTicket + + def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): + #return unpecified sip with proper details + from lofar.lta.ingest.server.unspecifiedSIP import makeSIP + return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') + mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP + + os.makedirs(test_dir_path) + test_file_path = os.path.join(test_dir_path, dpname) with open(test_file_path, 'w') as file: - file.write(1000*'a') - - job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_dir_path) - logger.info('job xml: %s', job_xml) - job = parseJobXml(job_xml) - - pl = IngestPipeline(job, self.momclient, self.ltaclient) - pl.run() - except Exception as e: - self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) - finally: - # the 'stub-transfered' file ended up in out local stub lta - # with the path: ltastubs._local_globus_file_path - #check extension - self.assertTrue('.tar' == os.path.splitext(ltastubs._local_globus_file_path)[-1]) - - #check tar contents - tar = subprocess.Popen(['tar', '--list', '-f', ltastubs._local_globus_file_path], stdout=subprocess.PIPE) - tar_file_list, err = tar.communicate() - self.assertEqual(tar.returncode, 0) - logger.info('file list in tar:\n%s', tar_file_list) - - for test_file_path in test_file_paths: - self.assertTrue(os.path.basename(test_file_path) in tar_file_list) - logger.info('all expected source files are in tar!') - - for f in os.listdir(test_dir_path): - os.remove(os.path.join(test_dir_path, f)) - os.removedirs(test_dir_path) - - def test_directory_with_odd_dataproduct_name(self): 
- #sometimes somebody has data in a odd directory - #and gives the dataproduct a different name than it's directory - try: - project_name = 'test-project' - obs_id = 987654321 - dpname = 'my_funky_dp_name' - test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1(), 'my_data_dir') - - def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): - #return unpecified sip with proper details - from lofar.lta.ingest.server.unspecifiedSIP import makeSIP - return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') - mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP - - os.makedirs(test_dir_path) - test_file_paths = [] - for i in range(10): - test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) - test_file_paths.append(test_file_path) + file.write(4096*'a') + + job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_file_path) + logger.info('job xml: %s', job_xml) + job = parseJobXml(job_xml) + + pl = IngestPipeline(job, self.momclient, self.ltaclient) + pl.run() + + except Exception as e: + self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) + finally: + # the 'stub-transfered' file ended up in out local stub lta + # with the path: ltastubs._local_globus_file_path + #check extension + self.assertEqual(os.path.splitext(test_file_path)[-1], + os.path.splitext(ltastubs._local_globus_file_path)[-1]) + + #compare with original + with open(test_file_path) as input, open(ltastubs._local_globus_file_path) as output: + self.assertEqual(input.read(), output.read()) + + for f in os.listdir(test_dir_path): + os.remove(os.path.join(test_dir_path, f)) + os.removedirs(test_dir_path) + + def test_h5_plus_raw_file(self): + #beam formed h5 files are always accompanied by a raw file + #these should be tarred togheter + try: + project_name = 'test-project' + 
obs_id = 987654321 + dpname = 'L%s_SAP000_SB000_bf.h5' % obs_id + rawname = dpname.replace('.h5', '.raw') + test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) + + def stub_GetStorageTicket(project, filename, filesize, archive_id, job_id, obs_id, check_mom_id=True, id_source='MoM'): + return { 'primary_uri_rnd': 'srm://some.site.name:8443/some/path/data/lofar/ops/projects/%s/%s/%s.tar' % (project, obs_id, dpname), + 'result': 'ok', + 'error': '', + 'ticket': '3E0A47ED860D6339E053B316A9C3BEE2'} + ltamock.GetStorageTicket.side_effect = stub_GetStorageTicket + + def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): + #return unpecified sip with proper details + from lofar.lta.ingest.server.unspecifiedSIP import makeSIP + return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') + mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP + + os.makedirs(test_dir_path) + test_file_path = os.path.join(test_dir_path, dpname) with open(test_file_path, 'w') as file: - file.write(1000*'a') - - job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_dir_path) - logger.info('job xml: %s', job_xml) - job = parseJobXml(job_xml) - - pl = IngestPipeline(job, self.momclient, self.ltaclient) - pl.run() - except Exception as e: - self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) - finally: - # the 'stub-transfered' file ended up in out local stub lta - # with the path: ltastubs._local_globus_file_path - #check extension - self.assertTrue('.tar' == os.path.splitext(ltastubs._local_globus_file_path)[-1]) - - #check tar contents - tar = subprocess.Popen(['tar', '--list', '-f', ltastubs._local_globus_file_path], stdout=subprocess.PIPE) - tar_file_list, err = tar.communicate() - self.assertEqual(tar.returncode, 0) - logger.info('file list in tar:\n%s', tar_file_list) 
- - for test_file_path in test_file_paths: - self.assertTrue(os.path.basename(test_file_path) in tar_file_list) - logger.info('all expected source files are in tar!') - - for f in os.listdir(test_dir_path): - os.remove(os.path.join(test_dir_path, f)) - os.removedirs(test_dir_path) + file.write(4096*'a') + raw_test_file_path = os.path.join(test_dir_path, dpname.replace('.h5', '.raw')) + with open(raw_test_file_path, 'w') as file: + file.write(4096*'b') + + job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_file_path) + logger.info('job xml: %s', job_xml) + job = parseJobXml(job_xml) + + pl = IngestPipeline(job, self.momclient, self.ltaclient) + pl.run() + + except Exception as e: + self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) + finally: + # the 'stub-transfered' file ended up in out local stub lta + # with the path: ltastubs._local_globus_file_path + #check extension + self.assertEqual('.tar', os.path.splitext(ltastubs._local_globus_file_path)[-1]) + + #check tar contents + tar = subprocess.Popen(['tar', '--list', '-f', ltastubs._local_globus_file_path], stdout=subprocess.PIPE) + tar_file_list, err = tar.communicate() + self.assertEqual(tar.returncode, 0) + logger.info('file list in tar:\n%s', tar_file_list) - - if __name__ == '__main__': - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', - level=logging.DEBUG) - unittest.main() + self.assertTrue(os.path.basename(test_file_path) in tar_file_list) + self.assertTrue(os.path.basename(raw_test_file_path) in tar_file_list) + logger.info('all expected source files are in tar!') + + os.remove(test_file_path) + os.remove(raw_test_file_path) + os.removedirs(test_dir_path) + + + def test_directory(self): + try: + project_name = 'test-project' + obs_id = 987654321 + dpname = 'L%s_SAP000_SB000_uv.MS' % obs_id + test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1(), dpname) + + def stub_GetStorageTicket(project, filename, filesize, 
archive_id, job_id, obs_id, check_mom_id=True, id_source='MoM'): + return { 'primary_uri_rnd': 'srm://some.site.name:8443/some/path/data/lofar/ops/projects/%s/%s/%s.tar' % (project, obs_id, dpname), + 'result': 'ok', + 'error': '', + 'ticket': '3E0A47ED860D6339E053B316A9C3BEE2'} + ltamock.GetStorageTicket.side_effect = stub_GetStorageTicket + + def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): + #return unpecified sip with proper details + from lofar.lta.ingest.server.unspecifiedSIP import makeSIP + return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') + mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP + + os.makedirs(test_dir_path) + test_file_paths = [] + for i in range(10): + test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) + test_file_paths.append(test_file_path) + with open(test_file_path, 'w') as file: + file.write(1000*'a') + + job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_dir_path) + logger.info('job xml: %s', job_xml) + job = parseJobXml(job_xml) + + pl = IngestPipeline(job, self.momclient, self.ltaclient) + pl.run() + except Exception as e: + self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) + finally: + # the 'stub-transfered' file ended up in out local stub lta + # with the path: ltastubs._local_globus_file_path + #check extension + self.assertTrue('.tar' == os.path.splitext(ltastubs._local_globus_file_path)[-1]) + + #check tar contents + tar = subprocess.Popen(['tar', '--list', '-f', ltastubs._local_globus_file_path], stdout=subprocess.PIPE) + tar_file_list, err = tar.communicate() + self.assertEqual(tar.returncode, 0) + logger.info('file list in tar:\n%s', tar_file_list) + + for test_file_path in test_file_paths: + self.assertTrue(os.path.basename(test_file_path) in tar_file_list) + logger.info('all 
expected source files are in tar!') + + for f in os.listdir(test_dir_path): + os.remove(os.path.join(test_dir_path, f)) + os.removedirs(test_dir_path) + + def test_directory_with_odd_dataproduct_name(self): + #sometimes somebody has data in a odd directory + #and gives the dataproduct a different name than it's directory + try: + project_name = 'test-project' + obs_id = 987654321 + dpname = 'my_funky_dp_name' + test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1(), 'my_data_dir') + + def stub_uploadDataAndGetSIP(archive_id, storage_ticket, filename, uri, filesize, md5_checksum, adler32_checksum, validate=True): + #return unpecified sip with proper details + from lofar.lta.ingest.server.unspecifiedSIP import makeSIP + return makeSIP(project_name, obs_id, archive_id, storage_ticket, filename, filesize, md5_checksum, adler32_checksum, 'TEST') + mommock.uploadDataAndGetSIP.side_effect = stub_uploadDataAndGetSIP + + os.makedirs(test_dir_path) + test_file_paths = [] + for i in range(10): + test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) + test_file_paths.append(test_file_path) + with open(test_file_path, 'w') as file: + file.write(1000*'a') + + job_xml = createJobXml(testname, 123456789, obs_id, dpname, 918273645, 'localhost:%s' % test_dir_path) + logger.info('job xml: %s', job_xml) + job = parseJobXml(job_xml) + + pl = IngestPipeline(job, self.momclient, self.ltaclient) + pl.run() + except Exception as e: + self.assertTrue(False, 'Unexpected exception in pipeline: %s' % e) + finally: + # the 'stub-transfered' file ended up in out local stub lta + # with the path: ltastubs._local_globus_file_path + #check extension + self.assertTrue('.tar' == os.path.splitext(ltastubs._local_globus_file_path)[-1]) + + #check tar contents + tar = subprocess.Popen(['tar', '--list', '-f', ltastubs._local_globus_file_path], stdout=subprocess.PIPE) + tar_file_list, err = tar.communicate() + self.assertEqual(tar.returncode, 0) + logger.info('file list in 
tar:\n%s', tar_file_list) + + for test_file_path in test_file_paths: + self.assertTrue(os.path.basename(test_file_path) in tar_file_list) + logger.info('all expected source files are in tar!') + + for f in os.listdir(test_dir_path): + os.remove(os.path.join(test_dir_path, f)) + os.removedirs(test_dir_path) + + + if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG) + unittest.main() except ConnectError as ce: logger.error(ce) diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.py index e2232779939aea5a69bcd2b7d6228c4d8725b134..6c239c4acdc15604765c6be849a239f8bbe3750c 100755 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_ltacp.py @@ -1,130 +1,147 @@ #!/usr/bin/env python +try: + import mock +except ImportError: + print 'Cannot run test without python MagicMock' + print 'Please install MagicMock: pip install mock' + exit(3) + import logging import unittest import uuid import os, os.path -import lofar.lta.ingest.server.ltacp as ltacp -import ltastubs -logger = logging.getLogger() +with mock.patch('lofar.lta.ingest.common.srm.convert_surl_to_turl', + new=lambda surl: surl.replace('srm', 'gsiftp')): -class TestLtaCp(unittest.TestCase): - def setUp(self): - ltastubs.stub() + import lofar.lta.ingest.server.ltacp as ltacp + import ltastubs - def tearDown(self): - ltastubs.un_stub() + logger = logging.getLogger() - def test_path_exists(self): - test_file_path = os.path.join(os.getcwd(), str(uuid.uuid1()), 'testfile.txt') - os.makedirs(os.path.dirname(test_file_path)) - with open(test_file_path, 'w') as file: - file.write(1000*'a') + class TestLtaCp(unittest.TestCase): + def setUp(self): + ltastubs.stub() - try: - cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') - self.assertTrue(cp.source_exists()) - 
except Exception as e: - self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) - finally: - os.remove(test_file_path) - - def test_path_mounted(self): - #first test with a valid path, the current working dir + some random dir + file - test_file_path = os.path.join(os.getcwd(), str(uuid.uuid1()), 'testfile.txt') - cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') - - #the path should not exists, but it should be mounted - self.assertFalse(cp.source_exists()) - self.assertTrue(cp.source_mounted()) - - #let's try to transfer this file, should not succeed, but raise an exception - try: + def tearDown(self): + ltastubs.un_stub() + + def test_path_exists(self): + test_file_path = os.path.join(os.getcwd(), str(uuid.uuid1()), 'testfile.txt') + os.makedirs(os.path.dirname(test_file_path)) + with open(test_file_path, 'w') as file: + file.write(1000*'a') + + try: + cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') + self.assertTrue(cp.source_exists()) + except Exception as e: + self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) + finally: + os.remove(test_file_path) + + def test_path_mounted(self): + #first test with a valid path, the current working dir + some random dir + file + test_file_path = os.path.join(os.getcwd(), str(uuid.uuid1()), 'testfile.txt') cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') - cp.transfer() - except ltacp.LtacpException as e: - logger.info('caught expected LtacpException: %s', e.value) - self.assertTrue('source path' in e.value and 'does not exist' in e.value) - except Exception as e: - self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) + #the path should not exists, but it should be mounted + self.assertFalse(cp.source_exists()) + self.assertTrue(cp.source_mounted()) - #repeat same test, but now with a non-mounted disk - test_file_path = '/non-existing-root-dir/dir1/dir2/file.txt' - cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') - 
self.assertFalse(cp.source_mounted()) + #let's try to transfer this file, should not succeed, but raise an exception + try: + cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') + cp.transfer() + except ltacp.LtacpException as e: + logger.info('caught expected LtacpException: %s', e.value) + self.assertTrue('source path' in e.value and 'does not exist' in e.value) + except Exception as e: + self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) - #let's try to transfer this file, should not succeed, but raise an exception - try: - cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') - cp.transfer() - except ltacp.LtacpException as e: - logger.info('caught expected LtacpException: %s', e.value) - self.assertTrue('the disk of source path' in e.value and 'does not seem to be mounted' in e.value) - except Exception as e: - self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) - - def test_single_file(self): - test_file_path = os.path.join(os.getcwd(), str(uuid.uuid1()), 'testfile.txt') - os.makedirs(os.path.dirname(test_file_path)) - with open(test_file_path, 'w') as file: - file.write(1000*'a') - - try: - cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') - md5cs, a32cs, fs = cp.transfer() - #it suffices to check only the filesize as transfer result - #if the checksums whould have been different between source, local, and/or 'lta' - #then an exception would have been raised, and that is asserted below - self.assertEqual(1000, int(fs)) - except Exception as e: - self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) - finally: - os.remove(test_file_path) - - def test_multiple_files(self): - test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) - os.makedirs(test_dir_path) - test_file_paths = [] - for i in range(10): - test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) - with open(test_file_path, 'w') as file: - file.write(1000*'a') - if i%2==0: #only transfer 
half the files in the directory - test_file_paths.append(test_file_path) - - try: - cp = ltacp.LtaCp('localhost', test_file_paths, 'srm://fake_surl') - md5cs, a32cs, fs = cp.transfer() - except Exception as e: - self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) - finally: - for f in os.listdir(test_dir_path): - os.remove(os.path.join(test_dir_path, f)) - os.removedirs(test_dir_path) - - def test_directory(self): - test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) - os.makedirs(test_dir_path) - for i in range(10): - test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) + #repeat same test, but now with a non-mounted disk + test_file_path = '/non-existing-root-dir/dir1/dir2/file.txt' + cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') + self.assertFalse(cp.source_mounted()) + + #let's try to transfer this file, should not succeed, but raise an exception + try: + cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') + cp.transfer() + except ltacp.LtacpException as e: + logger.info('caught expected LtacpException: %s', e.value) + self.assertTrue('the disk of source path' in e.value and 'does not seem to be mounted' in e.value) + except Exception as e: + self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) + + def test_single_file(self): + test_file_path = os.path.join(os.getcwd(), str(uuid.uuid1()), 'testfile.txt') + os.makedirs(os.path.dirname(test_file_path)) with open(test_file_path, 'w') as file: file.write(1000*'a') - try: - cp = ltacp.LtaCp('localhost', test_dir_path, 'srm://fake_surl') - md5cs, a32cs, fs = cp.transfer() - except Exception as e: - self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) - finally: - for f in os.listdir(test_dir_path): - os.remove(os.path.join(test_dir_path, f)) - os.removedirs(test_dir_path) - - -if __name__ == '__main__': - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', - level=logging.DEBUG) - 
unittest.main() + try: + cp = ltacp.LtaCp('localhost', test_file_path, 'srm://fake_surl') + md5cs, a32cs, fs = cp.transfer() + #it suffices to check only the filesize as transfer result + #if the checksums whould have been different between source, local, and/or 'lta' + #then an exception would have been raised, and that is asserted below + self.assertEqual(1000, int(fs)) + except Exception as e: + logger.exception(e) + self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) + finally: + os.remove(test_file_path) + + def test_multiple_files(self): + test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) + os.makedirs(test_dir_path) + test_file_paths = [] + for i in range(10): + test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) + with open(test_file_path, 'w') as file: + file.write(1000*'a') + + if i%2==0: #only transfer half the files in the directory + test_file_paths.append(test_file_path) + + try: + cp = ltacp.LtaCp('localhost', test_file_paths, 'srm://fake_surl') + md5cs, a32cs, fs = cp.transfer() + except Exception as e: + self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) + finally: + for f in os.listdir(test_dir_path): + os.remove(os.path.join(test_dir_path, f)) + os.removedirs(test_dir_path) + + def test_directory(self): + test_dir_path = os.path.join(os.getcwd(), 'testdir_%s' % uuid.uuid1()) + os.makedirs(test_dir_path) + for i in range(10): + test_file_path = os.path.join(test_dir_path, 'testfile_%s.txt' % i) + with open(test_file_path, 'w') as file: + file.write(1000*'a') + + try: + cp = ltacp.LtaCp('localhost', test_dir_path, 'srm://fake_surl') + md5cs, a32cs, fs = cp.transfer() + except Exception as e: + self.assertTrue(False, 'Unexpected exception in transfer: %s' % e) + finally: + for f in os.listdir(test_dir_path): + os.remove(os.path.join(test_dir_path, f)) + os.removedirs(test_dir_path) + + + if __name__ == '__main__': + from subprocess import call + if call(['ssh', '-o', 
'PasswordAuthentication=no', '-o', 'PubkeyAuthentication=yes', '-o', 'ConnectTimeout=1', 'localhost', 'true']) != 0: + print 'this test depends on keybased ssh login to localhost, which is not setup correctly. skipping test...' + exit(3) + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG) + unittest.main() diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.py new file mode 100755 index 0000000000000000000000000000000000000000..52b6e999cacca9ddfdb6469624e081124e8e9e30 --- /dev/null +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python + +import logging +import unittest +from lofar.lta.ingest.server.sip import * +from lofar.lta.ingest.server.unspecifiedSIP import * + +logger = logging.getLogger(__file__) + +class TestSIP(unittest.TestCase): + def test_valid_SIP(self): + sip = makeSIP('project', 123456, 234567, 'abc-123', 'foo.txt', 0, '', '', 'TEST', 'LofarStorageManager', 'Unknown') + logger.info(sip) + self.assertTrue(validateSIPAgainstSchema(sip)) + + def test_invalid_SIP_with_incorrect_storageWriter(self): + sip = makeSIP('project', 123456, 234567, 'abc-123', 'foo.txt', 0, '', '', 'TEST', 'incorrect-storageWriter', 'Unknown') + logger.info(sip) + self.assertFalse(validateSIPAgainstSchema(sip)) + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG) + unittest.main() diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.sh b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.sh new file mode 100755 index 0000000000000000000000000000000000000000..cd79949a26bb063be9947ea1fd6a89592f3f54b0 --- /dev/null +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/test/t_sip.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_sip diff --git 
a/LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/lib/ingestwebserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/lib/ingestwebserver.py index 65458fa7e7a82403e2f712f98d8f4b95619167b9..3a1bea6f8f2463e7f22768c35f35c78c5fa116b3 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/lib/ingestwebserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestWebServer/lib/ingestwebserver.py @@ -87,7 +87,7 @@ def index(): nr_of_jobs_in_queue += status_dict['jobs']['scheduled'] nr_of_jobs_in_queue += status_dict['jobs']['retry'] - body = '''<p style="max-width: 1400px; margin: auto; margin-bottom: 12px; text-align: right;">Help and monitoring: <a href="https://www.astron.nl/lofarwiki/doku.php?id=engineering:software:ingest_services#faq_support" target=_blank>Ingest FAQ</a> / <a href="https://proxy.lofar.eu/zabbix/screens.php?sid=3ffcb45c82da9d9d&form_refresh=1&fullscreen=0&elementid=25&groupid=0&hostid=0" target=_blank>Zabbix ingest network transfer speeds</a> / <a href="https://lofar.astron.nl/birt-viewer/frameset?__report=Ingest.rptdesign&sample=my+parameter" target=_blank>MoM BIRT view of exports</a> / <a href="http://web.grid.sara.nl/cgi-bin/lofar.py" target=_blank>SARA maintenance</a></p>''' + body = '''<p style="max-width: 1400px; margin: auto; margin-bottom: 12px; text-align: right;">Help and monitoring: <a href="https://www.astron.nl/lofarwiki/doku.php?id=engineering:software:ingest_services#faq_support" target=_blank>Ingest FAQ</a> / <a href="https://proxy.lofar.eu/zabbix/screens.php?sid=3ffcb45c82da9d9d&form_refresh=1&fullscreen=0&elementid=25&groupid=0&hostid=0" target=_blank>Zabbix ingest network transfer speeds</a> / <a href="https://lofar.astron.nl/birt-viewer/frameset?__report=Ingest.rptdesign&sample=my+parameter" target=_blank>MoM BIRT view of exports</a> / <a href="http://web.grid.sara.nl/cgi-bin/lofar.py" target=_blank>SARA maintenance</a> / <a href="http://scu001.control.lofar:9632/" target=_blank>LTA storage overview</a></p>''' body += '''<p 
style="max-width: 1400px; margin: auto; margin-bottom: 8px; font-size: 16px; font-weight: bold">Total #jobs waiting in queue: %s</p>''' % nr_of_jobs_in_queue body += '''<table>''' diff --git a/LTA/ltastorageoverview/CMakeLists.txt b/LTA/ltastorageoverview/CMakeLists.txt index 1d1e0e0ad791ee647f02f40141b8c56b654a376e..3ec201ad5d387e976f5aef66eac0b95232206244 100644 --- a/LTA/ltastorageoverview/CMakeLists.txt +++ b/LTA/ltastorageoverview/CMakeLists.txt @@ -1,10 +1,9 @@ # $Id$ lofar_find_package(Python 2.6 REQUIRED) -lofar_package(ltastorageoverview 0.1 DEPENDS PyCommon) +lofar_package(ltastorageoverview 0.1 DEPENDS PyCommon LTAIngestClient) include(PythonInstall) -set(USE_PYTHON_COMPILATION Off) add_subdirectory(lib) add_subdirectory(bin) diff --git a/LTA/ltastorageoverview/bin/CMakeLists.txt b/LTA/ltastorageoverview/bin/CMakeLists.txt index a4da3191e54bdaa5f89b730305844cb9df51c098..49bf80f19d757bd11aee07bacaa8c71da3f2e514 100644 --- a/LTA/ltastorageoverview/bin/CMakeLists.txt +++ b/LTA/ltastorageoverview/bin/CMakeLists.txt @@ -1,5 +1,11 @@ # $Id$ -install(PROGRAMS - ltastorageoverviewscraper - DESTINATION bin) +lofar_add_bin_scripts(ltastorageoverviewscraper + ltastorageoverviewreport + ltastorageoverviewwebservice) + +# supervisord config files +install(FILES + ltastorageoverviewscraper.ini + ltastorageoverviewwebservice.ini + DESTINATION etc/supervisord.d) diff --git a/LTA/ltastorageoverview/bin/ltastorageoverviewreport b/LTA/ltastorageoverview/bin/ltastorageoverviewreport new file mode 100755 index 0000000000000000000000000000000000000000..53f9a0b86aa4d69d55d1bec43f72dc06ef7ef6a2 --- /dev/null +++ b/LTA/ltastorageoverview/bin/ltastorageoverviewreport @@ -0,0 +1,25 @@ +#!/usr/bin/python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from lofar.lta.ltastorageoverview import report + +''' Starts the scraper from ltastorageoverview''' +if __name__ == "__main__": + report.main() + diff --git a/LTA/ltastorageoverview/bin/ltastorageoverviewscraper b/LTA/ltastorageoverview/bin/ltastorageoverviewscraper old mode 100644 new mode 100755 index 94397565639a41e9678e820559089510c1be45b0..88951d48f38b02870438fa6b60ad2b57b8563574 --- a/LTA/ltastorageoverview/bin/ltastorageoverviewscraper +++ b/LTA/ltastorageoverview/bin/ltastorageoverviewscraper @@ -18,9 +18,9 @@ # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
import sys -from ltastorageoverview import scraper +from lofar.lta.ltastorageoverview import scraper ''' Starts the scraper from ltastorageoverview''' if __name__ == "__main__": - scraper.main(sys.argv[1:]) + scraper.main() diff --git a/LTA/ltastorageoverview/bin/ltastorageoverviewscraper.ini b/LTA/ltastorageoverview/bin/ltastorageoverviewscraper.ini new file mode 100644 index 0000000000000000000000000000000000000000..12a7e1e7c3923f7915819e5dca646984cb2b3179 --- /dev/null +++ b/LTA/ltastorageoverview/bin/ltastorageoverviewscraper.ini @@ -0,0 +1,8 @@ +[program:ltastorageoverviewscraper] +command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec ltastorageoverviewscraper --parallel 32' +user=lofarsys +stopsignal=INT ; KeyboardInterrupt +stopasgroup=true ; bash does not propagate signals +stdout_logfile=%(program_name)s.log +redirect_stderr=true +stderr_logfile=NONE diff --git a/LTA/ltastorageoverview/bin/ltastorageoverviewwebservice b/LTA/ltastorageoverview/bin/ltastorageoverviewwebservice new file mode 100755 index 0000000000000000000000000000000000000000..0a0d9dc4b4c2a0eac9029ad6d9f39a5a1983195b --- /dev/null +++ b/LTA/ltastorageoverview/bin/ltastorageoverviewwebservice @@ -0,0 +1,26 @@ +#!/usr/bin/python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +import sys +from lofar.lta.ltastorageoverview.webservice import webservice + +''' Starts the webservice from ltastorageoverview''' +if __name__ == "__main__": + webservice.main() + diff --git a/LTA/ltastorageoverview/bin/ltastorageoverviewwebservice.ini b/LTA/ltastorageoverview/bin/ltastorageoverviewwebservice.ini new file mode 100644 index 0000000000000000000000000000000000000000..a644588fc8233da3a875ecca1d56e58593968ecd --- /dev/null +++ b/LTA/ltastorageoverview/bin/ltastorageoverviewwebservice.ini @@ -0,0 +1,8 @@ +[program:ltastorageoverviewwebservice] +command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec ltastorageoverviewwebservice' +user=lofarsys +stopsignal=INT ; KeyboardInterrupt +stopasgroup=true ; bash does not propagate signals +stdout_logfile=%(program_name)s.log +redirect_stderr=true +stderr_logfile=NONE diff --git a/LTA/ltastorageoverview/doc/lta_storage_overview.md b/LTA/ltastorageoverview/doc/lta_storage_overview.md index 260d6f5f6ea09678965ed263b39a5b05bdc05a48..756e4810268804817e0fb7e5f6ea7a9ec991fd38 100644 --- a/LTA/ltastorageoverview/doc/lta_storage_overview.md +++ b/LTA/ltastorageoverview/doc/lta_storage_overview.md @@ -1,3 +1,150 @@ # LTA Storage Overview {#lta_storage_overview} -... +## General + +### Description/Summary + +For the Lofar LTA we have the [LTA catalogue](https://lta.lofar.eu/) which gives an overview of all described dataproducts in the LTA. There are however (quite a lot of) files in the LTA which are not described in the [LTA catalogue](https://lta.lofar.eu/). +Apart from that, we would like to have an overview of all files/directories on tape/disk in the LTA, and relate that to the current quota which we get each year from SARA, Juelich and Poznan. 
+ +So, the LTA Storage Overview services provides the following features: + - gather information from the LTA at *file level* for each and every file in the LTA, even those which are not in the catalogue. (via ltastorageoverviewscraper) + - provide RO/SOS with 'du-like' information on the available and used storage per site. (via ltastorageoverviewwebservice) + + +It uses [srm](https://sdm.lbl.gov/srm-wg/documents.html) + our grid certificates from the lexars to gather this info. The file/directory tree is stored in a database ('ltaso' at ldb003.control.lofar, and exposed via a simple overview website http://scu001.control.lofar:9632 + +### Authors/Owners + +- Jorrit Schaap <mailto:schaap@astron.nl> + +### Overview + +There are 2 services which run individually on the scu001 under supervisord. +Furthermore the 2 services both use one postgres database ('ltaso' at ldb003.control.lofar) to store and retrieve the information. +- service ltastorageoverviewscraper: + - This service runs in the background and "scrapes" information from the LTA sites using srmls (via ssh calls to lexar003/lexar004, because only the lexars have grid access and certificates). + - The gathered information about files and directories is stored in the ltaso database. + - It keeps track of when each directory is visited, and plans a revisit once in a while. + - It listens for events from [Ingest](@ref lta_ingest) to schedule a scraper visit for each new directory that an ingest job creates. +- service ltastorageoverviewwebservice: + - Very simple (and slow...) 
python flask webservice which generates one webpage with an overview of: + - amount of data stored at each site (trend, delta/month, pie chart) + - amount of quota used + - amount of free space left + +- - - + +## DEVELOPMENT + +### Analyses +This project originated from the need by SOS to have an overview of: +- what is in the LTA at *file level* (because not every file is in the [LTA catalogue](https://lta.lofar.eu/)) +- set quota per year per LTA site. +- summarize tape usage (in (peta)bytes) per site. +- have insight in free tape space per site until the end of the quota period. +- A future requirement might be to have an (REST?) API to query for certain projects/sites/quotas/timespans etc. + +Before this package ltastorageoverview existed, we generated similar overviews using srm to do a tree walk on the LTA sites, but nowadays with a large LTA this takes more than a week to complete. So we needed a background process which does the tree walk, and stores the information in a database. The scraper service was based on this original script/idea. + +### Design +- The software needs to run in the background (standard lofar solution: service under supervisord) +- The information needs to be instantaneously retrievable (so, use a database. standard lofar database: postgres) +- Website can be simple (and slow) for now, so in this first phase we chose python flask. + +### Source Code +- [LTA Storage Overview in SVN](https://svn.astron.nl/LOFAR/trunk/LTA/ltastorageoverview/) +- [LTA Storage Overview Code Documentation](@ref lta_storage_overview) + +### Testing + +#### Unit Testing + +Unit tests are available in: + <source-root>/LTA/ltastorageoverview/test + +The tests cover: +- the creation of the ltaso database +- inserts of sites, files and directories +- checks on site and directory statistics +- a minor webservice test + +#### Integration Testing + +There are no integration tests since these services operate independently from other lofar software.
+The 2 services work on the same (shared) database, so there is some integration there, which is tested in the unittests. + +#### Build & Deploy + +##### Build locally + + svn co https://svn.astron.nl/LOFAR/<some_branch_or_trunk> <my_source_dir> + cd <my_source_dir> + mkdir -p build/gnu_debug + cd build/gnu_debug + cmake -DBUILD_PACKAGES=ltastorageoverview -DCMAKE_INSTALL_PREFIX=/opt/lofar/ ../.. + cd ../.. + make + make install + +##### Build using Jenkins + +1. Open [the generic CentOS7 LOFAR SubSystems Jenkins project](https://support.astron.nl/jenkins/view/LOFAR%20Subsystems/view/Subsystems%20builds/job/Subsystems_CentOS7/build?delay=0sec) +2. Select buildhost (defaults to correct buildhostcentos7) +3. Select the branch you want to build: + - For a release/rollout: Select the latest release tag + - For a (test) build of a branch: select any branch you like (for example the one you are working on) +4. Set the MINOR_RELEASE_NR (should be equal to tag minor version number for release/rollout build) +5. Select SubSystem: RAServices (which should be named SCU because it's more services now than just resource assigner services) +6. Click "Build" button, wait, build should finish successfully. + +##### Deploy / SubSystems + +The lofar package 'ltastorageoverview' is part of the RAServices subsystems package. So building and deploying the standard RAServices package for deployment on scu001 automatically gives you the ltastorageoverview services on scu001 as well. + +- - - + +## OPERATIONS + +### Configuration +- There are no configuration files, except from the standard supervisord ini files. +- Both services come with a -h or --help option which explains the available options. + +### Log Files +- Log files are located in the standard location.
In this specific case, you can find ltastorageoverviewscraper.log and ltastorageoverviewwebservice.log in scu001.control.lofar:/opt/lofar/var/log/ + +### Runtime +- the services run under supervisord on host scu001.control.lofar +- There is no need to run these services manually from the commandline. (There is no harm in doing so either, even when the services already run under supervisord). +- It is perfectly safe to stop/start/restart the services at any time. Really, no harm is done. All information is always stored in the database. + +### Interfaces (API) +- These services run standalone and have no external API. +- These services are not connected to the qpid messagebus. +- There is a start for a simple REST API in the webservice, but that's only for testing/development purposes. Might be improved when needed by SOS. +- The only user interface is the website: http://scu001.control.lofar:9632 + +### Files/Databases +- A single postgres 9.3+ database called 'ltaso' is used, which runs on ldb003.control.lofar +- A database create sql script is deployed (along with the python packages) in /opt/lofar/share/ltaso +- the ltaso database login credentials are stored in the standard lofar credentials location: ~/.lofar/dbcredentials/ltaso.ini +- No other files and/or databases are needed. + +### Dependencies +- dependencies on 3rd party Python packages + - python-flask + - psycopg2 +- dependencies on LTA software + - the scraper uses srmls to get file/directory information from the LTA sites. It just uses the srm tools and the grid certificates from [Ingest](@ref lta_ingest) via ssh calls to lexar003/lexar004. +- dependencies on network: + - a working ssh connection with key-based logging for lofarsys from scu001 to ingest@lexar003 or ingest@lexar004 +- dependencies on QPID: + - the scraper listens for events from [Ingest](@ref lta_ingest) via qpid. 
+ - the exchange 'lofar.lta.ingest.notification' is federated from lexar003 to scu001 (so all services on scu001 can listen for ingest events) + - the exchange 'lofar.lta.ingest.notification' on scu001 is routed to queue 'lofar.lta.ingest.notification.for.ltastorageoverview' on which the scraper listens. We use a dedicated queue for the scraper so that no events are lost, and all ingested data is found as quickly as possible by a scraper visit. + +### Security +- It is assumed that the grid certificates for user 'ingest' on lexar003/lexar004 are in place and valid. If not, contact holties@astron.nl or schaap@astron.nl +- the ltaso database login credentials are stored in the standard lofar credentials location: ~/.lofar/dbcredentials/ltaso.ini + + diff --git a/LTA/ltastorageoverview/lib/CMakeLists.txt b/LTA/ltastorageoverview/lib/CMakeLists.txt index 799e321a88c440b2c93d3ee163549d682df001d9..8f6241c4ebb8b1670c670960273755e2c2c36aba 100644 --- a/LTA/ltastorageoverview/lib/CMakeLists.txt +++ b/LTA/ltastorageoverview/lib/CMakeLists.txt @@ -4,16 +4,19 @@ python_install( __init__.py scraper.py store.py - create_db_ltastorageoverview.sql report.py + ingesteventhandler.py webservice/webservice.py webservice/__init__.py - DESTINATION ltastorageoverview) + DESTINATION lofar/lta/ltastorageoverview) + +set(sql_files ltaso/create_db_ltastorageoverview.sql) +lofar_add_data_files(${sql_files}) set(web_files webservice/templates/index.html) install(FILES ${web_files} - DESTINATION ${PYTHON_INSTALL_DIR}/ltastorageoverview/webservice/templates/) + DESTINATION ${PYTHON_INSTALL_DIR}/lofar/lta/ltastorageoverview/webservice/templates/) foreach(web_file ${web_files}) get_filename_component(web_file_path ${web_file} PATH) diff --git a/LTA/ltastorageoverview/lib/create_db_ltastorageoverview.sql b/LTA/ltastorageoverview/lib/create_db_ltastorageoverview.sql deleted file mode 100644 index 4a87ff6421107b259895017e7b4c0123f9b225d1..0000000000000000000000000000000000000000 --- 
a/LTA/ltastorageoverview/lib/create_db_ltastorageoverview.sql +++ /dev/null @@ -1,204 +0,0 @@ -/* -# Copyright (C) 2012-2015 asTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. -*/ - --- $Id$ - --- sqlite3 create script for ltastorageoverview database - -PRAGMA foreign_keys = ON; - -create table storage_site ( - id integer primary key autoincrement unique not null, - name text unique not null, - url text not null); - -create index ss_name_idx on storage_site(name); - -create table directory ( - id integer primary key autoincrement unique not null, - name text key not null COLLATE NOCASE, - parent_directory_id integer, - foreign key (parent_directory_id) references directory(id) ); - -create table directory_closure ( - ancestor_id integer not null, - descendant_id integer not null, - depth integer not null, - primary key (ancestor_id, descendant_id) - foreign key (ancestor_id) references directory(id) - foreign key (descendant_id) references directory(id) ); - -create index dc_ancestor_id_idx on directory_closure(ancestor_id); -create index dc_descendant_id_idx on directory_closure(descendant_id); -create index dc_depth_idx on directory_closure(depth); - -create trigger directory_closure_trigger - 
after insert on directory - begin - insert into directory_closure (ancestor_id, descendant_id, depth) values (new.id, new.id, 0) ; - - insert into directory_closure (ancestor_id, descendant_id, depth) - select p.ancestor_id, c.descendant_id, p.depth+c.depth+1 - from directory_closure p, directory_closure c - where p.descendant_id=new.parent_directory_id and c.ancestor_id=new.id ; - end; - -create table storage_site_root ( - storage_site_id integer not null, - directory_id integer not null, - primary key (storage_site_id, directory_id), - foreign key (storage_site_id) references storage_site(id), - foreign key (directory_id) references directory(id) ); - -create index ssr_storage_site_id_idx on storage_site_root(storage_site_id); -create index ssr_directory_id_idx on storage_site_root(directory_id); - -create table fileinfo ( - id integer primary key autoincrement not null, - name text key not null, - size integer not null, - creation_date datetime not null, - directory_id integer not null, - foreign key (directory_id) references directory(id) ); - -create index fi_directory_id_idx on fileinfo(directory_id); -create index fi_creation_date_idx on fileinfo(creation_date); - -create table directory_stats ( - id integer primary key autoincrement unique not null, - directory_id integer unique not null, - num_files integer, - total_file_size integer, - min_file_size integer, - max_file_size integer, - min_file_creation_date datetime, - max_file_creation_date datetime, - foreign key (directory_id) references directory(id) ); - -create index ds_directory_id_idx on directory_stats(directory_id); -create index ds_min_file_creation_date_idx on directory_stats(min_file_creation_date); -create index ds_max_file_creation_date_idx on directory_stats(max_file_creation_date); - -create table _temp_fileinfo_for_dir_stats ( - size integer not null, - creation_date datetime not null ); - -create trigger fileinfo_to_directory_stats_trigger - after insert on fileinfo - begin - insert or 
ignore into directory_stats (directory_id) - values (new.directory_id) ; - - delete from _temp_fileinfo_for_dir_stats ; - - insert into _temp_fileinfo_for_dir_stats - select fileinfo.size, fileinfo.creation_date from fileinfo - where directory_id = new.directory_id ; - - update directory_stats set - num_files=(select count(size) from _temp_fileinfo_for_dir_stats), - total_file_size=(select sum(size) from _temp_fileinfo_for_dir_stats), - min_file_size=(select min(size) from _temp_fileinfo_for_dir_stats), - max_file_size=(select max(size) from _temp_fileinfo_for_dir_stats), - min_file_creation_date=(select min(creation_date) from _temp_fileinfo_for_dir_stats), - max_file_creation_date=(select max(creation_date) from _temp_fileinfo_for_dir_stats) - where directory_id = new.directory_id ; - end; - -create table project ( - id integer primary key autoincrement unique not null, - name text unique not null); - -create index project_name_idx on project(name); - -create table project_top_level_directory ( - project_id integer, - directory_id integer, - primary key (project_id, directory_id) - foreign key (project_id) references project(id) - foreign key (directory_id) references directory(id) ); - - - -create table scraper_last_directory_visit ( - directory_id integer not null, - visit_date datetime not null, - primary key (directory_id) - foreign key (directory_id) references directory(id) ); - -create view root_directories as - select dir.id as dir_id, dir.name as dir_name, ss.id as site_id, ss.name as site_name - from storage_site_root - join directory dir on dir.id = storage_site_root.directory_id - join storage_site ss on ss.id = storage_site_root.storage_site_id ; - -create view site_directory_tree as - select rootdir.site_id as site_id, - rootdir.site_name as site_name, - rootdir.dir_id as rootdir_id, - rootdir.dir_name as rootdir_name, - dir.id as dir_id, - dir.name as dir_name, - dir.parent_directory_id as parent_directory_id, - dc.depth as depth - from 
root_directories rootdir - inner join directory_closure dc on dc.ancestor_id = rootdir.dir_id - inner join directory dir on dc.descendant_id = dir.id ; - -create view site_scraper_last_directoy_visit as - select rootdir.site_id as site_id, - rootdir.site_name as site_name, - dir.id as dir_id, - dir.name as dir_name, - sldv.visit_date as last_visit - from root_directories rootdir - inner join directory_closure dc on dc.ancestor_id = rootdir.dir_id - inner join directory dir on dc.descendant_id = dir.id - inner join scraper_last_directory_visit sldv on sldv.directory_id = dir.id ; - -create view site_directory_file as - select site.id as site_id, - site.name as site_name, - dir.id as dir_id, - dir.name as dir_name, - fileinfo.id as file_id, - fileinfo.name as file_name, - fileinfo.size as file_size, - fileinfo.creation_date as file_creation_date - from storage_site site - join storage_site_root on storage_site_root.storage_site_id = site.id - inner join directory_closure dc on dc.ancestor_id = storage_site_root.directory_id - inner join directory dir on dc.descendant_id = dir.id - inner join fileinfo on fileinfo.directory_id = dir.id ; - -create view project_directory as - select - project.id as project_id, - project.name as project_name, - dir.id as dir_id, - dir.name as dir_name - from project_top_level_directory - inner join project on project.id = project_top_level_directory.project_id - inner join directory_closure dc on dc.ancestor_id = project_top_level_directory.directory_id - inner join directory dir on dc.descendant_id = dir.id ; - -create view project_directory_stats as - select * from project_directory - inner join directory_stats ds on ds.directory_id = project_directory.dir_id ; - diff --git a/LTA/ltastorageoverview/lib/ingesteventhandler.py b/LTA/ltastorageoverview/lib/ingesteventhandler.py new file mode 100755 index 0000000000000000000000000000000000000000..5cb762ea3fbdf6b49c758d482b0e67188218a100 --- /dev/null +++ 
b/LTA/ltastorageoverview/lib/ingesteventhandler.py @@ -0,0 +1,110 @@ +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +from lofar.lta.ltastorageoverview import store +from lofar.lta.ingest.common.srm import * +from lofar.lta.ingest.client.ingestbuslistener import IngestBusListener +from lofar.lta.ingest.common.config import DEFAULT_INGEST_NOTIFICATION_SUBJECTS +from lofar.lta.ingest.common.config import DEFAULT_BROKER +from lofar.messaging import adaptNameToEnvironment + +import logging +logger = logging.getLogger(__name__) + +DEFAULT_INGEST_NOTIFICATION_QUEUE = adaptNameToEnvironment('lofar.lta.ingest.notification.for.ltastorageoverview') + + +class IngestEventHandler(IngestBusListener): + def __init__(self, dbcreds, + busname=DEFAULT_INGEST_NOTIFICATION_QUEUE, + subjects=DEFAULT_INGEST_NOTIFICATION_SUBJECTS, + broker=DEFAULT_BROKER): + self._dbcreds = dbcreds + super(IngestEventHandler, self).__init__(busname=busname, subjects=subjects, broker=broker) + + def onJobFinished(self, job_dict): + """onJobFinished is called upon receiving a JobFinished message. 
+ In this IngestEventHandler, it calls _schedule_srmurl_for_visit to schedule the finished surl for a scraper visit. + :param job_dict: dictionary with the finished job""" + self._logJobNotification('finished', job_dict) + self._schedule_srmurl_for_visit(job_dict.get('srm_url')) + + def onTaskFinished(self, task_dict): + """onTaskFinished is called upon receiving a TaskFinished message. (when all dataproducts of an observation/pipeline were ingested) + In this IngestEventHandler, it calls _schedule_srmurl_for_visit to schedule the finished surl for a scraper visit. + :param task_dict: dictionary with the finished task""" + self._logJobNotification('task finised', task_dict) + self._schedule_srmurl_for_visit(task_dict.get('srm_url')) + + def _schedule_srmurl_for_visit(self, srm_url): + """process the given srm_url, insert it in the db if needed, and mark it as not visited, + so that the scraper will visit it soon. + :param srm_url: a valid srm url like: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + :return: None + """ + if srm_url: + with store.LTAStorageDb(self._dbcreds) as db: + site = self._get_site_from_db(srm_url) + dir_path = get_dir_path_in_site(srm_url) + directory = db.directoryByName(dir_path, site['id']) + + if directory is None: + dir_id = self._insert_missing_directory_tree_if_needed(srm_url).get(dir_path) + else: + dir_id = directory.get('dir_id') + + if dir_id is not None: + self._mark_directory_for_a_visit(dir_id) + + def _mark_directory_for_a_visit(self, dir_id): + """ + update the directory's last visit time to unix-epoch (which is the lowest possible visit timestamp), so that it + appears in the visitStats which are used by the scraper to determine the next directory to be visited.
+ :param int dir_id: the id of the directory + :return: None + """ + with store.LTAStorageDb(self._dbcreds) as db: + return db.updateDirectoryLastVisitTime(dir_id, datetime.fromtimestamp(0)) + + def _get_site_from_db(self, srm_url): + """ + find the site entry in the database for the given srm_url. + raises a lookup error if not found. + :param string srm_url: a valid srm url + :return: a site entry dict from the database + """ + site_url = get_site_surl(srm_url) + + # find site in db + with store.LTAStorageDb(self._dbcreds) as db: + site = next((s for s in db.sites() if s['url'] == site_url), None) + if site is None: + raise LookupError('Could not find site %s in database %s' % (site_url, self._dbcreds.database)) + return site + + def _insert_missing_directory_tree_if_needed(self, srm_url): + # example url: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884/L652884_SAP000_B000_P001_bf_e619e5da.tar + # or for a dir: srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + # site_url then becomes: srm://lofar-srm.fz-juelich.de:8443 + # dir_path then becomes: /pnfs/fz-juelich.de/data/lofar/ops/projects/lc8_029/652884 + site = self._get_site_from_db(srm_url) + dir_path = get_dir_path_in_site(srm_url) + + with store.LTAStorageDb(self._dbcreds) as db: + return db.insert_missing_directory_tree_if_needed(dir_path, site['id']) diff --git a/LTA/ltastorageoverview/lib/ltaso/create_db_ltastorageoverview.sql b/LTA/ltastorageoverview/lib/ltaso/create_db_ltastorageoverview.sql new file mode 100644 index 0000000000000000000000000000000000000000..ed68325dce5accadfaec07634cf4933fb213648b --- /dev/null +++ b/LTA/ltastorageoverview/lib/ltaso/create_db_ltastorageoverview.sql @@ -0,0 +1,952 @@ +/* +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +*/ + +-- $Id$ + +-- postgresql create script for ltastorageoverview database + +-- run from command line as: +-- psql ltaso -f create_db_ltastorageoverview.sql -W + +-- \set VERBOSITY terse + +BEGIN; + +DROP SCHEMA IF EXISTS lta CASCADE; +DROP SCHEMA IF EXISTS scraper CASCADE; +DROP SCHEMA IF EXISTS metainfo CASCADE; + +CREATE SCHEMA lta; +CREATE SCHEMA scraper; +CREATE SCHEMA metainfo; + +-- TABLES + +CREATE TABLE lta.site ( + id serial, + name text UNIQUE NOT NULL, + url text UNIQUE NOT NULL, + PRIMARY KEY (id) +) WITH (OIDS=FALSE); + +CREATE INDEX ss_name_idx on lta.site(name); + +CREATE TABLE lta.directory ( + id serial, + name text NOT NULL, + parent_dir_id integer REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + PRIMARY KEY (id), + UNIQUE (name, parent_dir_id) +) WITH (OIDS=FALSE); + +CREATE INDEX d_parent_dir_id_idx on lta.directory(parent_dir_id); +CREATE INDEX d_name_idx on lta.directory(name); + +CREATE TABLE lta.directory_closure ( + ancestor_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + descendant_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + depth integer NOT NULL, + primary key (ancestor_id, descendant_id) +) WITH (OIDS=FALSE); + +CREATE INDEX 
dc_ancestor_id_idx on lta.directory_closure(ancestor_id); +CREATE INDEX dc_descendant_id_idx on lta.directory_closure(descendant_id); +CREATE INDEX dc_depth_idx on lta.directory_closure(depth); + +CREATE TABLE lta.fileinfo ( + id serial, + name text NOT NULL, + size bigint NOT NULL, + creation_date timestamp without time zone NOT NULL, + dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + PRIMARY KEY (id), + UNIQUE (name, dir_id) +) WITH (OIDS=FALSE); + +CREATE INDEX fi_dir_id_idx on lta.fileinfo(dir_id); +CREATE INDEX fi_creation_date_idx on lta.fileinfo(creation_date); +CREATE INDEX fi_name_idx on lta.fileinfo(name); + +CREATE TABLE lta.site_root_dir ( + site_id integer NOT NULL REFERENCES lta.site ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, + root_dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + primary key (site_id, root_dir_id) +) WITH (OIDS=FALSE); + +CREATE INDEX ssr_site_id_idx on lta.site_root_dir(site_id); +CREATE INDEX ssr_root_dir_id_idx on lta.site_root_dir(root_dir_id); + +CREATE TABLE lta.site_quota ( + id serial, + site_id integer NOT NULL REFERENCES lta.site ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, + quota bigint NOT NULL, + valid_until_date timestamp without time zone NOT NULL, + primary key (id) +) WITH (OIDS=FALSE); + +CREATE TABLE lta.quota_root_dirs ( + site_id integer NOT NULL REFERENCES lta.site ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, + root_dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + primary key (site_id, root_dir_id) +); + +CREATE TABLE lta._directory_update_cache ( + dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + PRIMARY KEY (dir_id) +) WITH (OIDS=FALSE); + +CREATE TABLE scraper.last_directory_visit ( + id serial, + dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY 
DEFERRED, + visit_date timestamp without time zone NOT NULL DEFAULT '1970-01-01', + PRIMARY KEY (id) +) WITH (OIDS=FALSE); + +CREATE INDEX ldv_dir_id_idx on scraper.last_directory_visit(dir_id); +CREATE INDEX ldv_visit_date_idx on scraper.last_directory_visit(visit_date); + +CREATE TABLE metainfo.stats ( + id serial, + dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + dir_num_files integer DEFAULT 0 NOT NULL, + dir_total_file_size bigint DEFAULT 0 NOT NULL, + dir_min_file_size bigint DEFAULT 0 NOT NULL, + dir_max_file_size bigint DEFAULT 0 NOT NULL, + dir_min_file_creation_date timestamp without time zone DEFAULT NULL, + dir_max_file_creation_date timestamp without time zone DEFAULT NULL, + tree_num_files integer DEFAULT 0 NOT NULL, + tree_total_file_size bigint DEFAULT 0 NOT NULL, + tree_min_file_size bigint DEFAULT NULL, + tree_max_file_size bigint DEFAULT NULL, + tree_min_file_creation_date timestamp without time zone DEFAULT NULL, + tree_max_file_creation_date timestamp without time zone DEFAULT NULL, + PRIMARY KEY (id) +); + +CREATE INDEX stats_dir_id_idx on metainfo.stats(dir_id); +CREATE INDEX stats_dir_min_file_creation_date_idx on metainfo.stats(dir_min_file_creation_date); +CREATE INDEX stats_dir_max_file_creation_date_idx on metainfo.stats(dir_max_file_creation_date); +CREATE INDEX stats_tree_min_file_creation_date_idx on metainfo.stats(tree_min_file_creation_date); +CREATE INDEX stats_tree_max_file_creation_date_idx on metainfo.stats(tree_max_file_creation_date); + +CREATE TABLE metainfo.project ( + id serial, + name text UNIQUE NOT NULL, + PRIMARY KEY (id) +) WITH (OIDS=FALSE); + +CREATE INDEX project_name_idx on metainfo.project(name); + +CREATE TABLE metainfo.project_top_level_directory ( + project_id integer NOT NULL REFERENCES metainfo.project ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, + dir_id integer NOT NULL REFERENCES lta.directory ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + 
primary key (project_id, dir_id) +) WITH (OIDS=FALSE); + +CREATE INDEX ptld_project_id_idx on metainfo.project_top_level_directory(project_id); +CREATE INDEX ptld_dir_id_idx on metainfo.project_top_level_directory(dir_id); + +CREATE TABLE metainfo.observation ( + id int, -- sas id, like 'L123456', but then as integer, so 123456 + PRIMARY KEY (id) +) WITH (OIDS=FALSE); + +CREATE TABLE metainfo.project_observation ( + project_id integer NOT NULL REFERENCES metainfo.project ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, + observation_id integer NOT NULL REFERENCES metainfo.observation ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, + PRIMARY KEY (project_id, observation_id) +) WITH (OIDS=FALSE); + +CREATE TABLE metainfo.dataproduct ( + id serial, + fileinfo_id integer NOT NULL REFERENCES lta.fileinfo ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + observation_id integer NOT NULL REFERENCES metainfo.observation ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + name text NOT NULL, + PRIMARY KEY (id) +) WITH (OIDS=FALSE); + +CREATE INDEX dp_dataproduct_name_idx on metainfo.dataproduct(name); +CREATE INDEX dp_fileinfo_id_idx on metainfo.dataproduct(fileinfo_id); + +-- END TABLES + + +-- TRIGGERS + +CREATE OR REPLACE FUNCTION lta.on_site_root_dir_deleted_do_delete_directory() +RETURNS trigger AS +$BODY$ +BEGIN + DELETE FROM lta.directory WHERE id = OLD.root_dir_id; + RETURN OLD; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_site_root_dir_deleted_do_delete_directory +AFTER DELETE +ON lta.site_root_dir +FOR EACH ROW +EXECUTE PROCEDURE lta.on_site_root_dir_deleted_do_delete_directory(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION lta.on_directory_inserted_add_directory_closure_entry() +RETURNS trigger AS +$BODY$ +BEGIN + INSERT INTO lta.directory_closure (ancestor_id, descendant_id, depth) values (NEW.id, NEW.id, 0) ; + + INSERT INTO lta.directory_closure 
(ancestor_id, descendant_id, depth) + SELECT p.ancestor_id, c.descendant_id, p.depth+c.depth+1 + FROM lta.directory_closure p, lta.directory_closure c + WHERE p.descendant_id=new.parent_dir_id AND c.ancestor_id=new.id ; + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_directory_inserted_add_directory_closure_entry +AFTER INSERT +ON lta.directory +FOR EACH ROW +EXECUTE PROCEDURE lta.on_directory_inserted_add_directory_closure_entry(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION scraper.on_directory_inserted_add_last_directory_visit_entry() +RETURNS trigger AS +$BODY$ +BEGIN + --RAISE NOTICE 'on_directory_inserted_add_last_directory_visit_entry, NEW=%', NEW; + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO scraper.last_directory_visit(dir_id) + (SELECT NEW.id WHERE NOT EXISTS (SELECT dir_id FROM scraper.last_directory_visit WHERE dir_id = NEW.id)); + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_directory_inserted_add_last_directory_visit_entry +AFTER INSERT +ON lta.directory +FOR EACH ROW +EXECUTE PROCEDURE scraper.on_directory_inserted_add_last_directory_visit_entry(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION scraper.on_site_root_dir_inserted_do_add_to_quota_root_dirs() +RETURNS trigger AS +$BODY$ +BEGIN + -- by default, add each root directory as 'directory under quota' + -- users can remove them by hand + INSERT INTO lta.quota_root_dirs(site_id, root_dir_id) + VALUES (NEW.site_id, NEW.root_dir_id); + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_site_root_dir_inserted_do_add_to_quota_root_dirs +AFTER INSERT +ON lta.site_root_dir +FOR EACH ROW +EXECUTE PROCEDURE scraper.on_site_root_dir_inserted_do_add_to_quota_root_dirs(); + 
+-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION metainfo.on_directory_inserted_add_stats_entry() +RETURNS trigger AS +$BODY$ +BEGIN + --RAISE NOTICE 'on_directory_inserted_add_stats_entry, NEW=%', NEW; + INSERT INTO metainfo.stats(dir_id) values (NEW.id); + + -- always trim trailing slashes from dirname + NEW.name := trim(trailing '/' from NEW.name); + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_directory_inserted_add_stats_entry +BEFORE INSERT +ON lta.directory +FOR EACH ROW +EXECUTE PROCEDURE metainfo.on_directory_inserted_add_stats_entry(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION metainfo.on_fileinfo_insert_update_delete_store_in_cache() +RETURNS trigger AS +$BODY$ +BEGIN + IF TG_OP = 'DELETE' THEN + INSERT INTO lta._directory_update_cache (dir_id) VALUES (OLD.dir_id); + RETURN OLD; + END IF; + + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO lta._directory_update_cache (dir_id) + (SELECT NEW.dir_id WHERE NOT EXISTS (SELECT dir_id FROM lta._directory_update_cache WHERE dir_id = NEW.dir_id)); + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_fileinfo_insert_update_delete_store_in_cache +AFTER INSERT OR UPDATE OR DELETE +ON lta.fileinfo +FOR EACH ROW +EXECUTE PROCEDURE metainfo.on_fileinfo_insert_update_delete_store_in_cache(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION metainfo.on_directory_update_cache_commit_do_update_dir_stats() +RETURNS trigger AS +$BODY$ +DECLARE + fileinfo_row lta.fileinfo%ROWTYPE; + _dir_id integer; + _dir_num_files bigint; + _dir_total_file_size bigint; + _dir_min_file_size bigint; + _dir_max_file_size bigint; + _dir_min_file_creation_date timestamp without time zone; + _dir_max_file_creation_date timestamp 
without time zone; +BEGIN + FOR _dir_id in (SELECT DISTINCT(c.dir_id) FROM lta._directory_update_cache c) LOOP + _dir_num_files := 0; + _dir_total_file_size := 0; + _dir_min_file_size := NULL; + _dir_max_file_size := NULL; + + -- aggregate results + FOR fileinfo_row IN (SELECT * FROM lta.fileinfo fi where fi.dir_id = _dir_id) LOOP + _dir_num_files := _dir_num_files + 1; + _dir_total_file_size := _dir_total_file_size + fileinfo_row.size; + _dir_min_file_size := LEAST(_dir_min_file_size, fileinfo_row.size); + _dir_max_file_size := GREATEST(_dir_max_file_size, fileinfo_row.size); + _dir_min_file_creation_date := LEAST(_dir_min_file_creation_date, fileinfo_row.creation_date); + _dir_max_file_creation_date := GREATEST(_dir_max_file_creation_date, fileinfo_row.creation_date); + END LOOP; + + UPDATE metainfo.stats + SET (dir_num_files, dir_total_file_size, dir_min_file_size, dir_max_file_size, dir_min_file_creation_date, dir_max_file_creation_date) = + (_dir_num_files, _dir_total_file_size, _dir_min_file_size, _dir_max_file_size, _dir_min_file_creation_date, _dir_max_file_creation_date) + WHERE dir_id = _dir_id; + + DELETE FROM lta._directory_update_cache WHERE dir_id = _dir_id; + END LOOP; + + RETURN NULL; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +-- use DEFERRABLE INITIALLY DEFERRED CONSTRAINT trigger which fires only once upon committing the file inserts +-- then run method on_directory_update_cache_commit_do_update_dir_stats to collect all inserted fileinfo's into dir/tree stats +CREATE CONSTRAINT TRIGGER trigger_on_directory_update_cache_commit_do_update_dir_stats +AFTER INSERT +ON lta._directory_update_cache +DEFERRABLE INITIALLY DEFERRED +FOR EACH ROW +EXECUTE PROCEDURE metainfo.on_directory_update_cache_commit_do_update_dir_stats(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION metainfo.on_dir_stats_update_do_update_tree_stats() +RETURNS trigger AS +$BODY$ +DECLARE + stats_row 
metainfo.stats%ROWTYPE; +BEGIN + -- initialize the NEW.tree_* variables with this dir's dir_stats... + NEW.tree_num_files := NEW.dir_num_files; + NEW.tree_total_file_size := NEW.dir_total_file_size; + NEW.tree_min_file_size := NEW.dir_min_file_size; + NEW.tree_max_file_size := NEW.dir_max_file_size; + NEW.tree_min_file_creation_date := NEW.dir_min_file_creation_date; + NEW.tree_max_file_creation_date := NEW.dir_max_file_creation_date; + + -- loop over the tree stats from all filled subdirs of this directory + -- and aggregate them to the new_tree_* variables + FOR stats_row IN SELECT st.* FROM metainfo.stats st + INNER JOIN lta.directory dir ON dir.id = st.dir_id + WHERE dir.parent_dir_id = NEW.dir_id + AND tree_max_file_creation_date IS NOT NULL + AND dir_max_file_creation_date IS NOT NULL LOOP + + -- aggregate + NEW.tree_num_files := NEW.tree_num_files + stats_row.tree_num_files; + NEW.tree_total_file_size := NEW.tree_total_file_size + stats_row.tree_total_file_size; + NEW.tree_min_file_size := LEAST(NEW.tree_min_file_size, stats_row.tree_min_file_size); + NEW.tree_max_file_size := GREATEST(NEW.tree_max_file_size, stats_row.tree_max_file_size); + NEW.tree_min_file_creation_date := LEAST(NEW.tree_min_file_creation_date, stats_row.tree_min_file_creation_date); + NEW.tree_max_file_creation_date := GREATEST(NEW.tree_max_file_creation_date, stats_row.tree_max_file_creation_date); + END LOOP; + + -- return the NEW row with the updated tree_* variables + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_dir_stats_update_do_update_tree_stats +BEFORE UPDATE OF dir_num_files, dir_total_file_size, dir_min_file_size, dir_max_file_size, dir_min_file_creation_date, dir_max_file_creation_date +ON metainfo.stats +FOR EACH ROW +EXECUTE PROCEDURE metainfo.on_dir_stats_update_do_update_tree_stats(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION 
metainfo.on_stats_update_do_update_parents_tree_stats() +RETURNS trigger AS +$BODY$ +DECLARE + stats_row metainfo.stats%ROWTYPE; + parent_stats_row metainfo.stats%ROWTYPE; + new_tree_num_files bigint; + new_tree_total_file_size bigint; + new_tree_min_file_size bigint; + new_tree_max_file_size bigint; + new_tree_min_file_creation_date timestamp without time zone; + new_tree_max_file_creation_date timestamp without time zone; +BEGIN + -- climb up the tree until at root, start with the first direct parent + SELECT st.* FROM metainfo.stats st + INNER JOIN lta.directory dir on dir.parent_dir_id = st.dir_id + WHERE dir.id = NEW.dir_id + LIMIT 1 + INTO parent_stats_row; + + --loop and climb further up the tree until at root + WHILE parent_stats_row.id IS NOT NULL LOOP + -- initialize all new_tree_* vars with the current parent_stats_row's values or 0/null. + new_tree_num_files := GREATEST(0, parent_stats_row.dir_num_files); + new_tree_total_file_size := GREATEST(0, parent_stats_row.dir_total_file_size); + new_tree_min_file_size := parent_stats_row.tree_min_file_size; + new_tree_max_file_size := parent_stats_row.tree_max_file_size; + new_tree_min_file_creation_date := parent_stats_row.tree_min_file_creation_date; + new_tree_max_file_creation_date := parent_stats_row.tree_max_file_creation_date; + + -- loop over the tree stats from all filled subdirs of the parent's directory + -- and aggregate them to the new_tree_* variables + FOR stats_row in SELECT st.* FROM metainfo.stats st + INNER JOIN lta.directory dir ON dir.id = st.dir_id + WHERE dir.parent_dir_id = parent_stats_row.dir_id LOOP + + -- aggregate + new_tree_num_files := new_tree_num_files + stats_row.tree_num_files; + new_tree_total_file_size := new_tree_total_file_size + stats_row.tree_total_file_size; + new_tree_min_file_size := LEAST(new_tree_min_file_size, stats_row.tree_min_file_size); + new_tree_max_file_size := GREATEST(new_tree_max_file_size, stats_row.tree_max_file_size); + new_tree_min_file_creation_date 
:= LEAST(new_tree_min_file_creation_date, stats_row.tree_min_file_creation_date); + new_tree_max_file_creation_date := GREATEST(new_tree_max_file_creation_date, stats_row.tree_max_file_creation_date); + END LOOP; + + -- and update the parent stats row with the aggregated results + UPDATE metainfo.stats stats + SET (tree_num_files, tree_total_file_size, tree_min_file_size, tree_max_file_size, tree_min_file_creation_date, tree_max_file_creation_date) = + (new_tree_num_files, new_tree_total_file_size, new_tree_min_file_size, new_tree_max_file_size, new_tree_min_file_creation_date, new_tree_max_file_creation_date) + WHERE stats.dir_id = parent_stats_row.dir_id; + + -- climb the tree by selecting the parent's parent, and loop again. + SELECT st.* FROM metainfo.stats st + INNER JOIN lta.directory dir on dir.parent_dir_id = st.dir_id + WHERE dir.id = parent_stats_row.dir_id + LIMIT 1 + INTO parent_stats_row; + END LOOP; + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_stats_update_do_update_parents_tree_stats +AFTER UPDATE OF dir_num_files, dir_total_file_size, dir_min_file_size, dir_max_file_size, dir_min_file_creation_date, dir_max_file_creation_date +ON metainfo.stats +FOR EACH ROW +EXECUTE PROCEDURE metainfo.on_stats_update_do_update_parents_tree_stats(); + +-------------------------------------------------------------------------------- + +CREATE OR REPLACE FUNCTION lta.on_directory_inserted_parse_project_info() +RETURNS trigger AS +$BODY$ +DECLARE + project_pos int; + next_slash_pos int; + new_dir_name text; + dir_name_tail text; + new_project_name text; + new_project_id int; + project_dir_name text; + project_dir_id int; + obs_id int; + obs_dir_name text; +BEGIN + new_dir_name := trim(trailing '/' from NEW.name); + project_pos := strpos(new_dir_name, '/projects'); + + IF project_pos > 0 THEN + dir_name_tail := substring(new_dir_name from project_pos + 10); + IF length(dir_name_tail) > 0 THEN + next_slash_pos := 
strpos(dir_name_tail, '/'); + IF next_slash_pos > 0 THEN + new_project_name := substring(dir_name_tail from 0 for next_slash_pos); + ELSE + new_project_name := dir_name_tail; + END IF; + + IF length(new_project_name) > 0 THEN + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO metainfo.project(name) + (SELECT new_project_name WHERE NOT EXISTS (SELECT name FROM metainfo.project WHERE name = new_project_name)); + + SELECT id FROM metainfo.project WHERE name = new_project_name LIMIT 1 INTO new_project_id; + + IF new_project_id IS NOT NULL THEN + IF next_slash_pos > 0 THEN + project_dir_name := substring(new_dir_name from 0 for project_pos + 10 + next_slash_pos - 1); + ELSE + project_dir_name := new_dir_name; + END IF; + + IF project_dir_name = new_dir_name THEN + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO metainfo.project_top_level_directory(project_id, dir_id) + (SELECT new_project_id, NEW.id WHERE NOT EXISTS (SELECT ptld.project_id, ptld.dir_id FROM metainfo.project_top_level_directory ptld WHERE ptld.project_id = new_project_id AND ptld.dir_id = NEW.id)); + ELSE + dir_name_tail := substring(dir_name_tail from length(new_project_name)+2); + next_slash_pos := strpos(dir_name_tail, '/'); + IF next_slash_pos > 0 THEN + obs_dir_name := substring(dir_name_tail from 0 for next_slash_pos); + ELSE + obs_dir_name := dir_name_tail; + END IF; + BEGIN + obs_id := obs_dir_name::integer; + + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO metainfo.observation(id) + (SELECT obs_id WHERE NOT EXISTS (SELECT id FROM metainfo.observation WHERE id = obs_id)); + + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO metainfo.project_observation(project_id, observation_id) + (SELECT new_project_id, obs_id WHERE NOT EXISTS (SELECT project_id, observation_id FROM metainfo.project_observation WHERE project_id = new_project_id AND observation_id = obs_id)); + EXCEPTION WHEN 
invalid_text_representation THEN + END; + END IF; + END IF; + END IF; + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql VOLATILE +COST 100; + +CREATE TRIGGER trigger_on_directory_inserted_parse_project_info +AFTER INSERT +ON lta.directory +FOR EACH ROW +EXECUTE PROCEDURE lta.on_directory_inserted_parse_project_info(); + +-------------------------------------------------------------------------------- + + CREATE OR REPLACE FUNCTION lta.on_fileinfo_inserted_parse_observation_info() + RETURNS trigger AS + $BODY$ + DECLARE + new_file_name text; + L_pos int; + first_underscore_pos int; + first_dot_pos int; + obs_id int; + dataproduct_name text; + BEGIN + new_file_name := trim(leading '/' from NEW.name); + L_pos := strpos(new_file_name, 'L'); + first_underscore_pos := strpos(new_file_name, '_'); + IF L_pos > 0 AND first_underscore_pos > L_pos THEN + BEGIN + obs_id := substring(new_file_name from L_pos+1 for first_underscore_pos-2)::integer; + + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO metainfo.observation(id) + (SELECT obs_id WHERE NOT EXISTS (SELECT id FROM metainfo.observation WHERE id = obs_id)); + + first_dot_pos := strpos(new_file_name, '.'); + IF first_dot_pos > L_pos THEN + dataproduct_name := substring(new_file_name from L_pos for first_dot_pos-1); + + --postgres < 9.5 way of doing INSERT...ON CONFLICT DO NOTHING + INSERT INTO metainfo.dataproduct(fileinfo_id, observation_id, name) + (SELECT NEW.id, obs_id, dataproduct_name WHERE NOT EXISTS (SELECT fileinfo_id, observation_id, name FROM metainfo.dataproduct WHERE fileinfo_id = NEW.id AND observation_id = obs_id AND name = dataproduct_name)); + END IF; + + EXCEPTION WHEN invalid_text_representation THEN + END; + END IF; + RETURN NEW; + END; + $BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; + +CREATE TRIGGER trigger_on_fileinfo_inserted_parse_observation_info + AFTER INSERT + ON lta.fileinfo + FOR EACH ROW + EXECUTE PROCEDURE 
lta.on_fileinfo_inserted_parse_observation_info(); + +-- END TRIGGERS + + +-- BEGIN NORMAL FUNCTIONS + +--TODO: this method get_tree_stats is recursive (it calls itself), which is notoriously slow in sql. rewrite method to use WITH RECURSIVE statements, see https://www.postgresql.org/docs/9.3/static/queries-with.html +CREATE OR REPLACE FUNCTION metainfo.get_tree_stats(tree_root_dir_id integer, lower_ts timestamp without time zone DEFAULT NULL, upper_ts timestamp without time zone DEFAULT NULL, + OUT dir_id integer, OUT tree_num_files bigint, OUT tree_total_file_size bigint) +RETURNS record AS $$ +DECLARE + stats_row metainfo.stats%ROWTYPE; + dir_num_files bigint; + dir_total_file_size bigint; + subdirs_tree_num_files bigint; + subdirs_tree_total_file_size bigint; +BEGIN + -- we need to provide the requested tree_root_dir_id also as an output, so we can join on it + dir_id := tree_root_dir_id; + + -- check for valid lower_ts/upper_ts + IF lower_ts IS NULL THEN + lower_ts := '-infinity'; + END IF; + IF upper_ts IS NULL THEN + upper_ts := 'infinity'; + END IF; + + SELECT st.* FROM metainfo.stats st + WHERE st.dir_id = tree_root_dir_id + LIMIT 1 + INTO stats_row; + + -- directory has no tree stats. 
So return 0,0 + IF stats_row.tree_min_file_creation_date IS NULL OR stats_row.tree_max_file_creation_date IS NULL THEN + tree_num_files := 0; + tree_total_file_size := 0; + RETURN; + END IF; + + + -- the tree stats of this directory have no overlap at all for the requested timerange + IF (stats_row.tree_min_file_creation_date > upper_ts) OR (stats_row.tree_max_file_creation_date < lower_ts) THEN + tree_num_files := 0; + tree_total_file_size := 0; + RETURN; + END IF; + + -- the tree stats of this directory have full overlap the requested timerange + IF stats_row.tree_min_file_creation_date >= lower_ts AND stats_row.tree_max_file_creation_date <= upper_ts THEN + tree_num_files := stats_row.tree_num_files; + tree_total_file_size := stats_row.tree_total_file_size; + RETURN; + END IF; + + -- the tree stats of this directory have partial overlap the requested timerange + -- recurse into subdirectories, and accumulate subdir results + IF stats_row.tree_min_file_creation_date <= upper_ts OR stats_row.tree_max_file_creation_date >= lower_ts THEN + --sum all results from the subdirs which have at least partial overlap + SELECT SUM(gts.tree_num_files), SUM(gts.tree_total_file_size) + FROM lta.directory d + INNER JOIN metainfo.stats s ON s.dir_id = d.id + INNER JOIN metainfo.get_tree_stats(d.id, lower_ts, upper_ts) gts ON gts.dir_id = d.id + WHERE d.parent_dir_id = tree_root_dir_id + AND NOT (s.tree_min_file_creation_date > upper_ts OR s.tree_max_file_creation_date < lower_ts) + INTO subdirs_tree_num_files, subdirs_tree_total_file_size; + + IF subdirs_tree_num_files IS NULL THEN + subdirs_tree_num_files := 0; + END IF; + + IF subdirs_tree_total_file_size IS NULL THEN + subdirs_tree_total_file_size := 0; + END IF; + + -- and add the num_files and total_file_size in this dir... 
+ IF stats_row.dir_num_files > 0 THEN + IF stats_row.dir_min_file_creation_date >= lower_ts AND stats_row.dir_max_file_creation_date <= upper_ts THEN + -- all files in this dir are in the requested time range + -- when 'all files'=0, that's ok, cause then dir_num_files and dir_total_file_size are 0 which is the answer we need + dir_num_files := stats_row.dir_num_files; + dir_total_file_size := stats_row.dir_total_file_size; + ELSE + -- some files in this dir are in the requested time range + -- make selection of files in this dir in the requested time range + SELECT COUNT(fi.id), SUM(fi.size) FROM lta.fileinfo fi + WHERE fi.dir_id = tree_root_dir_id + AND fi.creation_date >= lower_ts AND fi.creation_date <= upper_ts + INTO dir_num_files, dir_total_file_size; + END IF; + + IF dir_num_files IS NULL OR dir_num_files = 0 THEN + dir_total_file_size := 0; + END IF; + ELSE + dir_num_files := 0; + dir_total_file_size := 0; + END IF; + + tree_num_files := subdirs_tree_num_files + dir_num_files; + tree_total_file_size := subdirs_tree_total_file_size + dir_total_file_size; + + RETURN; + END IF; + + --this should never occur + RAISE EXCEPTION 'metainfo.get_tree_stats could not find no/partial/full overlap'; +END; +$$ LANGUAGE plpgsql; + + +--TODO: this method get_site_stats calls the recursive get_tree_stats methods, which needs a rewrite. After that, it is quite likely that this method also performs way faster. 
+CREATE OR REPLACE FUNCTION metainfo.get_site_stats(_site_id integer, lower_ts timestamp without time zone DEFAULT NULL::timestamp without time zone, upper_ts timestamp without time zone DEFAULT NULL::timestamp without time zone, + OUT tree_num_files bigint, OUT tree_total_file_size bigint) + RETURNS record AS $$ +BEGIN + SELECT SUM(gts.tree_num_files), SUM(gts.tree_total_file_size) + FROM lta.site_root_dir srd, metainfo.get_tree_stats(srd.root_dir_id, lower_ts, upper_ts) gts + WHERE srd.site_id = _site_id + INTO tree_num_files, tree_total_file_size; + + IF tree_num_files IS NULL THEN + tree_num_files := 0; + END IF; + + IF tree_total_file_size IS NULL THEN + tree_total_file_size := 0; + END IF; +END; +$$ LANGUAGE plpgsql; + + + +--TODO: see remarks at get_site_stats and get_tree_stats for optimizations. +CREATE OR REPLACE FUNCTION metainfo.get_site_quota_usage(_site_quota_id integer, OUT site_id integer, OUT site_name text, OUT quota bigint, OUT total_file_size bigint, OUT space_left bigint, OUT num_files bigint, OUT valid_until_date timestamp without time zone) + RETURNS record AS $$ +BEGIN + SELECT s.id, s.name, sq.quota, sq.valid_until_date + FROM lta.site_quota sq + JOIN lta.site s on s.id = sq.site_id + WHERE sq.id = _site_quota_id + LIMIT 1 + INTO site_id, site_name, quota, valid_until_date; + + SELECT gts.tree_total_file_size, gts.tree_num_files + FROM metainfo.get_site_stats(site_id, NULL, valid_until_date) gts + LIMIT 1 + INTO total_file_size, num_files; + + space_left := quota - total_file_size; +END; +$$ LANGUAGE plpgsql; + + + +-- END NORMAL FUNCTIONS + +-- +-- +-- -- VIEWS + +CREATE VIEW lta.site_root_directory as + select ss.id as site_id, ss.name as site_name, srd.root_dir_id, dir.name as dir_name + from lta.site_root_dir srd + join lta.directory dir on dir.id = srd.root_dir_id + join lta.site ss on ss.id = srd.site_id ; + +CREATE VIEW lta.site_quota_view as + select ss.id as site_id, ss.name as site_name, ssq.quota, ssq.valid_until_date + from 
lta.site ss + left join lta.site_quota ssq on ssq.site_id = ss.id; + +CREATE VIEW lta.site_quota_root_directory as + SELECT s.id AS site_id, s.name AS site_name, d.id AS dir_id, d.name AS dir_name + FROM lta.quota_root_dirs qrd + JOIN lta.site s ON s.id = qrd.site_id + JOIN lta.directory d ON d.id = qrd.root_dir_id; + +CREATE VIEW lta.site_directory_tree as + select rd.site_id as site_id, + rd.site_name as site_name, + rd.root_dir_id as root_dir_id, + rd.dir_name as root_dir_name, + dir.id as dir_id, + dir.name as dir_name, + dir.parent_dir_id as parent_dir_id, + dc.depth as depth + from lta.site_root_directory rd + inner join lta.directory_closure dc on dc.ancestor_id = rd.root_dir_id + inner join lta.directory dir on dc.descendant_id = dir.id; + +CREATE VIEW scraper.site_scraper_last_directory_visit as + select rd.site_id as site_id, + rd.site_name as site_name, + dir.id as dir_id, + dir.name as dir_name, + sldv.visit_date as last_visit + from lta.site_root_directory rd + inner join lta.directory_closure dc on dc.ancestor_id = rd.root_dir_id + inner join lta.directory dir on dc.descendant_id = dir.id + inner join scraper.last_directory_visit sldv on sldv.dir_id = dir.id ; + +CREATE VIEW lta.site_directory_file as + select site.id as site_id, + site.name as site_name, + dir.id as dir_id, + dir.name as dir_name, + fileinfo.id as file_id, + fileinfo.name as file_name, + fileinfo.size as file_size, + fileinfo.creation_date as file_creation_date + from lta.site site + join lta.site_root_dir srd on srd.site_id = site.id + inner join lta.directory_closure dc on dc.ancestor_id = srd.root_dir_id + inner join lta.directory dir on dc.descendant_id = dir.id + inner join lta.fileinfo on fileinfo.dir_id = dir.id ; + +CREATE VIEW metainfo.project_directory as + select + project.id as project_id, + project.name as project_name, + dir.id as dir_id, + dir.name as dir_name + from metainfo.project_top_level_directory ptld + inner join metainfo.project on project.id = ptld.project_id 
+ inner join lta.directory_closure dc on dc.ancestor_id = ptld.dir_id + inner join lta.directory dir on dc.descendant_id = dir.id ; + +CREATE VIEW metainfo.site_directory_stats as + select sdt.site_id, + sdt.site_name, + sdt.dir_id, + sdt.dir_name, + st.dir_num_files, + st.dir_total_file_size, + st.dir_min_file_size, + st.dir_max_file_size, + st.dir_min_file_creation_date, + st.dir_max_file_creation_date, + st.tree_num_files, + st.tree_total_file_size, + st.tree_min_file_size, + st.tree_max_file_size, + st.tree_min_file_creation_date, + st.tree_max_file_creation_date + from lta.site_directory_tree sdt + left join metainfo.stats st on st.dir_id = sdt.dir_id; + +CREATE OR REPLACE VIEW metainfo.project_directory_stats AS + SELECT pd.project_id, pd.project_name, sds.* + FROM metainfo.project_directory pd + JOIN metainfo.site_directory_stats sds ON sds.dir_id = pd.dir_id; + +CREATE VIEW metainfo.observation_dataproduct_file as + SELECT sdf.site_id, sdf.site_name, dp.observation_id, dp.id as dataproduct_id, dp.name as dataproduct_name, sdf.dir_id, sdf.dir_name, sdf.file_id, sdf.file_name, sdf.file_size, sdf.file_creation_date + FROM metainfo.dataproduct dp + JOIN lta.site_directory_file sdf ON sdf.file_id = dp.fileinfo_id; + +CREATE VIEW metainfo.project_observation_dataproduct as + SELECT p.id AS project_id, + p.name AS project_name, + dp.observation_id, + dp.id AS dataproduct_id, + dp.name AS dataproduct_name, + dp.fileinfo_id AS fileinfo_id + FROM metainfo.dataproduct dp + INNER JOIN metainfo.project_observation po ON po.observation_id = dp.observation_id + INNER JOIN metainfo.project p ON p.id = po.project_id; + +CREATE VIEW metainfo.dataproduct_all as + SELECT pod.*, sdf.* + FROM metainfo.project_observation_dataproduct pod + INNER JOIN lta.site_directory_file sdf on sdf.file_id = pod.fileinfo_id; + +CREATE VIEW metainfo.site_project_stats as + select ptld.project_id, p.name as project_name, site_id, site_name, sds.dir_id, sds.dir_name, tree_num_files, 
tree_total_file_size, tree_min_file_creation_date, tree_max_file_creation_date + from metainfo.project_top_level_directory ptld + inner join metainfo.project p on p.id = ptld.project_id + inner join metainfo.site_directory_stats sds on sds.dir_id = ptld.dir_id + where tree_num_files IS NOT NULL; + +CREATE VIEW metainfo.project_stats AS + SELECT project_id, project_name, COUNT(site_id) num_sites, SUM(tree_num_files) total_num_files, SUM(tree_total_file_size) total_file_size, MIN(tree_min_file_creation_date) min_file_creation_date, MAX(tree_max_file_creation_date) max_file_creation_date + FROM metainfo.site_project_stats + group by project_id, project_name; + +CREATE VIEW metainfo.site_project_observation_dataproduct_dir_file AS + SELECT sdf.site_id, sdf.site_name, pod.project_id, pod.project_name, pod.observation_id, pod.dataproduct_id, pod.dataproduct_name, sdf.dir_id, sdf.dir_name, sdf.file_id, sdf.file_name, sdf.file_size, sdf.file_creation_date + FROM metainfo.project_observation_dataproduct pod + JOIN lta.site_directory_file sdf ON sdf.file_id = pod.fileinfo_id; + +CREATE VIEW metainfo.site_root_dir_tree_stats AS + SELECT srd.site_id, srd.site_name, srd.root_dir_id as root_dir_id, srd.dir_name as root_dir_name, + sds.tree_num_files, sds.tree_total_file_size, sds.tree_min_file_size, sds.tree_max_file_size, sds.tree_min_file_creation_date, sds.tree_max_file_creation_date + FROM lta.site_root_directory srd + INNER JOIN metainfo.site_directory_stats sds ON sds.dir_id = srd.root_dir_id; + +CREATE VIEW metainfo.site_stats as + SELECT site_id, site_name, SUM(tree_num_files) total_num_files, SUM(tree_total_file_size) total_file_size, MIN(tree_min_file_size) min_file_size, MAX(tree_max_file_size) max_file_size, MIN(tree_min_file_creation_date) min_file_creation_date, MAX(tree_max_file_creation_date) max_file_creation_date + from metainfo.site_root_dir_tree_stats + group by site_id, site_name; + +CREATE VIEW metainfo.site_quota_usage AS + select gsqu.* + from 
lta.site_quota sq + join metainfo.get_site_quota_usage(sq.id) gsqu on gsqu.site_id = sq.site_id; + +CREATE OR REPLACE VIEW metainfo.site_quota_root_dir_stats AS + SELECT sds.site_id, sds.site_name, sds.dir_id, sds.dir_name, sds.tree_num_files, sds.tree_total_file_size + FROM lta.quota_root_dirs qrd + INNER JOIN metainfo.site_directory_stats sds on sds.dir_id = qrd.root_dir_id; + + +-- END VIEWS + +COMMIT; diff --git a/LTA/ltastorageoverview/lib/report.py b/LTA/ltastorageoverview/lib/report.py index b77f0a764bd5387f72933fb9b6a10997606b3e8a..168ee9833dcfbd065b1498748c6f5b4f9fb16274 100755 --- a/LTA/ltastorageoverview/lib/report.py +++ b/LTA/ltastorageoverview/lib/report.py @@ -25,59 +25,55 @@ from datetime import datetime, timedelta import sys import os import os.path -from ltastorageoverview import store +from lofar.lta.ltastorageoverview import store from lofar.common.util import humanreadablesize from lofar.common.datetimeutils import monthRanges -def main(argv): - dbpath = argv[0] if argv else 'ltastorageoverview.sqlite' - print 'Report for ' + dbpath +logger = logging.getLogger() - db = store.LTAStorageDb(dbpath) +def main(): + from optparse import OptionParser + from lofar.common import dbcredentials + + # Check the invocation arguments + parser = OptionParser("%prog [options]", description='runs the lta scraper and stores results in the speficied database.') + parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging') + parser.add_option_group(dbcredentials.options_group(parser)) + parser.set_defaults(dbcredentials="LTASO") + (options, args) = parser.parse_args() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.INFO) + + dbcreds = dbcredentials.parse_options(options) + + logger.info("Using dbcreds: %s" % dbcreds.stringWithHiddenPassword()) + + db = store.LTAStorageDb(dbcreds, options.verbose) sites = db.sites() - numFilesTotal = 
sum([db.numFilesInSite(s[0]) for s in sites]) - totalFileSize = sum([db.totalFileSizeInSite(s[0]) for s in sites]) + numFilesTotal = sum([db.numFilesInSite(s['id']) for s in sites]) + totalFileSize = sum([db.totalFileSizeInSite(s['id']) for s in sites]) print '\n*** TOTALS *** #files=%s total_size=%s' % (humanreadablesize(numFilesTotal, ''), humanreadablesize(totalFileSize)) for site in sites: - numFilesInSite = db.numFilesInSite(site[0]) - totalFileSizeInSite = db.totalFileSizeInSite(site[0]) + numFilesInSite = db.numFilesInSite(site['id']) + totalFileSizeInSite = db.totalFileSizeInSite(site['id']) - print '\n--- %s --- #files=%s total_size=%s' % (site[1], + print '\n--- %s --- #files=%s total_size=%s' % (site['name'], humanreadablesize(numFilesInSite, ''), humanreadablesize(totalFileSizeInSite)) - root_dirs = db.rootDirectoriesForSite(site[0]) + root_dirs = db.rootDirectoriesForSite(site['id']) for root_dir in root_dirs: - numFilesInTree = db.numFilesInTree(root_dir[0]) - totalFileSizeInTree = db.totalFileSizeInTree(root_dir[0]) - - print " %s #files=%d total_size=%s" % (root_dir[1], numFilesInTree, humanreadablesize(totalFileSizeInTree)) - - subdirs = db.subDirectories(root_dir[0], 1, False) - subdirs = sorted(subdirs, key=lambda x: x[1]) - - for subdir in subdirs: - numFilesInTree = db.numFilesInTree(subdir[0]) - totalFileSizeInTree = db.totalFileSizeInTree(subdir[0]) - - print " %s #files=%d total_size=%s" % (subdir[1], numFilesInTree, humanreadablesize(totalFileSizeInTree)) - - if subdir[1].endswith('projects/'): - projectsSubDirs = db.subDirectories(subdir[0], 1, False) - projectsSubDirs = sorted(projectsSubDirs, key=lambda x: x[1]) - - for projectsSubDir in projectsSubDirs: - numFilesInTree = db.numFilesInTree(projectsSubDir[0]) - totalFileSizeInTree = db.totalFileSizeInTree(projectsSubDir[0]) - - print " %s #files=%d total_size=%s" % (projectsSubDir[1], numFilesInTree, humanreadablesize(totalFileSizeInTree)) + numFilesInTree = 
db.numFilesInTree(root_dir['root_dir_id']) + totalFileSizeInTree = db.totalFileSizeInTree(root_dir['root_dir_id']) + print " %s #files=%d total_size=%s" % (root_dir['dir_name'], numFilesInTree, humanreadablesize(totalFileSizeInTree)) utcnow = datetime.utcnow() monthbegin = datetime(utcnow.year, utcnow.month, 1) @@ -85,36 +81,36 @@ def main(argv): print '\n\n*** CHANGES THIS MONTH %s ***' % monthbegin.strftime('%Y/%m') for site in sites: - root_dirs = db.rootDirectoriesForSite(site[0]) + root_dirs = db.rootDirectoriesForSite(site['id']) - numChangedFilesInSite = db.numFilesInSite(site[0], + numChangedFilesInSite = db.numFilesInSite(site['id'], monthbegin, monthend) if numChangedFilesInSite == 0: - print '\n--- %s --- None' % (site[1],) + print '\n--- %s --- None' % (site['name'],) continue - totalChangedFileSizeInSite = db.totalFileSizeInSite(site[0], + totalChangedFileSizeInSite = db.totalFileSizeInSite(site['id'], monthbegin, monthend) - print '\n--- %s --- #files=%d total_size=%s' % (site[1], + print '\n--- %s --- #files=%d total_size=%s' % (site['name'], numChangedFilesInSite, humanreadablesize(totalChangedFileSizeInSite)) for root_dir in root_dirs: - changedFiles = db.filesInTree(root_dir[0], monthbegin, monthend) + changedFiles = db.filesInTree(root_dir['dir_id'], monthbegin, monthend) if len(changedFiles) > 0: - numFilesInTree = db.numFilesInTree(root_dir[0], + numFilesInTree = db.numFilesInTree(root_dir['dir_id'], monthbegin, monthend) - totalFileSizeInTree = db.totalFileSizeInTree(root_dir[0], + totalFileSizeInTree = db.totalFileSizeInTree(root_dir['dir_id'], monthbegin, monthend) - print " %s #files=%d total_size=%s" % (root_dir[1], + print " %s #files=%d total_size=%s" % (root_dir['dir_name'], numFilesInTree, humanreadablesize(totalFileSizeInTree)) @@ -133,18 +129,19 @@ def main(argv): print '\n\n*** CHANGES PER MONTH ***' min_date, max_date = db.datetimeRangeOfFilesInTree() - month_ranges = monthRanges(min_date, max_date) + if min_date and max_date: + 
month_ranges = monthRanges(min_date, max_date) - for site in sites: - print '\n--- %s ---' % site[1] + for site in sites: + print '\n--- %s ---' % site['name'] - for month_range in month_ranges: - numFilesInSite = db.numFilesInSite(site[0], month_range[0], month_range[1]) - totalFileSizeInSite = db.totalFileSizeInSite(site[0], month_range[0], month_range[1]) + for month_range in month_ranges: + numFilesInSite = db.numFilesInSite(site['id'], month_range[0], month_range[1]) + totalFileSizeInSite = db.totalFileSizeInSite(site['id'], month_range[0], month_range[1]) - print " %s %s %s #files=%d total_size=%s" % (site[1], month_range[0], month_range[1], numFilesInSite, humanreadablesize(totalFileSizeInSite)) + print " %s %s %s #files=%d total_size=%s" % (site['name'], month_range[0], month_range[1], numFilesInSite, humanreadablesize(totalFileSizeInSite)) if __name__ == "__main__": - main(sys.argv[1:]) + main() diff --git a/LTA/ltastorageoverview/lib/scraper.py b/LTA/ltastorageoverview/lib/scraper.py index 5b9cbe2bdc343a6eea8632dac02c5a71d41abb72..96c2a39e6a4405500c9a6a84b3e5000f6d3947e5 100755 --- a/LTA/ltastorageoverview/lib/scraper.py +++ b/LTA/ltastorageoverview/lib/scraper.py @@ -28,18 +28,19 @@ import logging import time import datetime import sys +import socket import os import os.path import threading import multiprocessing -from ltastorageoverview import store -from ltastorageoverview.utils import humanreadablesize -from random import random +from lofar.lta.ltastorageoverview import store +from lofar.common.util import humanreadablesize +from random import random, randint -#logging.basicConfig(filename='scraper.' 
+ time.strftime("%Y-%m-%d") + '.log', level=logging.DEBUG, format="%(asctime)-15s %(levelname)s %(message)s") -logging.basicConfig(level=logging.DEBUG, format="%(asctime)-15s %(levelname)s %(message)s") logger = logging.getLogger() +VISIT_INTERVAL = datetime.timedelta(days=3) +LEXAR_HOST = 'ingest@lexar004.offline.lofar' class FileInfo: '''Simple struct to hold filename and size''' @@ -58,6 +59,7 @@ class FileInfo: return self.filename + " " + humanreadablesize(self.size) + " " + str(self.created_at) class SrmlsException(Exception): + '''Exception which is raised when an srmls command failes''' def __init__(self, command, exitcode, stdout, stderr): self.command = command self.exitcode = exitcode @@ -69,6 +71,7 @@ class SrmlsException(Exception): (self.command, self.exitcode, self.stdout, self.stderr) class ParseException(Exception): + '''Exception which is raised when parsing srmls results fails''' def __init__(self, message): self.message = message @@ -87,8 +90,14 @@ class Location: directory : int a directory at the storage site. for example: /pnfs/grid.sara.nl/data/lofar/storage ''' - self.srmurl = srmurl - self.directory = directory + self.srmurl = srmurl.rstrip('/') + self.directory = directory.rstrip('/') if len(directory) > 1 else directory + + if not self.srmurl.startswith('srm://'): + raise ValueError('malformed srm url: %s' % (self.srmurl,)) + + if not self.directory.startswith('/'): + raise ValueError('malformed directory path: "%s". 
should start with a /' % (self.directory,)) def path(self): '''returns the full path srmurl + directory''' @@ -121,12 +130,17 @@ class Location: foundFiles = [] foundDirectories = [] - logger.info("Scanning %s", self.path()) + logger.info("Scanning %s with offset=%s", self.path(), offset) # the core command: do an srmls call and parse the results # srmls can only yield max 900 items in a result, hence we can recurse for the next 900 by using the offset - cmd = ["bash", "-c", "source %s;srmls -l -count=900 -offset=%d %s%s" % ('/globalhome/ingest/service/bin/init.sh', offset, self.srmurl, self.directory)] - # logger.debug(' '.join(cmd)) + cmd = ['ssh', '-tt', '-n', '-x', '-q', LEXAR_HOST, "bash", "-c", + "\'source /globalhome/ingest/.grid/.ingest_profile; srmls -l -count=900 -offset=%d %s%s\'" % ( + offset, + self.srmurl, + self.directory) ] + + logger.debug(' '.join(cmd)) p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) logs = p.communicate() # logger.debug('Shell command for %s exited with code %s' % (self.path(), p.returncode)) @@ -164,7 +178,7 @@ class Location: raise ParseException("Could not parse dirname from line: %s\nloglines:\n%s" % (pathLineItems[1], logs[0])) - foundDirectories.append(Location(self.srmurl, dirname)) + foundDirectories.append(Location(self.srmurl, dirname.rstrip('/'))) elif entryType.lower() == 'file': try: filesize = int(pathLineItems[0]) @@ -172,7 +186,7 @@ class Location: timestamplines = [x for x in lines if 'ed at:' in x] timestampline = None for line in timestamplines: - if 'created' in line: + if 'created' in line and '1970' not in line: timestampline = line break timestampline = line @@ -230,133 +244,279 @@ class LocationResult: return sum([fileinfo.size for fileinfo in self.files]) -# our lock for safe guarding locations and results -# which will be queried in parallel -lock = threading.Lock() - class ResultGetterThread(threading.Thread): '''Helper class to query Locations 
asynchronously for results. Gets the result for the first Location in the locations deque and appends it to the results deque Appends the subdirectory Locations at the end of the locations deque for later processing''' - def __init__(self, db, dir_id): + def __init__(self, dbcreds, dir_id, log_queries=False): threading.Thread.__init__(self) self.daemon = True - self.db = db + self.dbcreds = dbcreds + self.log_queries = log_queries self.dir_id = dir_id def run(self): '''A single location is pop\'ed from the locations deque and the results are queried. Resulting subdirectories are appended to the locations deque''' try: - with lock: - dir = self.db.directory(self.dir_id) + with store.LTAStorageDb(self.dbcreds, self.log_queries) as db: + dir = db.directory(self.dir_id) if not dir: return - dir_id = dir[0] - dir_name = dir[1] - self.db.updateDirectoryLastVisitTime(dir_id, datetime.datetime.utcnow()) + dir_id = dir['dir_id'] + dir_name = dir['dir_name'] - site_id = dir[2] - site = self.db.site(site_id) - srm_url = site[2] + site_id = dir['site_id'] + site = db.site(site_id) + srm_url = site['url'] location = Location(srm_url, dir_name) try: + def rescheduleVisit(): + for i in range(5): + try: + with store.LTAStorageDb(self.dbcreds, self.log_queries) as db: + logger.info('Rescheduling %s for new visit.' % (location.path(),)) + db.updateDirectoryLastVisitTime(self.dir_id, datetime.datetime.utcnow() - VISIT_INTERVAL + datetime.timedelta(mins=1)) + break + except: + time.sleep(1) + + # get results... 
long blocking result = location.getResult() logger.info(result) - with lock: - self.db.insertFileInfos([(file.filename, file.size, file.created_at, dir_id) for file in result.files]) + with store.LTAStorageDb(self.dbcreds, self.log_queries) as db: + # convert the result.files list into a dict + #with (filename, dir_id) as key and a tuple with all file info as value + result_file_tuple_dict = {} + for file in result.files: + filename = file.filename.split('/')[-1] + key = (filename, dir_id) + file_tuple = (filename, int(file.size), file.created_at, dir_id) + result_file_tuple_dict[key] = file_tuple + + # create a dict of all already known files from the db + known_file_dict = {} + for file in db.filesInDirectory(dir_id): + key = (str(file['name']), dir_id) + known_file_dict[key] = file + + # now compare the result and known (filename, dir_id) sets + # and find out which a new, and which are known. + # compare only by (filename, dir_id) because for a given file the size and/or date might have changed, + # but that does not make it a new/unique file. 
+ result_file_key_set = set(result_file_tuple_dict.keys()) + known_file_key_set = set(known_file_dict.keys()) + new_file_key_set = result_file_key_set - known_file_key_set + removed_file_key_set = known_file_key_set - result_file_key_set + + logger.info("%s %s: %d out of %d files are new, and %d are already known", site['name'], + dir_name, + len(new_file_key_set), + len(result_file_key_set), + len(known_file_key_set)) + + if new_file_key_set: + new_file_tuple_set = [result_file_tuple_dict[key] for key in new_file_key_set] + file_ids = db.insertFileInfos(new_file_tuple_set) + + if len(file_ids) != len(new_file_tuple_set): + rescheduleVisit() + + if known_file_key_set: + for key, known_file in known_file_dict.items(): + if key in result_file_tuple_dict: + result_file_tuple = result_file_tuple_dict[key] + + known_size = int(known_file['size']) + + result_size = result_file_tuple[1] + + if known_size != result_size: + logger.info("%s %s: updating %s (id=%d) size from %d to %d", + site['name'], dir_name, known_file['name'], known_file['id'], + known_size, result_size) + db.updateFileInfoSize(known_file['id'], result_size) + + if removed_file_key_set: + for removed_file_key in removed_file_key_set: + db.deleteFileInfoFromDirectory(removed_file_key[0], removed_file_key[1]) # skip empty nikhef dirs filteredSubDirectories = [loc for loc in result.subDirectories - if not ('nikhef' in loc.srmurl and 'generated' in loc.directory) ] + if not ('nikhef' in loc.srmurl and 'generated' in loc.directory) ] - # filteredSubDirectories = [loc for loc in filteredSubDirectories - # if not 'lc3_007' in loc.directory ] + # skip sksp spectroscopy project + filteredSubDirectories = [loc for loc in filteredSubDirectories + if not ('sara' in loc.srmurl and 'sksp' in loc.directory and 'spectro' in loc.directory) ] subDirectoryNames = [loc.directory for loc in filteredSubDirectories] if subDirectoryNames: - self.db.insertSubDirectories(subDirectoryNames, dir_id, - datetime.datetime.utcnow() - 
datetime.timedelta(days=1000)) + #check for already known subdirectories in the db + known_subDirectoryNames_set = set(subdir['name'] for subdir in db.subDirectories(dir_id)) + + new_subdir_name_set = set(subDirectoryNames) - known_subDirectoryNames_set; + + logger.info("%s %s: %d out of %d subdirs are new, and %d are already known", site['name'], dir_name, len(new_subdir_name_set), len(subDirectoryNames), len(known_subDirectoryNames_set)) + + if new_subdir_name_set: + subdir_ids = db.insertSubDirectories(new_subdir_name_set, dir_id) + + if len(subdir_ids) != len(new_subdir_name_set): + rescheduleVisit() except (SrmlsException, ParseException) as e: logger.error('Error while scanning %s\n%s' % (location.path(), str(e))) - logger.info('Rescheduling %s for new visit.' % (location.path(),)) - self.db.updateDirectoryLastVisitTime(self.dir_id, datetime.datetime.utcnow() - datetime.timedelta(days=1000)) + if 'does not exist' in str(e): + with store.LTAStorageDb(self.dbcreds, self.log_queries) as db: + db.deleteDirectory(self.dir_id) + else: + rescheduleVisit() except Exception as e: logger.error(str(e)) - logger.info('Rescheduling dir_id %d for new visit.' % (self.dir_id,)) - self.db.updateDirectoryLastVisitTime(self.dir_id, datetime.datetime.utcnow() - datetime.timedelta(days=1000)) - -def main(argv): - '''the main function scanning all locations and gathering the results''' - - db = store.LTAStorageDb('/data2/ltastorageoverview.sqlite') + with store.LTAStorageDb(self.dbcreds, self.log_queries) as db: + logger.info('Rescheduling dir_id %d for new visit.' 
% (self.dir_id,)) + db.updateDirectoryLastVisitTime(self.dir_id, datetime.datetime.utcnow() - VISIT_INTERVAL) +def populateDbWithLTASitesAndRootDirs(db): + """ + Helper method to fill empty database with (hardcoded) information about our LTA partners/sites/quotas + """ if not db.sites(): - db.insertSite('target', 'srm://srm.target.rug.nl:8444') - db.insertSite('nikhef', 'srm://tbn18.nikhef.nl:8446') - db.insertSite('sara', 'srm://srm.grid.sara.nl:8443') - db.insertSite('juelich', 'srm://lofar-srm.fz-juelich.de:8443') - - db.insertRootDirectory('target', '/lofar/ops') - db.insertRootDirectory('target', '/lofar/ops/disk') - db.insertRootDirectory('nikhef', '/dpm/nikhef.nl/home/lofar') - db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/ops') - db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/user') + #db.insertSite('nikhef', 'srm://tbn18.nikhef.nl:8446') + sara_id = db.insertSiteIfNotExists('sara', 'srm://srm.grid.sara.nl:8443') + juelich_id = db.insertSiteIfNotExists('juelich', 'srm://lofar-srm.fz-juelich.de:8443') + poznan_id = db.insertSiteIfNotExists('poznan', 'srm://lta-head.lofar.psnc.pl:8443') + + # insert the LTA site root dir(s) db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/software') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/ops') db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/storage') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/eor') db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/pulsar') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/cosmics') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/surveys') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/user') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/proc') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/trans') + db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/lotest') 
db.insertRootDirectory('juelich', '/pnfs/fz-juelich.de/data/lofar/ops') + db.insertRootDirectory('poznan', '/lofar/ops') + #db.insertRootDirectory('nikhef', '/dpm/nikhef.nl/home/lofar') + + def end_of_year(year): + '''little helper function which returns a datetime timestamp for the end of the given year''' + return datetime.datetime(year, 12, 31, 23, 59, 59) + + # insert quota as given by our LTA partners + db.insertSiteQuota(sara_id, 5e15, end_of_year(2012)) + db.insertSiteQuota(sara_id, 8e15, end_of_year(2013)) + db.insertSiteQuota(sara_id, 11e15, end_of_year(2014)) + db.insertSiteQuota(sara_id, 14e15, end_of_year(2015)) + db.insertSiteQuota(sara_id, 17e15, end_of_year(2016)) + db.insertSiteQuota(sara_id, 20e15, end_of_year(2017)) + db.insertSiteQuota(sara_id, 23e15, end_of_year(2018)) + + db.insertSiteQuota(juelich_id, 2.5e15, end_of_year(2013)) + db.insertSiteQuota(juelich_id, 4.5e15, end_of_year(2014)) + db.insertSiteQuota(juelich_id, 6.5e15, end_of_year(2015)) + db.insertSiteQuota(juelich_id, 8.5e15, end_of_year(2016)) + db.insertSiteQuota(juelich_id, 10.5e15, end_of_year(2017)) + db.insertSiteQuota(juelich_id, 12.5e15, end_of_year(2018)) + + db.insertSiteQuota(poznan_id, 0.5e15, end_of_year(2016)) + db.insertSiteQuota(poznan_id, 3.5e15, end_of_year(2017)) + db.insertSiteQuota(poznan_id, 5.5e15, end_of_year(2018)) + + +def main(): + '''the main function scanning all locations and gathering the results''' - for dir_id in [x[0] for x in db.rootDirectories()]: - db.updateDirectoryLastVisitTime(dir_id, datetime.datetime.utcnow() - datetime.timedelta(days=1000)) + from optparse import OptionParser + from lofar.common import dbcredentials + from lofar.messaging import setQpidLogLevel + from lofar.lta.ltastorageoverview.ingesteventhandler import IngestEventHandler + from lofar.lta.ltastorageoverview.ingesteventhandler import DEFAULT_BROKER + from lofar.lta.ltastorageoverview.ingesteventhandler import DEFAULT_INGEST_NOTIFICATION_QUEUE + from 
lofar.lta.ltastorageoverview.ingesteventhandler import DEFAULT_INGEST_NOTIFICATION_SUBJECTS + + # Check the invocation arguments + parser = OptionParser("%prog [options]", description='runs the lta scraper and stores results in the speficied database.') + parser.add_option('-j', '--parallel', dest='parallel', type='int', default=8, help='number of parallel srmls jobs to run, default: %default') + + parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, + help='Address of the qpid broker, default: %default') + parser.add_option('--ingest_notification_busname', dest='ingest_notification_busname', type='string', + default=DEFAULT_INGEST_NOTIFICATION_QUEUE, + help='Name of the notification bus exchange on the qpid broker on which the ingest notifications are published, default: %default') + parser.add_option('--ingest_notification_subjects', dest='ingest_notification_subjects', type='string', + default=DEFAULT_INGEST_NOTIFICATION_SUBJECTS, + help='Subject(s) to listen for on the ingest notification bus exchange on the qpid broker, default: %default') + + parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging') + parser.add_option('-Q', '--log-queries', dest='log_queries', action='store_true', help='log all pqsl queries') + parser.add_option_group(dbcredentials.options_group(parser)) + parser.set_defaults(dbcredentials="LTASO") + (options, args) = parser.parse_args() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.INFO) + setQpidLogLevel(logging.INFO) + options.parallel = max(1, min(8*multiprocessing.cpu_count(), options.parallel)) + logger.info("Using maximum number of parallel srmls jobs: %d" % options.parallel) + + dbcreds = dbcredentials.parse_options(options) + logger.info("Using dbcreds: %s" % dbcreds.stringWithHiddenPassword()) + + db = store.LTAStorageDb(dbcreds, options.log_queries) + 
populateDbWithLTASitesAndRootDirs(db) # for each site we want one or more ResultGetterThreads # so make a dict with a list per site based on the locations - getters = dict([(site[1],[]) for site in db.sites()]) + getters = dict([(site['name'],[]) for site in db.sites()]) # some helper functions def numLocationsInQueues(): '''returns the total number of locations in the queues''' - return db.numDirectoriesNotVisitedSince(datetime.datetime.utcnow() - datetime.timedelta(days=1)) + return db.numDirectoriesNotVisitedSince(datetime.datetime.utcnow() - VISIT_INTERVAL) def totalNumGetters(): '''returns the total number of parallel running ResultGetterThreads''' return sum([len(v) for v in getters.values()]) - # only enter main loop if there is anything to process - if numLocationsInQueues() > 0: + def cleanupFinishedGetters(): + # get rid of old finished ResultGetterThreads + finishedGetters = dict([(site_name, [getter for getter in getterList if not getter.isAlive()]) for site_name, getterList in getters.items()]) + for site_name,finishedGetterList in finishedGetters.items(): + for finishedGetter in finishedGetterList: + getters[site_name].remove(finishedGetter) - # the main loop - # loop over the locations and spawn ResultGetterThreads to get the results parallel - # use load balancing over the different sites and with respect to queue lengths - # do not overload this host system - while numLocationsInQueues() > 0 or totalNumGetters() > 0: - # get rid of old finished ResultGetterThreads - finishedGetters = dict([(site_name, [getter for getter in getterList if not getter.isAlive()]) for site_name, getterList in getters.items()]) - for site_name,finishedGetterList in finishedGetters.items(): - for finishedGetter in finishedGetterList: - getters[site_name].remove(finishedGetter) + # the main loop + # loop over the locations and spawn ResultGetterThreads to get the results parallel + # use load balancing over the different sites and with respect to queue lengths + # do not 
overload this host system + with IngestEventHandler(dbcreds=dbcreds, busname=options.ingest_notification_busname, + subjects=options.ingest_notification_subjects, broker=options.broker): + while True: + + cleanupFinishedGetters() # spawn new ResultGetterThreads # do not overload this host system - while numLocationsInQueues() > 0 and (totalNumGetters() <= 4 or - (os.getloadavg()[0] < 3*multiprocessing.cpu_count() and - totalNumGetters() < 2.5*multiprocessing.cpu_count())): - - with lock: - sitesStats = db.visitStats(datetime.datetime.utcnow() - datetime.timedelta(days=1)) + while (numLocationsInQueues() > 0 and + totalNumGetters() < options.parallel and + os.getloadavg()[0] < 4*multiprocessing.cpu_count()): + sitesStats = db.visitStats(datetime.datetime.utcnow() - VISIT_INTERVAL) for site_name, site_stats in sitesStats.items(): numGetters = len(getters[site_name]) @@ -367,9 +527,9 @@ def main(argv): site_stats['# get'] = numGetters site_stats['weight'] = weight - totalWeight = sum([site_stats['weight'] for site_stats in sitesStats.values()]) + totalWeight = max(1.0, sum([site_stats['weight'] for site_stats in sitesStats.values()])) - #logger.debug("siteStats:\n%s" % str('\n'.join([str((k, v)) for k, v in sitesStats.items()]))) + logger.debug("siteStats:\n%s" % str('\n'.join([str((k, v)) for k, v in sitesStats.items()]))) # now pick a random site using the weights chosen_site_name = None @@ -387,24 +547,29 @@ def main(argv): break chosen_dir_id = sitesStats[chosen_site_name]['least_recent_visited_dir_id'] + db.updateDirectoryLastVisitTime(chosen_dir_id, datetime.datetime.utcnow()) + + logger.debug("chosen_site_name: %s chosen_dir_id: %s", chosen_site_name, chosen_dir_id) # make and start a new ResultGetterThread the location deque of the chosen site - newGetter = ResultGetterThread(db, chosen_dir_id) + newGetter = ResultGetterThread(dbcreds, chosen_dir_id, options.log_queries) newGetter.start() getters[chosen_site_name].append(newGetter) - 
logger.info('numLocationsInQueues=%d totalNumGetters=%d' % (numLocationsInQueues(), totalNumGetters())) + cleanupFinishedGetters() - # small sleep between starting multiple getters - time.sleep(0.25) + logger.info('numLocationsInQueues=%d totalNumGetters=%d siteQueueLengths: %s load_5min: %.1f' % (numLocationsInQueues(), + totalNumGetters(), + ' '.join(['%s:%d' % (name, stats['queue_length']) for name, stats in sitesStats.items()]), + os.getloadavg()[0])) # sleep before main loop next iteration # to wait for some results - # and some getters to finis - time.sleep(1) + # and some getters to finish + time.sleep(5 if numLocationsInQueues() <= options.parallel else 0.25) # all locations were processed if __name__ == "__main__": - main(sys.argv[1:]) + main() diff --git a/LTA/ltastorageoverview/lib/store.py b/LTA/ltastorageoverview/lib/store.py index 7c53ef89fdc43dca2d87df6d5e54a9e4283d4c59..ea1c7ee88bbd555fe15ca9172728b20697fd78c1 100644 --- a/LTA/ltastorageoverview/lib/store.py +++ b/LTA/ltastorageoverview/lib/store.py @@ -22,393 +22,525 @@ # TODO: add comment to methods # TODO: reuse connection in methods (take care of exceptions closing the connection) # TODO: use generators and yield for faster and more memory efficient processing of results. -# TODO: use other database? MariaDB? instead of sqlite? 
-import os -import os.path -import sqlite3 import datetime +import logging +from lofar.common import dbcredentials +from lofar.common.postgres import PostgresDatabaseConnection +from lofar.common.postgres import FETCH_NONE,FETCH_ONE,FETCH_ALL + +logger = logging.getLogger(__name__) class EntryNotFoundException(Exception): pass -class LTAStorageDb: - def __init__(self, db_filename, removeIfExisting = False): - self.db_filename = db_filename - - if os.path.exists(self.db_filename) and removeIfExisting: - os.remove(self.db_filename) - if not os.path.exists(self.db_filename): - with sqlite3.connect(self.db_filename) as conn: - create_script_path = os.path.join(os.path.dirname(__file__), 'create_db_ltastorageoverview.sql') +class LTAStorageDb(PostgresDatabaseConnection): + """LTAStorageDb is a python API to the ltaso postgres database.""" - with open(create_script_path) as script: - conn.executescript(script.read()) - - # save created tables and triggers - conn.commit() + def __init__(self, dbcreds=None, log_queries=True): + """Create an instance of a LTAStorageDb + :param dbcredentials.DBCredentials dbcreds: the credential for logging in into the db + :param bool log_queries: do or don't log all queries + """ + super(LTAStorageDb, self).__init__(host=dbcreds.host, + database=dbcreds.database, + username=dbcreds.user, + password=dbcreds.password, + port=dbcreds.port, + log_queries=log_queries) def insertSite(self, siteName, srmurl): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() - - site_row = cursor.execute('select id from storage_site where url = ?', [srmurl]).fetchone() - site_id = site_row[0] if site_row else cursor.execute('insert into storage_site (name, url) values (?, ?)', (siteName, srmurl)).lastrowid - - conn.commit() - - return site_id + """insert a site into the database + :param string siteName: the name of the site + :param string srmurls: the srm url to that site + :return int: the new id of the inserted site + """ + site_id = 
self.executeQuery('insert into lta.site (name, url) values (%s, %s) returning id;', (siteName, srmurl), fetch=FETCH_ONE)['id'] + self.commit() + return site_id + + def insertSiteIfNotExists(self, siteName, srmurl): + """insert a site into the database and return the id. + If the site already exists, then the id of that site is just returned. + :param string siteName: the name of the site + :param string srmurls: the srm url to that site + :return int: the new id of the inserted site + """ + site = self.siteByName(siteName) + + if site: + return site['id'] + + return self.insertSite(siteName, srmurl) def insertRootDirectory(self, siteName, rootDirectory): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() - - site_row = cursor.execute('select id from storage_site where name = ?', [siteName]).fetchone() - - if not site_row: - raise EntryNotFoundException() - - site_id = site_row[0] - - dir_id = cursor.execute('insert into directory (name) values (?)', [rootDirectory]).lastrowid - - cursor.execute('insert into storage_site_root (storage_site_id, directory_id) values (?, ?)', (site_id, dir_id)).lastrowid - - conn.commit() - - return dir_id - - def insertRootLocation(self, siteName, srmurl, rootDirectory): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() - - site_row = cursor.execute('select id from storage_site where url = ?', [srmurl]).fetchone() - site_id = site_row[0] if site_row else cursor.execute('insert into storage_site (name, url) values (?, ?)', (siteName, srmurl)).lastrowid - - dir_id = cursor.execute('insert into directory (name) values (?)', [rootDirectory]).lastrowid - - cursor.execute('insert into storage_site_root (storage_site_id, directory_id) values (?, ?)', (site_id, dir_id)).lastrowid - - conn.commit() - - return dir_id - - def insertSubDirectory(self, parent_directory_id, sub_directory): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() - - dir_id = cursor.execute('insert 
into directory (name, parent_directory_id) values (?, ?)', (sub_directory, parent_directory_id)).lastrowid - - conn.commit() - - return dir_id - - def insertSubDirectories(self, subDirectoryNames, parentDirId, directoryLastVisitTime = None): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() - - cursor.executemany('insert into directory (name, parent_directory_id) values (?, ?)', - [(name, parentDirId) for name in subDirectoryNames]) - - if directoryLastVisitTime: - subDirIds = cursor.execute('''select id from directory - where parent_directory_id = %s - and name in (%s)''' % (parentDirId, ', '.join(["'%s'" % x for x in subDirectoryNames]))).fetchall() + """ + Insert a root directory for a site. Each site has at least one root directory (with no parent). + For all non-root directories, use insertSubDirectory. + Beware: Uniqueness of the root dir for a site is not enforced. + :param string siteName: the name of the site (should already be in the database) + :param string rootDirectory: the full path of the directory + :return integer: the new id of the inserted root directory + """ + site = self.siteByName(siteName) + + if not site: + raise EntryNotFoundException() + + site_id = site['id'] + + dir_id = self.executeQuery('insert into lta.directory (name) values (%s) returning id;', [rootDirectory], fetch=FETCH_ONE)['id'] + + self.executeQuery('insert into lta.site_root_dir (site_id, root_dir_id) values (%s, %s);', (site_id, dir_id)) + self.commit() + return dir_id + + def insertSubDirectory(self, sub_directory_path, parent_dir_id): + """ + Insert a sub directory which is a child of the directory with parent_dir_id + :param int parent_dir_id: the id of this subdirectories parent + :param string sub_directory_path: the full path of the subdirectory + :return integer: the new id of the inserted subdirectory + """ + result = self.executeQuery('insert into lta.directory (name, parent_dir_id) values (%s, %s) returning id;', (sub_directory_path, 
parent_dir_id), fetch=FETCH_ONE) + + if result and 'id' in result: + self.commit() + return result['id'] + + return None + + def insertSubDirectories(self, subDirectoryPaths, parentDirId, directoryLastVisitTime = None): + """ + Insert multiple sub directories which are all a child of the directory with parent_dir_id + :param int parent_dir_id: the id of this subdirectories parent + :param [string] subDirectoryPaths: a list of full paths of the subdirectories + :return [integer]: a list of new ids of the inserted subdirectories + """ + with self._connection.cursor() as cursor: + insert_values = ','.join(cursor.mogrify('(%s, %s)', (name, parentDirId)) for name in subDirectoryPaths) + + query = '''insert into lta.directory (name, parent_dir_id) + VALUES {values} + RETURNING id;'''.format(values=insert_values) + + subDirIds = [x['id'] for x in self.executeQuery(query, fetch=FETCH_ALL)] + + if [x for x in subDirIds if x < 0]: + logger.error("One or more subDirectoryPaths could not be inserted. Rolling back.") + self.rollback() + return None + + if directoryLastVisitTime: + with self._connection.cursor() as cursor: + insert_values = ','.join(cursor.mogrify('(%s, %s)', (directoryLastVisitTime, id)) for id in subDirIds) + + query = '''insert into scraper.last_directory_visit (visit_date, dir_id) + VALUES {values} + RETURNING id;'''.format(values=insert_values) + + ldvIds = [x['id'] for x in self.executeQuery(query, fetch=FETCH_ALL)] + + if [x for x in ldvIds if x < 0]: + logger.error("One or more scraper.last_directory_visit's could not be inserted. Rolling back.") + self.rollback() + return None + + self.commit() + return subDirIds + + def insert_missing_directory_tree_if_needed(self, dir_path, site_id): + """Insert all directories in the dir_path tree which are not in the database yet. 
+ example: root_dir = '/path/to/root' + known_subdir_1 = '/path/to/root/sub1' + known_subdir_2 = '/path/to/root/sub1/sub2' + (input) dir_path = '/path/to/root/sub1/sub2/sub3/sub4' + would insert '/path/to/root/sub1/sub2/sub3 under known_subdir_2 and + '/path/to/root/sub1/sub2/sub3/sub4' under the new sub3 dir. + :param str dir_path: a full path to a (sub)directory + :param int site_id: the id of the site for which you want to insert the dir tree. + :return: a dict of the inserted directories with their new dir id's. + """ + # for this site (which might have multiple root dirs), find the root_dir under which this dir_path belongs + parent_root_dir = self.get_root_dir_for_dir_path(dir_path, site_id) + + if parent_root_dir is None: + raise LookupError("Could not find parent root dir for site_id=%d for dir_path=%s" % (site_id, dir_path)) + + # find the lowest known dir in the database + # and get the list of missing subdirs for dir_path, which are not in the database ye + missing_child_dirs, lowest_known_db_dir = self._get_lowest_known_directory(dir_path, parent_root_dir) + + # now we should have a known parent dir from the db, and we know which child dirs are missing. + # append the missing children in reverse order + # (from just under the known parent, down to the lowest missing subdir). + result = {} + missing_childs_parent_dir_id = lowest_known_db_dir['dir_id'] + for missing_child_dir in reversed(missing_child_dirs): + missing_childs_parent_dir_id = self.insertSubDirectory(missing_child_dir, missing_childs_parent_dir_id) + result[missing_child_dir] = missing_childs_parent_dir_id + + # return the dict of inserted child dirs with their new dir id's + return result + + def deleteDirectory(self, dir_id, commit=True): + """ + delete the directory with id dir_id. Cascacades and also deletes all subdirs, files and stats under this directory. 
+ :param int dir_id: the id of the directory to be deleted + :param bool commit: optional, commit directly when True + """ + self.executeQuery('DELETE FROM lta.directory where id = %s;', (dir_id,), fetch=FETCH_NONE) + + if commit: + self.commit() + + + def insertFileInfo(self, name, size, creation_date, parent_dir_id, commit=True): + fileinfo_id = self.executeQuery('insert into lta.fileinfo (name, size, creation_date, dir_id) values (%s, %s, %s, %s) returning id;', + (name.split('/')[-1], size, creation_date, parent_dir_id)) + + if commit: + self.commit() + return fileinfo_id - subDirIds = [x[0] for x in subDirIds] - - for subDirId in subDirIds: - cursor.execute('''insert into scraper_last_directory_visit (visit_date, directory_id) - values (?, ?)''', (directoryLastVisitTime, subDirId)) - - conn.commit() - - def insertFileInfo(self, name, size, creation_date, parent_directory_id): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() - - fileinfo_id = cursor.execute('insert into fileinfo (name, size, creation_date, directory_id) values (?, ?, ?, ?)', - (name.split('/')[-1], size, creation_date, parent_directory_id)) - - conn.commit() + def insertFileInfos(self, file_infos): + with self._connection.cursor() as cursor: + insert_values = [cursor.mogrify('(%s, %s, %s, %s)', (f[0].split('/')[-1], f[1], f[2], f[3])) for f in file_infos] - return fileinfo_id + insert_values = ','.join([x for x in insert_values]) - def insertFileInfos(self, file_infos): - with sqlite3.connect(self.db_filename) as conn: - conn.executemany('insert into fileinfo (name, size, creation_date, directory_id) values (?, ?, ?, ?)', - [(f[0].split('/')[-1], f[1], f[2], f[3]) for f in file_infos]) + query = '''insert into lta.fileinfo (name, size, creation_date, dir_id) + VALUES {values} + RETURNING id;'''.format(values=insert_values) - conn.commit() + ids = [x['id'] for x in self.executeQuery(query, fetch=FETCH_ALL)] - def insertLocationResult(self, result): - with 
sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() + if [x for x in ids if x < 0]: + logger.error("One or more file_infos could not be inserted. Rolling back.") + self.rollback() + return None - dir_row = cursor.execute('''select directory.id from storage_site - join storage_site_root on storage_site_root.storage_site_id = storage_site.id - join directory on directory.id = storage_site_root.directory_id - where storage_site.url = ? - and directory.name = ? - ''', (result.location.srmurl, result.location.directory)).fetchone() + self.commit() + return ids - if dir_row: - dir_id = dir_row[0] - cursor.executemany('insert into directory (name, parent_directory_id) values (?, ?)', - [(subDir.directory, dir_id) for subDir in result.subDirectories]) + def updateFileInfoSize(self, id, size, commit=True): + fileinfo_id = self.executeQuery('''update lta.fileinfo set size=%s where id=%s;''', (size, id)) - cursor.executemany('insert into fileinfo (name, size, creation_date, directory_id) values (?, ?, ?, ?)', - [(file.filename.split('/')[-1], file.size, datetime.datetime.utcnow(), dir_id) for file in result.files]) + if commit: + self.commit() - conn.commit() + def deleteFileInfoFromDirectory(self, file_name, dir_id, commit=True): + self.executeQuery('DELETE FROM lta.fileinfo where dir_id = %s and name = %s;', (dir_id,file_name), fetch=FETCH_NONE) - def updateDirectoryLastVisitTime(self, directory_id, timestamp): - with sqlite3.connect(self.db_filename) as conn: - cursor = conn.cursor() + if commit: + self.commit() - updated = cursor.execute('''update or ignore scraper_last_directory_visit - set visit_date=? 
- where directory_id = ?''', (timestamp, directory_id)).rowcount + def updateDirectoryLastVisitTime(self, dir_id, timestamp, commit=True): + self.executeQuery('''update scraper.last_directory_visit + set visit_date=%s + where dir_id = %s;''', (timestamp, dir_id), fetch=FETCH_NONE) - if not updated: - cursor.execute('''insert into scraper_last_directory_visit - (visit_date, directory_id) - values (?, ?)''', (timestamp, directory_id)) + if commit: + self.commit() - conn.commit() + def directoryLastVisitTime(self, dir_id): + """ + get the timestamp when the directory was last visited. + :param int dir_id: the id of the directory + :return datetime: the timestamp when the directory was last visited. + """ + result = self.executeQuery('''select visit_date FROM scraper.last_directory_visit + where dir_id = %s;''', (dir_id,), fetch=FETCH_ONE) + if result is None: + return None + return result.get('visit_date') def sites(self): '''returns list of tuples (id, name, url) of all sites''' - with sqlite3.connect(self.db_filename) as conn: - return conn.execute('''SELECT id, name, url FROM storage_site''').fetchall() + return self.executeQuery('SELECT id, name, url FROM lta.site;', fetch=FETCH_ALL) def site(self, site_id): '''returns tuple (id, name, url) for site with id=site_id''' - with sqlite3.connect(self.db_filename) as conn: - return conn.execute('''SELECT id, name, url FROM storage_site where id = ?''', [site_id]).fetchone() - - def directory(self, directory_id): - '''returns directory tuple (id, name, site_id, site_name) for the given directory_id''' - with sqlite3.connect(self.db_filename) as conn: - return conn.execute('''SELECT dir.id, dir.name, site.id, site.name - FROM storage_site_root - join storage_site site on site.id = storage_site_root.storage_site_id - join directory_closure dc on dc.ancestor_id = storage_site_root.directory_id - join directory dir on dir.id = dc.descendant_id - where dc.descendant_id = ? 
- ''', [directory_id]).fetchone() - - def directory_id(self, site_id, directory_name): - '''returns directory id for the given site_id, directory_name''' - with sqlite3.connect(self.db_filename) as conn: - result = conn.execute('''SELECT dir.id - FROM storage_site_root - join directory_closure dc on dc.ancestor_id = storage_site_root.directory_id - join directory dir on dir.id = dc.descendant_id - where storage_site_root.storage_site_id = ? - and dir.name = ? - ''', [site_id, directory_name]).fetchone() - - if result: - return result[0] - - return -1 + return self.executeQuery('SELECT id, name, url FROM lta.site where id = %s;', [site_id], FETCH_ONE) + + def siteByName(self, site_name): + '''returns tuple (id, name, url) for site with id=site_id''' + return self.executeQuery('SELECT id, name, url FROM lta.site where name = %s;', [site_name], FETCH_ONE) + + def siteQuota(self, site_id): + '''returns list of quota tuples (site_id, site_name, quota, valid_until_date)''' + return self.executeQuery('SELECT * FROM lta.site_quota;', FETCH_All) + + def insertSiteQuota(self, site_id, quota, valid_until_date, commit=True): + """ + insert the quota for a given site with a date until which this quota is valid. + :param int site_id: the id of the site for which you want to set the quota. + :param int quota: the quota in number of bytes. + :param datetime valid_until_date: the timestamp until which this given quota is valid. + :param bool commit: do/don't commit immediately. 
+ :return: the id of the new quota + """ + id = self.executeQuery('INSERT INTO lta.site_quota(site_id, quota, valid_until_date) values (%s, %s, %s) RETURNING id;', + (site_id, quota, valid_until_date)) + if commit: + self.commit() + return id + + '''returns list of quota tuples (site_id, site_name, quota, valid_until_date)''' + return self.executeQuery('SELECT * FROM lta.site_quota;', FETCH_All) + + def directory(self, dir_id): + '''returns lta.directory (id, name, site_id, site_name) for the given dir_id''' + return self.executeQuery('''SELECT dir.id as dir_id, dir.name as dir_name, site.id as site_id, site.name as site_name + FROM lta.site_root_dir + join lta.site site on site.id = site_root_dir.site_id + join lta.directory_closure dc on dc.ancestor_id = site_root_dir.root_dir_id + join lta.directory dir on dir.id = dc.descendant_id + where dc.descendant_id = %s; + ''', [dir_id], fetch=FETCH_ONE) + + def directoryByName(self, dir_name, site_id=None): + """ + returns lta.directory (id, name, site_id, site_name) for the given dir_name + :param string dir_name: the directory to search for + :param int site_id: optional site_id to limit the search for this given site. 
+ :return: + """ + query = '''SELECT dir.id as dir_id, dir.name as dir_name, site.id as site_id, site.name as site_name + FROM lta.site_root_dir + join lta.site site on site.id = site_root_dir.site_id + join lta.directory_closure dc on dc.ancestor_id = site_root_dir.root_dir_id + join lta.directory dir on dir.id = dc.descendant_id + where dir.name = %s''' + args = [dir_name] + if site_id is not None: + query += " and site.id = %s" + args.append(site_id) + + return self.executeQuery(query, args, fetch=FETCH_ONE) + + def dir_id(self, site_id, directory_name): + '''returns lta.directory id for the given site_id, directory_name''' + result = self.executeQuery('''SELECT dir.id + FROM lta.site_root_dir + join lta.directory_closure dc on dc.ancestor_id = site_root_dir.root_dir_id + join lta.directory dir on dir.id = dc.descendant_id + where site_root_dir.site_id = %s + and dir.name = %s;''', [site_id, directory_name], fetch=FETCH_ONE) + + if result['id']: + return result['id'] + + return -1 def rootDirectories(self): - '''returns list of all root directories (id, name, site_id, site_name) for all sites''' - with sqlite3.connect(self.db_filename) as conn: - return conn.execute(''' - SELECT * - FROM root_directories - ''').fetchall() + '''returns list of all root directories for all sites''' + return self.executeQuery('''SELECT * FROM lta.site_root_directory;''', fetch=FETCH_ALL) def rootDirectoriesForSite(self, site_id): '''returns list of all root directories (id, name) for given site_id''' - with sqlite3.connect(self.db_filename) as conn: - return conn.execute('''SELECT dir_id, dir_name - FROM root_directories - where site_id = ?''', [site_id]).fetchall() - - def subDirectories(self, directory_id, depth = 1, includeSelf=False): - '''returns list of all sub directories up to the given depth (id, name, site_id, site_name, depth) for the given directory_id''' - with sqlite3.connect(self.db_filename) as conn: - return conn.execute(''' - SELECT dir.id, dir.name, 
dir.parent_directory_id, directory_closure.depth FROM directory_closure - join directory dir on dir.id = directory_closure.descendant_id - where ancestor_id = ? and depth <= ? and depth > ? - order by depth asc - ''', (directory_id, depth, -1 if includeSelf else 0)).fetchall() - - def parentDirectories(self, directory_id): - with sqlite3.connect(self.db_filename) as conn: - return conn.execute(''' - SELECT dir.* FROM directory_closure dc - join directory dir on dir.id = dc.ancestor_id - where dc.descendant_id = ? and depth > 0 - order by depth desc - ''', [directory_id]).fetchall() + return self.executeQuery('''SELECT * FROM lta.site_root_directory where site_id = %s;''', [site_id], fetch=FETCH_ALL) + + def rootDirectory(self, root_dir_id): + '''returns the root directory for the given root_dir_id''' + return self.executeQuery('''SELECT * FROM lta.site_root_directory WHERE root_dir_id = %s;''', + [root_dir_id], fetch=FETCH_ONE) + + def get_root_dir_for_dir_path(self, dir_path, site_id): + """ + find the root_dir under which this dir_path at the given site_id belongs + :param str dir_path: a full path to a (sub)directory + :param int site_id: the id of the site which contains the root dir under which the dir_path resides. + :return: the dict for the root directory under which the given dir_path resides. 
+ """ + root_dirs = self.rootDirectoriesForSite(site_id) + return next((rd for rd in root_dirs if dir_path.startswith(rd['dir_name'])), None) + + def subDirectories(self, dir_id, depth = 1, includeSelf=False): + '''returns list of all sub directories up to the given depth (id, name, parent_dir_id, depth) for the given dir_id''' + if depth == 1 and not includeSelf: + return self.executeQuery(''' + SELECT dir.id as id, dir.name as name, dir.parent_dir_id as parent_dir_id + FROM lta.directory dir + where dir.parent_dir_id = %s; + ''', (dir_id, ), fetch=FETCH_ALL) + return self.executeQuery(''' + SELECT dir.id as id, dir.name as name, dir.parent_dir_id as parent_dir_id, lta.directory_closure.depth as depth + FROM lta.directory_closure + join lta.directory dir on dir.id = lta.directory_closure.descendant_id + where ancestor_id = %s and depth <= %s and depth > %s + order by depth asc; + ''', (dir_id, depth, -1 if includeSelf else 0), fetch=FETCH_ALL) + + def parentDirectories(self, dir_id): + return self.executeQuery(''' + SELECT dir.* FROM lta.directory_closure dc + join lta.directory dir on dir.id = dc.ancestor_id + where dc.descendant_id = %s and depth > 0 + order by depth desc; + ''', [dir_id], fetch=FETCH_ALL) + + def _get_lowest_known_directory(self, dir_path, parent_root_dir): + """ + given the dir_path, find try to find the lowest known dir which is a subdir under the given parent_root_dir + example: root_dir = '/path/to/root' + known_subdir_1 = '/path/to/root/sub1' + known_subdir_2 = '/path/to/root/sub1/sub2' + (input) dir_path = '/path/to/root/sub1/sub2/sub3/sub4' + would return (['/path/to/root/sub1/sub2/sub3/sub4', '/path/to/root/sub1/sub2/sub3'], <dict_for_known_subdir_2>) + :param str dir_path: a full directory path (which should start with the same path as the parent root dir) + :param dict parent_root_dir: a self.rootDirectory() result dict the supposed parent root dir + :return: a tuple (list, dict) where the list is the list of missing full subdir 
paths, and the dict is the + lowest known subdir, or None if not found. + """ + site_id = parent_root_dir['site_id'] + missing_child_dirs = [] + + # search for dir_path in the database... it might already be known + climbing_dir_path = dir_path + db_dir = self.directoryByName(climbing_dir_path, site_id) + # if climbing_dir_path is not known, then walk up one dir, and repeat until at top. + while db_dir is None and parent_root_dir['dir_name'] != climbing_dir_path: + # climb up one dir, add lowest subdir as missing child + path_parts = climbing_dir_path.split('/') + missing_child_dirs.append(climbing_dir_path) + climbing_dir_path = '/'.join(path_parts[:-1]) + db_dir = self.directoryByName(climbing_dir_path, site_id) + + # return the list of missing_child_dirs (which might be empty) + # and the found lowest found db_dir (which might be None) + return missing_child_dirs, db_dir def _date_bounded(self, query, args, table_column, from_date=None, to_date=None): result_query = query result_args = args if from_date: - result_query += ' and %s >= ?' % table_column + result_query += ' and {column} >= %s'.format(column=table_column) result_args += (from_date,) if to_date: - result_query += ' and %s <= ?' 
% table_column + result_query += ' and {column} <= %s'.format(column=table_column) result_args += (to_date,) return result_query, result_args - def filesInDirectory(self, directory_id, from_date=None, to_date=None): - with sqlite3.connect(self.db_filename) as conn: - query = '''SELECT * FROM fileinfo - where directory_id = ?''' + def filesInDirectory(self, dir_id, from_date=None, to_date=None): + query = '''SELECT * FROM lta.fileinfo + where dir_id = %s''' - args = (directory_id,) + args = (dir_id,) - query, args = self._date_bounded(query, args, 'fileinfo.creation_date', from_date, to_date) + query, args = self._date_bounded(query, args, 'fileinfo.creation_date', from_date, to_date) - return conn.execute(query, args).fetchall() + return self.executeQuery(query, args, fetch=FETCH_ALL) - def numFilesInDirectory(self, directory_id, from_date=None, to_date=None): - with sqlite3.connect(self.db_filename) as conn: - query = '''SELECT count(id) FROM fileinfo - where directory_id = ?''' + def numFilesInDirectory(self, dir_id, from_date=None, to_date=None): + query = '''SELECT count(id) FROM lta.fileinfo + where dir_id = %s''' - args = (directory_id,) + args = (dir_id,) - query, args = self._date_bounded(query, args, 'fileinfo.creation_date', from_date, to_date) + query, args = self._date_bounded(query, args, 'fileinfo.creation_date', from_date, to_date) - result = conn.execute(query, args).fetchone() + result = self.executeQuery(query, args, fetch=FETCH_ONE) - if result: - return result[0] + if result['count']: + return result['count'] - return 0 + return 0 - def filesInTree(self, base_directory_id, from_date=None, to_date=None): - with sqlite3.connect(self.db_filename) as conn: - query = '''SELECT dir.id, dir.name, dc.depth, fileinfo.id, fileinfo.name, fileinfo.size, fileinfo.creation_date FROM directory_closure dc - join directory dir on dir.id = dc.descendant_id - join fileinfo on fileinfo.directory_id = dc.descendant_id - where dc.ancestor_id = ?''' + def 
directoryTreeStats(self, dir_id): + query = '''SELECT * FROM metainfo.stats WHERE dir_id = %s''' + args = (dir_id,) - args = (base_directory_id,) + return self.executeQuery(query, args, fetch=FETCH_ONE) - query, args = self._date_bounded(query, args, 'fileinfo.creation_date', from_date, to_date) + def filesInTree(self, base_dir_id, from_date=None, to_date=None): + query = '''SELECT dir.id as dir_id, dir.name as dir_name, dc.depth as dir_depth, fi.id as file_id, fi.name as file_name, fi.size as file_size, fi.creation_date as file_creation_date + FROM lta.directory_closure dc + JOIN lta.directory dir on dir.id = dc.descendant_id + JOIN lta.fileinfo fi on fi.dir_id = dc.descendant_id + WHERE dc.ancestor_id = %s''' - return conn.execute(query, args).fetchall() + args = (base_dir_id,) - def numFilesInTree(self, base_directory_id, from_date=None, to_date=None): - with sqlite3.connect(self.db_filename) as conn: - query = ''' - SELECT sum(directory_stats.num_files) FROM directory_stats - join directory_closure dc on dc.descendant_id = directory_stats.directory_id - where ancestor_id = ? 
- ''' + query, args = self._date_bounded(query, args, 'fi.creation_date', from_date, to_date) - args = (base_directory_id,) + return self.executeQuery(query, args, fetch=FETCH_ALL) - query, args = self._date_bounded(query, args, 'directory_stats.min_file_creation_date', from_date=from_date) - query, args = self._date_bounded(query, args, 'directory_stats.max_file_creation_date', to_date=to_date) + def totalFileSizeAndNumFilesInSite(self, site_id, from_date=None, to_date=None): + query = '''SELECT * FROM metainfo.get_site_stats(%s, %s, %s)''' + args = (site_id, from_date, to_date) - result = conn.execute(query, args).fetchone() + return self.executeQuery(query, args, fetch=FETCH_ONE) - if result[0]: - return result[0] + def totalFileSizeAndNumFilesInTree(self, base_dir_id, from_date=None, to_date=None): + query = '''SELECT * FROM metainfo.get_tree_stats(%s, %s, %s)''' + args = (base_dir_id, from_date, to_date) - return 0 + return self.executeQuery(query, args, fetch=FETCH_ONE) - def totalFileSizeInTree(self, base_directory_id, from_date=None, to_date=None): - with sqlite3.connect(self.db_filename) as conn: - query = ''' - SELECT sum(directory_stats.total_file_size) FROM directory_stats - join directory_closure dc on dc.descendant_id = directory_stats.directory_id - where ancestor_id = ? 
- ''' - args = (base_directory_id,) + def totalFileSizeInTree(self, base_dir_id, from_date=None, to_date=None): + return self.totalFileSizeAndNumFilesInTree(base_dir_id, from_date, to_date)['tree_total_file_size'] - query, args = self._date_bounded(query, args, 'directory_stats.min_file_creation_date', from_date=from_date) - query, args = self._date_bounded(query, args, 'directory_stats.max_file_creation_date', to_date=to_date) - - result = conn.execute(query, args).fetchone() - - if result[0]: - return result[0] - return 0 + def numFilesInTree(self, base_dir_id, from_date=None, to_date=None): + return self.totalFileSizeAndNumFilesInTree(base_dir_id, from_date, to_date)['tree_num_files'] def numFilesInSite(self, site_id, from_date=None, to_date=None): - num_files = 0L - - root_dirs = self.rootDirectoriesForSite(site_id) - - for root_dir in root_dirs: - num_files += long(self.numFilesInTree(root_dir[0], from_date, to_date)) - - return num_files + return self.totalFileSizeAndNumFilesInSite(site_id, from_date, to_date)['tree_num_files'] def totalFileSizeInSite(self, site_id, from_date=None, to_date=None): - total_size = 0L - - root_dirs = self.rootDirectoriesForSite(site_id) - - for root_dir in root_dirs: - total_size += long(self.totalFileSizeInTree(root_dir[0], from_date, to_date)) - - return total_size + return self.totalFileSizeAndNumFilesInSite(site_id, from_date, to_date)['tree_total_file_size'] - def datetimeRangeOfFilesInTree(self, base_directory_id = None): - with sqlite3.connect(self.db_filename) as conn: - query = ''' - SELECT min(fileinfo.creation_date) as min_creation_date, - max(fileinfo.creation_date) as max_creation_date - FROM fileinfo - ''' - args = [] + def datetimeRangeOfFilesInTree(self, base_dir_id = None): + query = '''SELECT min(fileinfo.creation_date) as min_creation_date, + max(fileinfo.creation_date) as max_creation_date + FROM lta.fileinfo + LIMIT 1''' + args = None - if base_directory_id: - query += '''\njoin directory_closure dc on 
dc.descendant_id = fileinfo.directory_id - where ancestor_id = ?''' - args.append(base_directory_id) + if base_dir_id: + query += '''\njoin lta.directory_closure dc on dc.descendant_id = lta.fileinfo.dir_id + where ancestor_id = %s''' + args = [base_dir_id] - result = conn.execute(query, args).fetchone() + result = self.executeQuery(query, args, fetch=FETCH_ONE) - if result[0]: - format = '%Y-%m-%d %H:%M:%S %Z' - return (datetime.datetime.strptime(result[0]+' UTC', format), - datetime.datetime.strptime(result[1]+' UTC', format)) + if result: + return (result['min_creation_date'], result['max_creation_date']) - utcnow = datetime.datetime.utcnow() - return (utcnow, utcnow) + utcnow = datetime.datetime.utcnow() + return (utcnow, utcnow) def mostRecentVisitDate(self): - with sqlite3.connect(self.db_filename) as conn: - result = conn.execute(''' - SELECT visit_date FROM scraper_last_directory_visit - order by visit_date desc - limit 1 - ''').fetchone() + result = self.executeQuery(''' + SELECT visit_date FROM scraper.last_directory_visit + order by visit_date desc + limit 1 + ''', fetch=FETCH_ONE) - if result: - format = '%Y-%m-%d %H:%M:%S.%f %Z' - return datetime.datetime.strptime(result[0]+' UTC', format) + if result: + return result['visit_date'] - return datetime.datetime(2011, 1, 1) + return datetime.datetime(2011, 1, 1) def numDirectoriesNotVisitedSince(self, timestamp): - with sqlite3.connect(self.db_filename) as conn: - result = conn.execute(''' - SELECT count(directory_id) FROM scraper_last_directory_visit - WHERE visit_date < ? 
- ''', [timestamp]).fetchone() + result = self.executeQuery(''' + SELECT count(dir_id) FROM scraper.last_directory_visit + WHERE visit_date < %s + ''', [timestamp], fetch=FETCH_ONE) - if result: - return result[0] + if result: + return result['count'] - return 0 + return 0 + + def siteQuotaUsages(self): + return self.executeQuery('''SELECT * FROM metainfo.site_quota_usage;''', fetch=FETCH_ALL) + + def siteQuotaRootDirStats(self): + return self.executeQuery('''SELECT * FROM metainfo.site_quota_root_dir_stats;''', fetch=FETCH_ALL) def visitStats(self, before_timestamp = None): if not before_timestamp: @@ -417,24 +549,32 @@ class LTAStorageDb: sites = self.sites() siteStats = {} - with sqlite3.connect(self.db_filename) as conn: - - for site in sites: - site_id = site[0] - site_name = site[1] - siteStats[site_name] = {'id': site_id} + for site in sites: + site_id = site['id'] + site_name = site['name'] + siteStats[site_name] = {'site_id': site_id} - visits = conn.execute(''' - select * - from site_scraper_last_directoy_visit - where site_id = ? - and last_visit < ? 
- order by last_visit asc - ''', [site_id, before_timestamp]).fetchall() + visits = self.executeQuery(''' + select * + from scraper.site_scraper_last_directory_visit + where site_id = %s + and last_visit < %s + order by last_visit asc + ''', [site_id, before_timestamp], fetch=FETCH_ALL) - siteStats[site_name]['queue_length'] = len(visits) - if len(visits) > 0: - siteStats[site_name]['least_recent_visited_dir_id'] = visits[0][2] - siteStats[site_name]['least_recent_visit'] = visits[0][4] + siteStats[site_name]['queue_length'] = len(visits) + if len(visits) > 0: + siteStats[site_name]['least_recent_visited_dir_id'] = visits[0]['dir_id'] + siteStats[site_name]['least_recent_visit'] = visits[0]['last_visit'] return siteStats + + + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) + dbcreds = dbcredentials.DBCredentials().get('LTASO') + with LTAStorageDb(dbcreds, True) as db: + print db.rootDirectoriesForSite(1) + print db.dir_id(1, 'rootDir_0') diff --git a/LTA/ltastorageoverview/lib/webservice/templates/index.html b/LTA/ltastorageoverview/lib/webservice/templates/index.html index 174bac4088bbe1ae53232b36944cc822f6995dc1..0ee88d2638dda72fd041e1fc82d7b821c86ed0a2 100644 --- a/LTA/ltastorageoverview/lib/webservice/templates/index.html +++ b/LTA/ltastorageoverview/lib/webservice/templates/index.html @@ -5,10 +5,21 @@ <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/> <title>{{title}}</title> - + <meta http-equiv="refresh" content="300"> <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script> <script type="text/javascript" src="http://code.highcharts.com/highcharts.js"></script> <script type="text/javascript" src="http://code.highcharts.com/modules/exporting.js"></script> + <style> + table, th, td { + border: 1px solid black; + border-collapse: collapse; + text-align: right; + font-size: 12px; + } + th, td { + padding: 8px; + 
} +</style> </head> <body> <script type="text/javascript"> @@ -21,10 +32,11 @@ $('#usage_piechart_container').highcharts({ chart: { - plotBackgroundColor: null, - plotBorderWidth: null, - plotShadow: false, - type: 'pie' + animation: false, + plotBackgroundColor: null, + plotBorderWidth: null, + plotShadow: false, + type: 'pie' }, title: { text: '<span style="font-size: 22px">{{storagesitetitle|safe}}</span>', @@ -38,6 +50,7 @@ }, plotOptions: { pie: { + animation: false, allowPointSelect: true, cursor: 'pointer', dataLabels: { @@ -56,12 +69,52 @@ colorByPoint: true, data: {{storagesitedata|safe}} }] - }); + }); + $('#free_space_piechart_container').highcharts({ + chart: { + animation: false, + plotBackgroundColor: null, + plotBorderWidth: null, + plotShadow: false, + type: 'pie' + }, + title: { + text: '<span style="font-size: 22px">{{storagesite_free_space_title|safe}}</span>', + }, + subtitle: { + text: '<span style="font-size: 20px">{{storagesite_free_space_subtitle|safe}}</span>', + }, + legend: { itemStyle: { fontSize: '22px' } }, + tooltip: { + pointFormat: '{series.name}: <b>{point.percentage:.1f}%</b>' + }, + plotOptions: { + pie: { + animation: false, + allowPointSelect: true, + cursor: 'pointer', + dataLabels: { + enabled: true, + format: '<b>{point.name}</b>: {point.percentage:.1f} %', + style: { + color: (Highcharts.theme && Highcharts.theme.contrastTextColor) || 'black', + fontFamily: '"Lucida Grande", "Lucida Sans Unicode", Verdana, Arial, Helvetica, sans-serif', // default font + fontSize: '16px' + } + } + } + }, + series: [{ + name: "StorageSiteUsage", + colorByPoint: true, + data: {{storagesite_free_space|safe}} + }] + }); $(function () { $('#usage_trend_container').highcharts({ - chart: { type: 'area'}, + chart: { type: 'area', animation: false}, title: { text: '<span style="font-size: 22px">LTA Storage Site Usage Trend</span>', }, @@ -96,6 +149,7 @@ }, plotOptions: { area: { + animation: false, stacking: 'normal', lineColor: '#666666', 
lineWidth: 1, @@ -112,13 +166,13 @@ $(function () { $('#usage_deltas_container').highcharts({ - chart: { type: 'column'}, + chart: { type: 'column', animation: false}, title: { - text: '<span style="font-size: 22px">LTA Storage Site Deltas Per Month</span>', + text: '<span style="font-size: 22px">LTA Storage Site Deltas</span>', }, xAxis: { type: 'datetime', - labels: { style: { fontSize: '22px'} }, + labels: { style: { fontSize: '22px'}, align: 'right' } }, yAxis: { title: { text: '<span style="font-size: 22px">TB</span>'}, @@ -145,6 +199,7 @@ }, plotOptions: { column: { + animation: false, stacking:'normal', pointPadding: 0.01, groupPadding: 0.0, @@ -154,13 +209,72 @@ series: {{deltas_per_month_series|safe}} }); }); + }); + $(function () { + $('#quotas_chart_container').highcharts({ + chart: { type: 'column', + animation: false }, + title: { + text: '<span style="font-size: 22px">LTA Storage Site Tape Quota</span>', + }, + xAxis: { + type: 'datetime', + labels: { style: { fontSize: '22px'}, align: 'left' }, + }, + yAxis: { + title: { text: '<span style="font-size: 22px">PB</span>'}, + labels: { + style: { fontSize: '22px'}, + formatter: function () { return this.value / 1e15;}}, + }, + legend: { itemStyle: { fontSize: '14px' } }, + tooltip: { + formatter: function () { + var s = '<b>' + Highcharts.dateFormat('%Y/%m/%d', this.x) + '</b>'; + + $.each(this.points, function () { + s += '<br/><b>' + this.series.name + '</b>: ' + + Highcharts.numberFormat(this.y / 1e15, 2, '.') + ' PB'; + }); + return s; + }, + shared: true + }, + style: { + color: (Highcharts.theme && Highcharts.theme.contrastTextColor) || 'black', + fontFamily: '"Lucida Grande", "Lucida Sans Unicode", Verdana, Arial, Helvetica, sans-serif', // default font + fontSize: '22px' + }, + plotOptions: { + column: { + animation: false, + stacking:'normal', + pointPadding: 0.01, + groupPadding: 0.05, + pointPlacement: -0.45 + } + }, + series: {{quota_series|safe}} + }); }); + </script> <div 
id="usage_trend_container" style="min-width: 310px; min-height: 600px; width: 95%; height: 100%; margin: 24px; margin-left: auto; margin-right: auto; "></div> <div id="usage_deltas_container" style="min-width: 310px; min-height: 600px; width: 95%; height: 100%; margin: 24px; margin-left: auto; margin-right: auto; "></div> - <div id="usage_piechart_container" style="min-width: 310px; min-height: 600px; width: 80%; height: 100%; margin: 24px; margin-left: auto; margin-right: auto; "></div> + <div style="overflow: hidden; "> + <div id="usage_piechart_container" style="float: left; min-width: 310px; min-height: 600px; width: 48%; height: 100%; margin: 24px; margin-left: auto; "></div> + <div id="free_space_piechart_container" style="float: left; min-width: 310px; min-height: 600px; width: 48%; height: 100%; margin: 24px; margin-right: auto; "></div> + </div> + <div style="overflow: hidden; "> + <div id="quotas_chart_container" style="float: left; min-width: 310px; min-height: 600px; width: 48%; height: 100%; margin: 24px; margin-left: auto; "></div> + <div style="float: left; min-width: 310px; min-height: 600px; width: 48%; height: 100%; margin: 24px; margin-right: auto; "> + <p>Latest quota and usages per site and tape quotable dirs</p> + {{site_tape_usages|safe}} + <p>Please note that Juelich provides us quota- and usage numbers which are 700TB lower than the actual number of bytes on tape, due to a technical issue at their site. The numbers presented here are the actual number of bytes on tape.</p> + </div> + </div> Data last gathered at {{data_gathered_timestamp}}. 
</body> </html> diff --git a/LTA/ltastorageoverview/lib/webservice/webservice.py b/LTA/ltastorageoverview/lib/webservice/webservice.py index 7acbf8f82691f967d8a4b8dcf83eadf21ca50b8c..1ac8f3f1e210c747e3622c7f2727a229828b95a1 100755 --- a/LTA/ltastorageoverview/lib/webservice/webservice.py +++ b/LTA/ltastorageoverview/lib/webservice/webservice.py @@ -27,15 +27,25 @@ import sys import os import os.path from datetime import datetime +import logging from flask import Flask from flask import render_template from flask import json -from ltastorageoverview import store +from lofar.lta.ltastorageoverview import store from lofar.common.util import humanreadablesize from lofar.common.datetimeutils import monthRanges -app = Flask('LTA storage overview') -app.config.root_path = os.path.dirname(__file__) +logger = logging.getLogger(__name__) + +__root_path = os.path.dirname(os.path.realpath(__file__)) + +'''The flask webservice app''' +app = Flask('LTA storage overview', + instance_path=__root_path, + template_folder=os.path.join(__root_path, 'templates'), + static_folder=os.path.join(__root_path, 'static'), + instance_relative_config=True) + db = None @app.route('/') @@ -44,23 +54,36 @@ def index(): # TODO: serve html first, and let client request data via ajax usages = {} + colors = {'sara': {'used': '#90ed7d', 'free': '#c5f6bc'}, + 'juelich': {'used': '#494950', 'free': '#a1a1aa'}, + 'poznan': {'used': '#7cb5ec', 'free': '#bcdaf5'}} + sites = db.sites() - sites2 = [x for x in sites if x[1] != 'nikhef'] - sites = [sites2[0], sites2[2], sites2[1]] + sitesDict = { s['name']:s for s in sites } + sites = [sitesDict[sitename] for sitename in ['poznan', 'juelich', 'sara'] if sitename in sitesDict] - total = 0.0 - numFiles = 0L + total_lta_size = 0.0 + total_lta_num_files = 0L for site in sites: - site_usage = float(db.totalFileSizeInSite(site[0])) - usages[site[1]] = site_usage - total += site_usage - numFiles += db.numFilesInSite(site[0]) - - storagesitedata='[' + ', 
'.join(['''{name: "%s %s", y: %.1f}''' % (site[1], humanreadablesize(usages[site[1]]), 100.0*usages[site[1]]/total) for site in sites]) + ']' + totals = db.totalFileSizeAndNumFilesInSite(site['id']) + total_lta_size += totals['tree_total_file_size'] + total_lta_num_files += totals['tree_num_files'] + usages[site['name']] = totals['tree_total_file_size'] + + if total_lta_size > 0: + storagesitedata='[' + ', '.join(['''{name: "%s %s", color:'%s', y: %.2f}''' % (site['name'], humanreadablesize(usages[site['name']]), + colors[site['name']]['used'], + 100.0*usages[site['name']]/total_lta_size) for site in sites]) + ']' + else: + storagesitedata ='[]' min_date, max_date = db.datetimeRangeOfFilesInTree() - min_date = datetime(2012, 1, 1) - month_ranges = monthRanges(min_date, max_date) + if min_date is None: + min_date = datetime(2012, 1, 1) + if max_date is None: + max_date = datetime.utcnow() + min_date = max(datetime(2012, 1, 1), min_date) + month_ranges = monthRanges(min_date, max_date, 3) # convert end-of-month timestamps to milliseconds since epoch epoch = datetime.utcfromtimestamp(0) @@ -69,54 +92,109 @@ def index(): usage_per_month_series='[' deltas_per_month_series='[' for site in sites: - cumulatives = [db.totalFileSizeInSite(site[0], to_date=mr[1]) for mr in month_ranges] + deltas_per_month = [db.totalFileSizeInSite(site['id'], from_date=mr[0], to_date=mr[1]) for mr in month_ranges] + data = ', '.join(['[%s, %s]' % (x[0], str(x[1])) for x in zip(datestamps, deltas_per_month)]) + deltas_per_month_series += '''{name: '%s', color:'%s', data: [%s]},\n''' % (site['name'], colors[site['name']]['used'], data) - data = ', '.join(['[%s, %s]' % (x[0], str(x[1])) for x in zip(datestamps, cumulatives)]) - usage_per_month_series += '''{name: '%s', data: [%s]},\n''' % (site[1], data) + cumulatives = [deltas_per_month[0]] + for delta in deltas_per_month[1:]: + cumulative = cumulatives[-1] + delta + cumulatives.append(cumulative) - deltas = [0] - for i in range(1, 
len(cumulatives)): - delta = cumulatives[i] - cumulatives[i-1] - deltas.append(delta) + data = ', '.join(['[%s, %s]' % (x[0], str(x[1])) for x in zip(datestamps, cumulatives)]) + usage_per_month_series += '''{name: '%s', color:'%s', data: [%s]},\n''' % (site['name'], colors[site['name']]['used'], data) - data = ', '.join(['[%s, %s]' % (x[0], str(x[1])) for x in zip(datestamps, deltas)]) - deltas_per_month_series += '''{name: '%s', data: [%s]},\n''' % (site[1], data) usage_per_month_series+=']' deltas_per_month_series+=']' + quota_dir_stats = db.siteQuotaRootDirStats() + quota_dir_stats = sorted(quota_dir_stats, reverse=True, key=lambda x: x['tree_total_file_size']) + + site_usages_per_site = {} + latest_usages_per_site = {} + for site_usage in db.siteQuotaUsages(): + site_name = site_usage['site_name'] + if site_name not in site_usages_per_site: + site_usages_per_site[site_name] = [] + site_usages_per_site[site_name].append(site_usage) + if site_name not in latest_usages_per_site: + latest_usages_per_site[site_name] = site_usage + if site_usage['valid_until_date'] > latest_usages_per_site[site_name]['valid_until_date']: + latest_usages_per_site[site_name] = site_usage + + + quota_series='[' + storagesite_free_space='[' + site_tape_usages_table = '<table>\n' + site_tape_usages_table += '<tr><th style="text-align: left;">site</th><th style="text-align: left;">directory</th><th>total #files</th><th>total file size</th><th>quota</th><th>free</th><th>expiration</th></tr>\n' + total_lta_free_space = sum(u['space_left'] for u in latest_usages_per_site.values() if u['space_left'] > 0) + total_lta_quota = sum(u['quota'] for u in latest_usages_per_site.values()) + + for site_name in ['sara','juelich', 'poznan']: + if site_name in latest_usages_per_site: + latest_usage = latest_usages_per_site[site_name] + site_tape_usages_table += '<tr style="font-weight: bold;"><td style="text-align: left;">%s</td><td style="text-align: left;">%s</td><td>%s</td><td>%s</td><td>%s</td><td 
%s>%s</td><td>%s</td></tr>\n' % (latest_usage['site_name'], '', latest_usage['num_files'], humanreadablesize(latest_usage['total_file_size']), humanreadablesize(latest_usage['quota']), 'style="color: red;"' if latest_usage['space_left'] < 0 else '', humanreadablesize(latest_usage['space_left']), latest_usage['valid_until_date']) + + for qds in quota_dir_stats: + if qds['site_name'] == site_name: + site_tape_usages_table += '<tr><td style="text-align: left;">%s</td><td style="text-align: left;">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % ( + '', qds['dir_name'], qds['tree_num_files'], humanreadablesize(qds['tree_total_file_size']), '', '', '') + + storagesite_free_space += '''{name: "%s %s", color:'%s', y: %.2f}, ''' % (site_name, + humanreadablesize(latest_usage['space_left']), + colors[site_name]['free'], + max(0, 100.0 * latest_usage['space_left']) / total_lta_free_space) + + + site_tape_usages_table += '</table>\n' + + for site_name in ['poznan','juelich', 'sara']: + if site_name in site_usages_per_site: + site_usages_for_site = site_usages_per_site[site_name] + site_usages_for_site = sorted(site_usages_for_site, key=lambda x: x['valid_until_date']) + data = ','.join('[%d, %s]' % ((su['valid_until_date'] - epoch).total_seconds()*1000, su['space_left']) for su in site_usages_for_site) + quota_series+='''{ name:'%s_free', stack:'%s', color:'%s', data:[%s] },''' % (site_name,site_name,colors[site_name]['free'],data) + data = ','.join('[%d, %s]' % ((su['valid_until_date'] - epoch).total_seconds()*1000, su['total_file_size']) for su in site_usages_for_site) + quota_series+='''{ name:'%s_used', stack:'%s', color:'%s', data:[%s] },''' % (site_name,site_name,colors[site_name]['used'], data) + + + quota_series+=']' + storagesite_free_space+=']' + return render_template('index.html', title='LTA storage overview', storagesitetitle='LTA Storage Site Usage', - storagesitesubtitle='Total: %s #dataproducts: %s' % (humanreadablesize(total, 'B', 
1000), humanreadablesize(numFiles, '', 1000)), + storagesitesubtitle='Total: %s #dataproducts: %s' % (humanreadablesize(total_lta_size, 'B', 1000), humanreadablesize(total_lta_num_files, '', 1000)), + storagesite_free_space_title='LTA Storage Site Free Space', + storagesite_free_space_subtitle='Total free space: %s Current total quota: %s' % (humanreadablesize(total_lta_free_space, 'B', 1000),humanreadablesize(total_lta_quota, 'B', 1000)), storagesitedata=storagesitedata, + storagesite_free_space=storagesite_free_space, usage_per_month_series=usage_per_month_series, deltas_per_month_series=deltas_per_month_series, + quota_series=quota_series, + site_tape_usages=site_tape_usages_table, data_gathered_timestamp=db.mostRecentVisitDate().strftime('%Y/%m/%d %H:%M:%S')) @app.route('/rest/sites/') def get_sites(): - sites = {'sites': [{'id': x[0], 'name': x[1], 'url': x[2]} for x in db.sites()]} - return json.jsonify(sites) + return json.jsonify({'sites': db.sites()}) @app.route('/rest/sites/<int:site_id>') def get_site(site_id): - site = db.site(site_id) - site_dict = {'id': site[0], 'name': site[1], 'url': site[2]} - return json.jsonify(site_dict) + return json.jsonify(db.site(site_id)) @app.route('/rest/sites/usages') def get_sites_usages(): - sites = {'sites_usages': [{'id': x[0], - 'name': x[1]} for x in db.sites()]} + sites = {'sites_usages': db.sites()} for site in sites['sites_usages']: rootDirs = db.rootDirectoriesForSite(site['id']) site_usage = 0L for rootDir in rootDirs: - usage = long(db.totalFileSizeInTree(rootDir[0])) + usage = long(db.totalFileSizeInTree(rootDir['dir_id'])) site_usage += usage site['usage'] = site_usage site['usage_hr'] = humanreadablesize(site_usage) @@ -125,34 +203,43 @@ def get_sites_usages(): @app.route('/rest/rootdirectories/',) def get_rootDirectories(): - rootDirs = {'rootDirectories': [{'id': x[0], 'name': x[1], 'site_id': x[2], 'site_name': x[3]} for x in db.rootDirectories()]} + rootDirs = {'rootDirectories': db.rootDirectories()} 
return json.jsonify(rootDirs) @app.route('/rest/directory/<int:dir_id>/subdirectories/',) def get_directoryTree(dir_id): - subDirsList = {'subdirectories': [{'id': x[0], 'name': x[1], 'parent_dir_id': x[2]} for x in db.subDirectories(dir_id, 1, False)]} + subDirsList = {'subdirectories': db.subDirectories(dir_id, 1, False)} return json.jsonify(subDirsList) @app.route('/rest/directory/<int:dir_id>/files') def get_filesInDirectory(dir_id): - files = {'files': [{'id': x[0], 'name': x[1], 'size': x[2], 'creation_date': x[3]} for x in db.filesInDirectory(dir_id)]} + files = {'files': db.filesInDirectory(dir_id)} return json.jsonify(files) -def main(argv): - dbpath = argv[0] if argv else 'ltastorageoverview.sqlite' +def main(): + from optparse import OptionParser + from lofar.common import dbcredentials + + # Check the invocation arguments + parser = OptionParser("%prog [options]", description='runs the lta scraper and stores results in the speficied database.') + parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging') + parser.add_option_group(dbcredentials.options_group(parser)) + parser.set_defaults(dbcredentials="LTASO") + (options, args) = parser.parse_args() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.INFO) - if not os.path.exists(dbpath): - print 'No database file found at \'%s\'' % (dbpath,) - sys.exit(-1) + dbcreds = dbcredentials.parse_options(options) - print 'Using database at \'%s\'' % (dbpath,) + logger.info("Using dbcreds: %s" % dbcreds.stringWithHiddenPassword()) global db - db = store.LTAStorageDb(dbpath) + db = store.LTAStorageDb(dbcreds, options.verbose) - app.run(debug=True,host='0.0.0.0') + app.run(debug=False,host='0.0.0.0',port=9632) if __name__ == '__main__': - main(sys.argv[1:]) + main() diff --git a/LTA/ltastorageoverview/ltastorageoverview_build.sh b/LTA/ltastorageoverview/ltastorageoverview_build.sh deleted file mode 
100755 index dce9ca20c26a7b64b2f8bef1f67ec7ff33af68d5..0000000000000000000000000000000000000000 --- a/LTA/ltastorageoverview/ltastorageoverview_build.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -# $Id: ltastorageoverview_build.sh 32113 2015-08-03 10:07:57Z schaap $ - -if [ "$#" -ne 1 ]; then - echo "Usage: ./ltastorageoverview_build.sh <tag>" - echo "where tag is a name or version number which is added to the tarballs." - echo "This script creates two build flavours (local_dev/lexar) in source_root_dir/build and builds ltastorageoverview" - echo "Then it performs a local install (in the each specific build dir) and creates a deployable tarball" - echo "Final result is a tarball in source_root_dir/build which can be copied to the ingest servers" - exit 1 -fi - -#get path of this build script and determine source root from there -REL_PATH="`dirname \"$0\"`" -ABS_PATH="`( cd \"$REL_PATH\" && pwd )`" -SOURCE_ROOT="$ABS_PATH/../.." - -echo "Using '$SOURCE_ROOT' as source route" - -BUILD_TAG="$1" -echo "Using Build tag $BUILD_TAG" - -LOCAL_DEV_BUILD_DIR=$SOURCE_ROOT/build/local_dev/gnu_debug -LOCAL_DEV_INSTALL_DIR=$LOCAL_DEV_BUILD_DIR/local_install - -mkdir -p $LOCAL_DEV_BUILD_DIR - -cd $LOCAL_DEV_BUILD_DIR && cmake -DBUILD_PACKAGES=ltastorageoverview -DCMAKE_INSTALL_PREFIX=$LOCAL_DEV_INSTALL_DIR/ltastorageoverview__$BUILD_TAG $SOURCE_ROOT -cd $LOCAL_DEV_BUILD_DIR && make && make local_dev && rm -rf $LOCAL_DEV_INSTALL_DIR && make install - - -LEXAR_BUILD_DIR=$SOURCE_ROOT/build/lexar/gnu_debug -mkdir -p $LEXAR_BUILD_DIR - -cd $LEXAR_BUILD_DIR && cmake -DBUILD_PACKAGES=ltastorageoverview -DCMAKE_INSTALL_PREFIX=/globalhome/ingest/ltastorageoverview_$BUILD_TAG $SOURCE_ROOT -cd $LEXAR_BUILD_DIR && make && rm -rf ./local_install && make DESTDIR=./local_install install -cd $LEXAR_BUILD_DIR/local_install/globalhome/ingest && tar cvzf $SOURCE_ROOT/build/ltastorageoverview_"$BUILD_TAG"_lexar.tgz ltastorageoverview_$BUILD_TAG diff --git 
a/LTA/ltastorageoverview/test/CMakeLists.txt b/LTA/ltastorageoverview/test/CMakeLists.txt index 4eef4029638ff977f5655c31ff53fbb761fa5cbc..bb3b942d55057994bac70db523300c68aa3b57fe 100644 --- a/LTA/ltastorageoverview/test/CMakeLists.txt +++ b/LTA/ltastorageoverview/test/CMakeLists.txt @@ -2,10 +2,8 @@ include(LofarCTest) lofar_add_test(test_store) +lofar_add_test(test_scraper) lofar_add_test(test_lso_webservice) +lofar_add_test(test_ingesteventhandler) -python_install( - test_store.py - test_lso_webservice.py - DESTINATION ltastorageoverview/test) - +lofar_add_test(integration_test_store) diff --git a/LTA/ltastorageoverview/test/common_test_ltastoragedb.py b/LTA/ltastorageoverview/test/common_test_ltastoragedb.py new file mode 100755 index 0000000000000000000000000000000000000000..4c216f44bd233a59e58f69844b0028dfa9086288 --- /dev/null +++ b/LTA/ltastorageoverview/test/common_test_ltastoragedb.py @@ -0,0 +1,73 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id$ + +import unittest +import logging +import os, os.path +import psycopg2 +import lofar.common.dbcredentials as dbc + +try: + import testing.postgresql +except ImportError as e: + print str(e) + print 'Please install python package testing.postgresql: sudo pip install testing.postgresql' + exit(3) # special lofar test exit code: skipped test + +logger = logging.getLogger(__name__) + +class CommonLTAStorageDbTest(unittest.TestCase): + def setUp(self): + logger.info('setting up test LTASO database server...') + + # create a test db + logger.info(' creating test postgres server') + self.test_psql = testing.postgresql.Postgresql() + dsn = self.test_psql.dsn() + logger.info(' created test postgres server, dsn=%s', dsn) + + self.dbcreds = dbc.Credentials() + self.dbcreds.user = 'test_user' + self.dbcreds.password = 'test_password' + + with psycopg2.connect(**dsn) as conn: + cursor = conn.cursor() + #use same user/pass as stored in local dbcreds + query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (self.dbcreds.user, self.dbcreds.password) + cursor.execute(query) + + create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql')) + logger.info(' running ltaso create script create_script=%s', create_script_path) + with open(create_script_path, 'r') as script: + cursor.execute(script.read()) + logger.info(' completed ltaso create script') + + # copy the test postgres server settings into dbcreds + # we can use these dbcreds in each test method to connect to the testing ltaso database + self.dbcreds.host = dsn['host'] + self.dbcreds.database = dsn['database'] + self.dbcreds.port = dsn['port'] + + logger.info('finished setting up test LTASO database') + + def tearDown(self): + logger.info('removing test LTASO database server...') + self.test_psql.stop() + logger.info('removed test LTASO database server') + diff --git a/LTA/ltastorageoverview/test/db_performance_test.py 
b/LTA/ltastorageoverview/test/db_performance_test.py new file mode 100755 index 0000000000000000000000000000000000000000..f224b3ee9c90b7b80dc6a753fc5857cd0d0b1ad2 --- /dev/null +++ b/LTA/ltastorageoverview/test/db_performance_test.py @@ -0,0 +1,108 @@ +#!/usr/bin/python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +import logging +from datetime import datetime, timedelta +import os + +from lofar.lta.ltastorageoverview import store +from lofar.common.datetimeutils import totalSeconds + +logger = logging.getLogger() + +def main(): + from optparse import OptionParser + from lofar.common import dbcredentials + import testing.postgresql + import psycopg2 + + # Check the invocation arguments + parser = OptionParser("%prog [options]", description='execute a performance test by inserting many files on an empty test database.') + parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging') + (options, args) = parser.parse_args() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.INFO) + + + # create a test webservice.db + logger.info(' creating test postgres server') + with testing.postgresql.Postgresql() as test_psql: + dsn = test_psql.dsn() + logger.info(' created test postgres server, dsn=%s', dsn) + + dbcreds = dbcredentials.Credentials() + dbcreds.user = 'test_user' + dbcreds.password = 'test_password' + + with psycopg2.connect(**dsn) as conn: + cursor = conn.cursor() + #use same user/pass as stored in local webservice.dbcreds + query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (dbcreds.user, dbcreds.password) + cursor.execute(query) + + create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql')) + logger.info(' running ltaso create script create_script=%s', create_script_path) + with open(create_script_path, 'r') as script: + cursor.execute(script.read()) + logger.info(' completed ltaso create script') + + # copy the test postgres server settings into webservice.dbcreds + # we can use these webservice.dbcreds in each test method to connect to the testing ltaso database + dbcreds.host = dsn['host'] + dbcreds.database = dsn['database'] + dbcreds.port = dsn['port'] + + logger.info('finished 
setting up test LTASO database') + + base_date = datetime.utcnow() + + db = store.LTAStorageDb(dbcreds, options.verbose) + + db.insertSiteIfNotExists('sara', 'srm://srm.siteA.nl:8444') + rootdir_id = db.insertRootDirectory('sara', '/pnfs/grid.siteA.nl/data/lofar/ops') + projects_dir_id = db.insertSubDirectory('/pnfs/grid.siteA.nl/data/lofar/ops/projects', rootdir_id) + + total_num_files_inserted = 0 + + with open('db_perf.csv', 'w') as file: + for cycle_nr in range(1, 10): + for project_nr in range(1, 10): + # project_name = 'lc%d_%03d/%d' % (cycle_nr, project_nr, os.getpid()) + project_name = 'lc%d_%03d' % (cycle_nr, project_nr) + projectdir_id = db.insertSubDirectory('/pnfs/grid.siteA.nl/data/lofar/ops/projects/%s' % (project_name,), projects_dir_id) + + obs_base_id = cycle_nr*100000+project_nr*1000 + for obs_nr, obsId in enumerate(range(obs_base_id, obs_base_id+20)): + obsName = 'L%s' % obsId + + obsdir_id = db.insertSubDirectory('/pnfs/grid.siteA.nl/data/lofar/ops/projects/%s/%s' % (project_name, obsName), projectdir_id) + + fileinfos = [('%s_SB%3d' % (obsName, sbNr), 1000+sbNr+project_nr*cycle_nr, base_date + timedelta(days=10*cycle_nr+project_nr, minutes=obs_nr, seconds=sbNr), obsdir_id) for sbNr in range(0, 2)] + now = datetime.utcnow() + file_ids = db.insertFileInfos(fileinfos) + total_num_files_inserted += len(file_ids) + elapsed = totalSeconds(datetime.utcnow() - now) + line = '%s,%s' % (total_num_files_inserted, elapsed) + print line + file.write(line + '\n') + +if __name__ == "__main__": + main() + diff --git a/LTA/ltastorageoverview/test/integration_test_store.py b/LTA/ltastorageoverview/test/integration_test_store.py new file mode 100755 index 0000000000000000000000000000000000000000..b141011a2d23bd0377f55a8e9307f2d6f6f77007 --- /dev/null +++ b/LTA/ltastorageoverview/test/integration_test_store.py @@ -0,0 +1,204 @@ +#!/usr/bin/python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. 
Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +import logging +from datetime import datetime, timedelta +import time +from common_test_ltastoragedb import * +from lofar.lta.ltastorageoverview import store + +logger = logging.getLogger(__name__) + +class IntegrationTestLTAStorageDb(CommonLTAStorageDbTest): + """ + Bigger tests for the lofar.lta.ltastorageoverview.store.LTAStorageDb + which test more complex behaviour with bigger amounts of data. + """ + + def testDirectoryTreesAndStats(self): + """Quite a big test, almost an integration test. + It consists of two stages/phases: + 1) inserts a tree of directories and files in various sites and projects, + 2) test if the automatically computed tree- and dirstats are correct. + """ + + with store.LTAStorageDb(self.dbcreds, True) as db: + base_time = datetime.utcnow() + base_time -= timedelta(seconds=base_time.second, microseconds=base_time.microsecond) + + ########################################################### + # STAGE 1: insertion and check phase. 
+ # insert the sites, directories, and files + # and check the dir- and tree stats directly after insertion + ########################################################### + NUM_SITES = 2 + NUM_PROJECTS = 3 + NUM_PROJECT_SUBDIRS = 4 + NUM_SUB_SUBDIRS = 5 + + # helper dict to store all subdir id's for each dir. + dir2subdir = {} + + for site_nr in range(NUM_SITES): + site_name = 'site%d' % site_nr + site_url = 'srm://%s.org' % site_name + db.insertSiteIfNotExists(site_name, site_url) + + for project_nr in range(NUM_PROJECTS): + rootDir_id = db.insertRootDirectory(site_name, 'rootDir_%d' % project_nr) + dir2subdir[rootDir_id] = [] + + for subdir_nr in range(NUM_PROJECT_SUBDIRS): + subDir_id = db.insertSubDirectory('subDir_%d' % subdir_nr, rootDir_id) + dir2subdir[subDir_id] = [] + dir2subdir[rootDir_id].append(subDir_id) + for file_nr in range(project_nr*subdir_nr): + db.insertFileInfo('file_%d' % file_nr, 271*(file_nr+1), base_time + timedelta(days=10*site_nr+project_nr, hours=subdir_nr, seconds=file_nr), subDir_id) + + dir_files = db.filesInDirectory(subDir_id) + dir_stats = db.directoryTreeStats(subDir_id) + + self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size']) + self.assertEqual(len(dir_files), dir_stats['dir_num_files']) + if dir_files: + self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size']) + self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size']) + self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date']) + self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date']) + + for subsubdir_nr in range(NUM_SUB_SUBDIRS): + subsubDir_id = db.insertSubDirectory('subsubDir_%d' % subsubdir_nr, subDir_id) + dir2subdir[subsubDir_id] = [] + dir2subdir[subDir_id].append(subsubDir_id) + for kk in range(project_nr*subdir_nr*subsubdir_nr): + db.insertFileInfo('file_%d_%d' % (subdir_nr,kk), 314*(kk+1), 
base_time + timedelta(days=10*site_nr+project_nr, hours=10*subdir_nr+subsubdir_nr+2, seconds=kk), subsubDir_id) + + dir_files = db.filesInDirectory(subsubDir_id) + dir_stats = db.directoryTreeStats(subsubDir_id) + + self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size']) + self.assertEqual(len(dir_files), dir_stats['dir_num_files']) + if dir_files: + self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size']) + self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size']) + self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date']) + self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date']) + + tree_totals = db.totalFileSizeAndNumFilesInTree(subDir_id, dir_stats['dir_min_file_creation_date'], dir_stats['dir_max_file_creation_date']) + self.assertEqual(tree_totals['tree_num_files'], dir_stats['dir_num_files']) + self.assertEqual(tree_totals['tree_total_file_size'], dir_stats['dir_total_file_size']) + + # test 1st level subdir again, and also check inclusion of 2nd level subdirs in tree stats + dir_files = db.filesInDirectory(subDir_id) + dir_stats = db.directoryTreeStats(subDir_id) + # this dir only... + self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size']) + self.assertEqual(len(dir_files), dir_stats['dir_num_files']) + if dir_files: + self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size']) + self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size']) + self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date']) + self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date']) + + # including subdirs in tree... 
+ self.assertEqual(sum(f['file_size'] for f in db.filesInTree(subDir_id)), dir_stats['tree_total_file_size']) + self.assertEqual(len(db.filesInTree(subDir_id)), dir_stats['tree_num_files']) + + #################################################################################### + # STAGE 2: reporting phase. + # loop over the sites, directories, and files now that the database has been filled. + # and check the dir- and tree stats totals + #################################################################################### + for site in db.sites(): + site_id = site['id'] + + rootDirs = db.rootDirectoriesForSite(site_id) + self.assertEquals(NUM_PROJECTS, len(rootDirs)) + + for root_dir_id in [x['root_dir_id'] for x in rootDirs]: + subDirs = db.subDirectories(root_dir_id, 1, False) + self.assertEquals(NUM_PROJECT_SUBDIRS, len(subDirs)) + + for subDir in subDirs: + subDir_parent_id = subDir['parent_dir_id'] + self.assertEquals(root_dir_id, subDir_parent_id) + self.assertTrue(subDir['id'] in dir2subdir[root_dir_id]) + + subsubDirs = db.subDirectories(subDir['id'], 1, False) + self.assertEquals(NUM_SUB_SUBDIRS, len(subsubDirs)) + + for subsubDir in subsubDirs: + subsubDir_parent_id = subsubDir['parent_dir_id'] + self.assertEquals(subDir['id'], subsubDir_parent_id) + self.assertTrue(subsubDir['id'] in dir2subdir[subDir['id']]) + + # check various selects of files in the tree, for each file + tree_files = sorted(db.filesInTree(root_dir_id), key=lambda f: f['file_creation_date']) + for file in tree_files: + # check if filesInTree return this one file when time delimited for this specific file_creation_date + file_creation_date = file['file_creation_date'] + selected_tree_files = db.filesInTree(root_dir_id, file_creation_date, file_creation_date) + self.assertEqual(1, len(selected_tree_files)) + self.assertEqual(file['file_creation_date'], selected_tree_files[0]['file_creation_date']) + self.assertEqual(file['file_size'], selected_tree_files[0]['file_size']) + + # get the 
'totals' for this root_dir, but select only this file by date. + # should return 1 file. + tree_totals = db.totalFileSizeAndNumFilesInTree(root_dir_id, file_creation_date, file_creation_date) + self.assertEqual(1, tree_totals['tree_num_files']) + self.assertEqual(file['file_size'], tree_totals['tree_total_file_size']) + + # check some ranges files/times + for idx, file in enumerate(tree_files): + file_creation_date = file['file_creation_date'] + + #select any file >= file_creation_date + expected_selected_tree_files = tree_files[idx:] + selected_tree_files = db.filesInTree(root_dir_id, file_creation_date, None) + self.assertEqual(len(expected_selected_tree_files), len(selected_tree_files)) + selected_tree_files_ids = set([f['file_id'] for f in selected_tree_files]) + for expected_file in expected_selected_tree_files: + self.assertTrue(expected_file['file_id'] in selected_tree_files_ids) + + # and check the totals as well + tree_totals = db.totalFileSizeAndNumFilesInTree(root_dir_id, file_creation_date, None) + self.assertEqual(len(expected_selected_tree_files), tree_totals['tree_num_files']) + self.assertEqual(sum(f['file_size'] for f in expected_selected_tree_files), tree_totals['tree_total_file_size']) + + #select any file <= file_creation_date + expected_selected_tree_files = tree_files[:idx+1] + selected_tree_files = db.filesInTree(root_dir_id, None, file_creation_date) + self.assertEqual(len(expected_selected_tree_files), len(selected_tree_files)) + selected_tree_files_ids = set([f['file_id'] for f in selected_tree_files]) + for expected_file in expected_selected_tree_files: + self.assertTrue(expected_file['file_id'] in selected_tree_files_ids) + + # and check the totals as well + tree_totals = db.totalFileSizeAndNumFilesInTree(root_dir_id, None, file_creation_date) + self.assertEqual(len(expected_selected_tree_files), tree_totals['tree_num_files']) + self.assertEqual(sum(f['file_size'] for f in expected_selected_tree_files), 
tree_totals['tree_total_file_size']) + +# run tests if main +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) + + unittest.main() diff --git a/LTA/ltastorageoverview/test/integration_test_store.run b/LTA/ltastorageoverview/test/integration_test_store.run new file mode 100755 index 0000000000000000000000000000000000000000..ae46b39f4670c0a34490300cf75ddeb5c497d9ef --- /dev/null +++ b/LTA/ltastorageoverview/test/integration_test_store.run @@ -0,0 +1,4 @@ +#!/bin/bash + +source python-coverage.sh +python_coverage_test "ltas*" integration_test_store.py diff --git a/LTA/ltastorageoverview/test/integration_test_store.sh b/LTA/ltastorageoverview/test/integration_test_store.sh new file mode 100755 index 0000000000000000000000000000000000000000..de706b2bcf98d4158a1fb85a54f3e5ca959eb7e1 --- /dev/null +++ b/LTA/ltastorageoverview/test/integration_test_store.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh integration_test_store diff --git a/LTA/ltastorageoverview/test/test_ingesteventhandler.py b/LTA/ltastorageoverview/test/test_ingesteventhandler.py new file mode 100755 index 0000000000000000000000000000000000000000..39adb104ce2158e50aae5d42686329ecf4fa2153 --- /dev/null +++ b/LTA/ltastorageoverview/test/test_ingesteventhandler.py @@ -0,0 +1,303 @@ +#!/usr/bin/python + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +from datetime import datetime + +from common_test_ltastoragedb import * +from lofar.lta.ltastorageoverview import store +from lofar.lta.ltastorageoverview.ingesteventhandler import IngestEventHandler + +import logging +logger = logging.getLogger(__name__) + +class TestIngestEventHandler(CommonLTAStorageDbTest): + def setUp(self): + # allow superclass to setup empty database + super(TestIngestEventHandler, self).setUp() + + # fill empty database with simple sites and root dirs + with store.LTAStorageDb(self.dbcreds, True) as db: + db.insertSiteIfNotExists('siteA', 'srm://siteA.foo.bar:8443') + db.insertSiteIfNotExists('siteB', 'srm://siteB.foo.bar:8443') + + db.insertRootDirectory('siteA', '/root_dir_1') + db.insertRootDirectory('siteA', '/root_dir_2') + db.insertRootDirectory('siteA', '/long/path/to/root_dir_3') + db.insertRootDirectory('siteB', '/root_dir_1') + + self._markAllDirectoriesRecentlyVisited() + + def _markAllDirectoriesRecentlyVisited(self): + """pretend that all dirs were recently visited + """ + with store.LTAStorageDb(self.dbcreds, True) as db: + db.executeQuery('''update scraper.last_directory_visit + set visit_date=%s;''', (datetime.utcnow(), )) + db.commit() + + def test_01_schedule_srmurl_for_visit_unknown_site(self): + """ try to schedule some unknown site's surl. Should raise. 
+ """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + + with self.assertRaises(LookupError) as context: + surl = 'srm://foo.bar:1234/fdjsalfja5h43535h3oiu/5u905u3f' + handler._schedule_srmurl_for_visit(surl) + self.assertTrue('Could not find site' in context.exception.message) + + def test_02_mark_directory_for_a_visit(self): + """ Test core method _mark_directory_for_a_visit for all known root dirs. + Should set the last visit time for each dir way in the past. + """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + now = datetime.utcnow() + + for site in db.sites(): + for root_dir in db.rootDirectoriesForSite(site['id']): + dir_id = root_dir['root_dir_id'] + # make sure the dir's last visit time is recent + db.updateDirectoryLastVisitTime(dir_id, now) + timestamp_before_mark = db.directoryLastVisitTime(dir_id) + self.assertEqual(now, timestamp_before_mark) + + # let the handler mark the dir for a next visit... + handler._mark_directory_for_a_visit(dir_id) + + # by marking the dir for a next visit, the dir's last visit time is set way in the past. + timestamp_after_mark = db.directoryLastVisitTime(dir_id) + self.assertLess(timestamp_after_mark, timestamp_before_mark) + + def test_03_insert_missing_directory_tree_if_needed(self): + """ Test core method _insert_missing_directory_tree_if_needed for all known root dirs. + Should result in new directory entries in the database for the new sub directories only. 
+ """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + + for site in db.sites(): + site_surl = site['url'] + site_id = site['id'] + for root_dir in db.rootDirectoriesForSite(site_id): + dir_path = root_dir['dir_name'] + surl = site_surl + dir_path + + # root dir should already exist + dir = db.directoryByName(dir_path, site_id) + self.assertIsNotNone(dir) + + # let the handler insert the not-so-missing dirs. + # nothing should happen, because the root dir already exists + new_dir_ids = handler._insert_missing_directory_tree_if_needed(surl) + self.assertEqual(0, len(new_dir_ids)) + + # now insert some new subdirs, with multiple levels. + for subdir_path in ['/foo', '/bar/xyz']: + dir_path = root_dir['dir_name'] + subdir_path + surl = site_surl + dir_path + # dir should not exist yet + self.assertIsNone(db.directoryByName(dir_path, site_id)) + + # let the handler insert the missing dirs. + handler._insert_missing_directory_tree_if_needed(surl) + + # dir should exist now + dir = db.directoryByName(dir_path, site_id) + self.assertIsNotNone(dir) + + # check if new dir has expected root dir + parents = db.parentDirectories(dir['dir_id']) + self.assertEqual(root_dir['root_dir_id'], parents[0]['id']) + + def test_04_insert_missing_directory_tree_if_needed_for_path_with_unknown_rootdir(self): + """ Test core method _insert_missing_directory_tree_if_needed for a path with an unknown root dir + Should raise LookupError. 
+ """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + + for site in db.sites(): + with self.assertRaises(LookupError) as context: + surl = site['url'] + '/fdjsalfja5h43535h3oiu/5u905u3f' + handler._insert_missing_directory_tree_if_needed(surl) + self.assertTrue('Could not find parent root dir' in context.exception.message) + + def test_05_schedule_srmurl_for_visit_for_root_dir(self): + """ Test higher level method _schedule_srmurl_for_visit for all known root dirs. + Should result in marking the dir matching the surl as being the dir which should be visited next. + """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + + for site in db.sites(): + for root_dir in db.rootDirectoriesForSite(site['id']): + self._markAllDirectoriesRecentlyVisited() + now = datetime.utcnow() + + dir_id = root_dir['root_dir_id'] + surl = site['url'] + root_dir['dir_name'] + handler._schedule_srmurl_for_visit(surl) + + # surl was scheduled for a visit, so this dir should be the least_recent_visited_dir + site_visit_stats = db.visitStats(datetime.utcnow())[site['name']] + self.assertEqual(dir_id, site_visit_stats['least_recent_visited_dir_id']) + + # mimick a directory visit by the scraper, by setting the last visit time to now. + db.updateDirectoryLastVisitTime(dir_id, now) + + # we faked a visit, so this dir should not be the least_recent_visited_dir anymore + site_visit_stats = db.visitStats(now)[site['name']] + self.assertNotEqual(dir_id, site_visit_stats.get('least_recent_visited_dir_id')) + + def test_06_schedule_srmurl_for_visit_for_new_root_sub_dir(self): + """ Test higher level method _schedule_srmurl_for_visit for all new unknown subdirs of the known root dirs. + Should result in marking the dir matching the surl as being the dir which should be visited next. 
+ """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + + for site in db.sites(): + for root_dir in db.rootDirectoriesForSite(site['id']): + self._markAllDirectoriesRecentlyVisited() + now = datetime.utcnow() + + # create the subdir surl + sub_dir_name = '/foo' + sub_dir_path = root_dir['dir_name'] + sub_dir_name + surl = site['url'] + sub_dir_path + + # call the method under test + handler._schedule_srmurl_for_visit(surl) + + # surl was scheduled for a visit, all other dir's were marked as visited already... + # so there should be a new dir for this surl, and it should be the least_recent_visited_dir + site_visit_stats = db.visitStats(datetime.utcnow())[site['name']] + + least_recent_visited_dir_id = site_visit_stats.get('least_recent_visited_dir_id') + self.assertIsNotNone(least_recent_visited_dir_id) + + least_recent_visited_dir = db.directory(least_recent_visited_dir_id) + self.assertEqual(sub_dir_path, least_recent_visited_dir['dir_name']) + + # mimick a directory visit by the scraper, by setting the last visit time to now. + db.updateDirectoryLastVisitTime(least_recent_visited_dir_id, now) + + # we faked a visit, so this dir should not be the least_recent_visited_dir anymore + site_visit_stats = db.visitStats(now)[site['name']] + self.assertNotEqual(least_recent_visited_dir_id, site_visit_stats.get('least_recent_visited_dir_id')) + + def test_07_schedule_srmurl_for_visit_for_path_with_unknown_rootdir(self): + """ Test higher level method _schedule_srmurl_for_visit for a path with an unknown root dir + Should raise LookupError. 
+ """ + with store.LTAStorageDb(self.dbcreds, True) as db: + handler = IngestEventHandler(dbcreds=self.dbcreds) + + for site in db.sites(): + with self.assertRaises(LookupError) as context: + surl = site['url'] + '/fdjsalfja5h43535h3oiu/5u905u3f' + handler._schedule_srmurl_for_visit(surl) + self.assertTrue('Could not find parent root dir' in context.exception.message) + + def test_08_integration_test_with_messagebus(self): + """ Full blown integration test listening for notifications on the bus, + and checking which dir is up for a visit next. + Needs a working local qpid broker. Test is skipped if qpid not available. + """ + try: + broker = None + connection = None + + import uuid + from threading import Event + from qpid.messaging import Connection, ConnectError + from qpidtoollibs import BrokerAgent + from lofar.messaging.messagebus import ToBus + from lofar.messaging.messages import EventMessage + from lofar.lta.ingest.common.config import DEFAULT_INGEST_NOTIFICATION_PREFIX + + # setup broker connection + connection = Connection.establish('127.0.0.1') + broker = BrokerAgent(connection) + + # add test service bus + busname = 'test-ingesteventhandler-%s' % (uuid.uuid1()) + broker.addExchange('topic', busname) + + sync_event = Event() + + class SyncedIngestEventHandler(IngestEventHandler): + """This derived IngestEventHandler behaves exactly like the normal + object under test IngestEventHandler, but it also sets a sync_event + to sync between the listener thread and this main test thread""" + def _handleMessage(self, msg): + super(SyncedIngestEventHandler, self)._handleMessage(msg) + sync_event.set() + + with SyncedIngestEventHandler(self.dbcreds, busname=busname): + with store.LTAStorageDb(self.dbcreds, True) as db: + for site in db.sites(): + for root_dir in db.rootDirectoriesForSite(site['id']): + self._markAllDirectoriesRecentlyVisited() + + # create the subdir surl + sub_dir_name = '/foo' + sub_dir_path = root_dir['dir_name'] + sub_dir_name + surl = 
site['url'] + sub_dir_path + + with ToBus(busname) as sender: + msg = EventMessage(context=DEFAULT_INGEST_NOTIFICATION_PREFIX+"TaskFinished", + content={'srm_url': surl}) + sender.send(msg) + + # wait for the handler to have processed the message + self.assertTrue(sync_event.wait(2)) + sync_event.clear() + + # surl should have been scheduled for a visit, all other dir's were marked as visited already... + # so there should be a new dir for this surl, and it should be the least_recent_visited_dir + site_visit_stats = db.visitStats(datetime.utcnow())[site['name']] + + least_recent_visited_dir_id = site_visit_stats.get('least_recent_visited_dir_id') + self.assertIsNotNone(least_recent_visited_dir_id) + + least_recent_visited_dir = db.directory(least_recent_visited_dir_id) + self.assertEqual(sub_dir_path, least_recent_visited_dir['dir_name']) + + except ImportError as e: + logger.warning("skipping test due to: %s", e) + except ConnectError as e: + logger.warning("skipping test due to: %s", e) + finally: + # cleanup test bus and exit + if broker: + broker.delExchange(busname) + if connection: + connection.close() + + +# run tests if main +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) + + unittest.main() diff --git a/LTA/ltastorageoverview/test/test_ingesteventhandler.run b/LTA/ltastorageoverview/test/test_ingesteventhandler.run new file mode 100755 index 0000000000000000000000000000000000000000..8b7d318ffffefcf07ed482a95250975ba792aa39 --- /dev/null +++ b/LTA/ltastorageoverview/test/test_ingesteventhandler.run @@ -0,0 +1,4 @@ +#!/bin/bash + +source python-coverage.sh +python_coverage_test "*ingesteventhandler*" test_ingesteventhandler.py diff --git a/LTA/ltastorageoverview/test/test_ingesteventhandler.sh b/LTA/ltastorageoverview/test/test_ingesteventhandler.sh new file mode 100755 index 0000000000000000000000000000000000000000..4f5d35a30389fe0c9cf66adc6add483dc6e32a0b --- /dev/null +++ 
b/LTA/ltastorageoverview/test/test_ingesteventhandler.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh test_ingesteventhandler diff --git a/LTA/ltastorageoverview/test/test_lso_webservice.py b/LTA/ltastorageoverview/test/test_lso_webservice.py index 4831f3bec65e4c025873d10ff7fdb2368aefc906..c81e140777b5da48c96e59bdbd3459ff0311f8d1 100755 --- a/LTA/ltastorageoverview/test/test_lso_webservice.py +++ b/LTA/ltastorageoverview/test/test_lso_webservice.py @@ -28,21 +28,70 @@ import tempfile import urllib2 import json import datetime +import psycopg2 from StringIO import StringIO -from flask.ext.testing import LiveServerTestCase as FlaskLiveTestCase -from ltastorageoverview import store -from ltastorageoverview.webservice import webservice as webservice +import lofar.common.dbcredentials as dbc +from lofar.lta.ltastorageoverview import store +from lofar.lta.ltastorageoverview.webservice import webservice as webservice + +import logging +logger = logging.getLogger(__name__) + +try: + from flask.ext.testing import LiveServerTestCase as FlaskLiveTestCase +except ImportError as e: + print str(e) + print 'Please install python-flask-testing: sudo apt-get install python-flask-testing' + exit(3) #special lofar skip test return code + +try: + import testing.postgresql +except ImportError as e: + print str(e) + print 'Please install python package testing.test_psql: sudo pip install testing.test_psql' + exit(3) # special lofar test exit code: skipped test + +test_psql = None def setUpModule(): - tmpfile = os.path.join(tempfile.gettempdir(), 'test.sqlite') + logger.info('setting up test LTASO database server...') + + # create a test webservice.db + logger.info(' creating test postgres server') + global test_psql + test_psql = testing.postgresql.Postgresql() + dsn = test_psql.dsn() + logger.info(' created test postgres server, dsn=%s', dsn) + + dbcreds = dbc.Credentials() + dbcreds.user = 'test_user' + dbcreds.password = 'test_password' - if os.path.exists(tmpfile): - 
os.remove(tmpfile) + with psycopg2.connect(**dsn) as conn: + cursor = conn.cursor() + #use same user/pass as stored in local webservice.dbcreds + query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (dbcreds.user, dbcreds.password) + cursor.execute(query) - webservice.db = store.LTAStorageDb(tmpfile) + create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql')) + logger.info(' running ltaso create script create_script=%s', create_script_path) + with open(create_script_path, 'r') as script: + cursor.execute(script.read()) + logger.info(' completed ltaso create script') - webservice.db.insertSite('siteA', 'srm://siteA.org') - webservice.db.insertSite('siteB', 'srm://siteB.org') + # copy the test postgres server settings into webservice.dbcreds + # we can use these webservice.dbcreds in each test method to connect to the testing ltaso database + dbcreds.host = dsn['host'] + dbcreds.database = dsn['database'] + dbcreds.port = dsn['port'] + + logger.info('finished setting up test LTASO database') + + webservice.db = store.LTAStorageDb(dbcreds, True) + + logger.info('filling test LTASO database with test data') + webservice.db.insertSiteIfNotExists('siteA', 'srm://siteA.org') + webservice.db.insertSiteIfNotExists('siteB', 'srm://siteB.org') rootDir_ids = [] rootDir_ids.append(webservice.db.insertRootDirectory('siteA', 'rootDir1')) @@ -51,20 +100,24 @@ def setUpModule(): for rootDir_id in rootDir_ids: for j in range(2): - subDir_id = webservice.db.insertSubDirectory(rootDir_id, 'subDir_%d' % j) + subDir_id = webservice.db.insertSubDirectory('subDir_%d' % j, rootDir_id) if j == 0: - webservice.db.insertFileInfo('file_%d' % j, 271*(j+1), datetime.datetime.utcnow(), subDir_id) + webservice.db.insertFileInfo('file_%d' % j, 271 * (j + 1), datetime.datetime.utcnow(), subDir_id) for k in range(2): - subsubDir_id = webservice.db.insertSubDirectory(subDir_id, 'subsubDir_%d' % k) + subsubDir_id = 
webservice.db.insertSubDirectory('subsubDir_%d' % k, subDir_id) - for l in range((j+1)*(k+1)): - webservice.db.insertFileInfo('file_%d' % l, 314*(l+1), datetime.datetime.utcnow(), subsubDir_id) + for l in range((j + 1) * (k + 1)): + webservice.db.insertFileInfo('file_%d' % l, 314 * (l + 1), datetime.datetime.utcnow(), subsubDir_id) + + logger.info('finished filling test LTASO database with test data') def tearDownModule(): - if os.path.exists(webservice.db.db_filename): - os.remove(webservice.db.db_filename) + logger.info('removing test LTASO database server...') + test_psql.stop() + logger.info('removed test LTASO database server') + class TestLTAStorageWebService(FlaskLiveTestCase): def create_app(self): @@ -108,15 +161,16 @@ class TestLTAStorageWebService(FlaskLiveTestCase): rootDirectories = content['rootDirectories'] self.assertEqual(3, len(rootDirectories)) - rootDirsDict = dict([(x['name'], x) for x in rootDirectories]) + rootDirsDict = dict([(x['dir_name'], x) for x in rootDirectories]) self.assertEqual('siteA', rootDirsDict['rootDir1']['site_name']) self.assertEqual('siteA', rootDirsDict['rootDir2']['site_name']) self.assertEqual('siteB', rootDirsDict['path/to/rootDir3']['site_name']) - def main(argv): + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) unittest.main() # run tests if main diff --git a/LTA/ltastorageoverview/test/test_scraper.py b/LTA/ltastorageoverview/test/test_scraper.py new file mode 100755 index 0000000000000000000000000000000000000000..ce7d1ff18897b6828aee55698c9787ba9d7c7adb --- /dev/null +++ b/LTA/ltastorageoverview/test/test_scraper.py @@ -0,0 +1,55 @@ +#!/usr/bin/python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +import logging + +from common_test_ltastoragedb import * +from lofar.lta.ltastorageoverview import scraper + +logger = logging.getLogger(__name__) + +class TestLocation(unittest.TestCase): + def test_isRoot(self): + loc = scraper.Location('srm://srm.grid.sara.nl:8443', '/foo/bar') + self.assertFalse(loc.isRoot()) + + loc = scraper.Location('srm://srm.grid.sara.nl:8443', '/') + self.assertTrue(loc.isRoot()) + + def test_malformed_location(self): + with self.assertRaises(ValueError) as context: + scraper.Location('http://astron.nl', '/foo/bar') + self.assertTrue('malformed srm url' in str(context.exception)) + + with self.assertRaises(ValueError) as context: + scraper.Location('srm://srm.grid.sara.nl:8443', 'some_dir_name') + self.assertTrue('malformed directory' in str(context.exception)) + + +class TestScraper(CommonLTAStorageDbTest): + pass + +# run tests if main +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) + + unittest.main() diff --git a/LTA/ltastorageoverview/test/test_scraper.run b/LTA/ltastorageoverview/test/test_scraper.run new file mode 100755 index 0000000000000000000000000000000000000000..b47e4dcc95958dcd57c07bc6eec3b5085de62ea1 --- /dev/null +++ b/LTA/ltastorageoverview/test/test_scraper.run @@ -0,0 
+1,4 @@ +#!/bin/bash + +source python-coverage.sh +python_coverage_test "ltas*" test_scraper.py diff --git a/LTA/ltastorageoverview/test/test_scraper.sh b/LTA/ltastorageoverview/test/test_scraper.sh new file mode 100755 index 0000000000000000000000000000000000000000..66ce2e9f7a39c004d9ab8b955db0d59078a7bb2d --- /dev/null +++ b/LTA/ltastorageoverview/test/test_scraper.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh test_scraper diff --git a/LTA/ltastorageoverview/test/test_store.py b/LTA/ltastorageoverview/test/test_store.py index 30b08fa0325956534ad5ca7da396c61c888f55ad..358bef808d798ebf003bd205c89e7f4dc958e2d8 100755 --- a/LTA/ltastorageoverview/test/test_store.py +++ b/LTA/ltastorageoverview/test/test_store.py @@ -19,113 +19,233 @@ # $Id$ -import unittest -import datetime +from datetime import datetime import time -import os -import os.path -import tempfile -from ltastorageoverview import store +from pprint import pformat +from common_test_ltastoragedb import * +from lofar.lta.ltastorageoverview import store +from lofar.common.postgres import FETCH_ALL -class TestLTAStorageDb(unittest.TestCase): - def setUp(self): - tmpfile = os.path.join(tempfile.gettempdir(), 'test.sqlite') - self.db = store.LTAStorageDb(tmpfile, True) +import logging +logger = logging.getLogger(__name__) - self.assertTrue(os.path.exists(self.db.db_filename)) - - #def tearDown(self): - #if os.path.exists(self.db.db_filename): - #os.remove(self.db.db_filename) +class TestLTAStorageDb(CommonLTAStorageDbTest): def testSites(self): - self.db.insertSite('siteA', 'srm://siteA.org') - self.db.insertSite('siteB', 'srm://siteB.org') + with store.LTAStorageDb(self.dbcreds, True) as db: + siteA_id = db.insertSiteIfNotExists('siteA', 'srm://siteA.org') + siteB_id = db.insertSiteIfNotExists('siteB', 'srm://siteB.org') - sites = self.db.sites() - siteNames = [x[1] for x in sites] - self.assertEquals(2, len(siteNames)) - self.assertTrue('siteA' in siteNames) - self.assertTrue('siteB' in siteNames) + sites = 
db.sites() + siteNames = [x['name'] for x in sites] + self.assertEquals(2, len(siteNames)) + self.assertTrue('siteA' in siteNames) + self.assertTrue('siteB' in siteNames) - site = self.db.site(1) - self.assertTrue('siteA' in site[1]) + site = db.site(siteA_id) + self.assertEqual('siteA', site['name']) - site = self.db.site(2) - self.assertTrue('siteB' in site[1]) + site = db.site(siteB_id) + self.assertEqual('siteB', site['name']) def testRootDirs(self): - siteA_id = self.db.insertSite('siteA', 'srm://siteA.org') - siteB_id = self.db.insertSite('siteB', 'srm://siteB.org') - - dirA1_id = self.db.insertRootDirectory('siteA', 'rootDir1') - dirA2_id = self.db.insertRootDirectory('siteA', 'rootDir2') - dirA3_id = self.db.insertRootDirectory('siteA', 'path/to/rootDir3') - - dirB1_id = self.db.insertRootDirectory('siteB', 'rootDir1') - dirB2_id = self.db.insertRootDirectory('siteB', 'path/to/otherRootDir') - - rootDirs = self.db.rootDirectories() - self.assertEquals(5, len(rootDirs)) - self.assertTrue((dirA1_id, 'rootDir1', siteA_id, 'siteA') in rootDirs) - self.assertTrue((dirA2_id, 'rootDir2', siteA_id, 'siteA') in rootDirs) - self.assertTrue((dirA3_id, 'path/to/rootDir3', siteA_id, 'siteA') in rootDirs) - self.assertTrue((dirB1_id, 'rootDir1', siteB_id, 'siteB') in rootDirs) - self.assertTrue((dirB2_id, 'path/to/otherRootDir', siteB_id, 'siteB') in rootDirs) + with store.LTAStorageDb(self.dbcreds, True) as db: + siteA_id = db.insertSiteIfNotExists('siteA', 'srm://siteA.org') + siteB_id = db.insertSiteIfNotExists('siteB', 'srm://siteB.org') - def testDirectoryTrees(self): - siteA_id = self.db.insertSite('siteA', 'srm://siteA.org') - siteB_id = self.db.insertSite('siteB', 'srm://siteB.org') + dirA1_id = db.insertRootDirectory('siteA', 'rootDir1') + dirA2_id = db.insertRootDirectory('siteA', 'rootDir2') + dirA3_id = db.insertRootDirectory('siteA', 'path/to/rootDir3') - for i in range(2): - rootDir_id = self.db.insertRootDirectory('siteA', 'rootDir_%d' % i) + dirB1_id = 
db.insertRootDirectory('siteB', 'rootDir1') + dirB2_id = db.insertRootDirectory('siteB', 'path/to/otherRootDir') - for j in range(2): - subDir_id = self.db.insertSubDirectory(rootDir_id, 'subDir_%d' % j) - self.db.insertFileInfo('file_%d' % j, 271*(j+1), datetime.datetime.utcnow(), subDir_id) + rootDirs = db.rootDirectories() + self.assertEquals(5, len(rootDirs)) - for k in range(2): - subsubDir_id = self.db.insertSubDirectory(subDir_id, 'subsubDir_%d' % k) - self.db.insertFileInfo('file_%d_%d' % (j,k), 314*(k+1), datetime.datetime.utcnow(), subsubDir_id) + rootDirsDict = {rd['root_dir_id']:rd for rd in rootDirs} - rootDirs = self.db.rootDirectories() - self.assertEquals(2, len(rootDirs)) + self.assertEqual('rootDir1', rootDirsDict[dirA1_id]['dir_name']) + self.assertEqual(siteA_id, rootDirsDict[dirA1_id]['site_id']) + self.assertEqual('siteA', rootDirsDict[dirA1_id]['site_name']) - for (id, name, site_id, site_name) in rootDirs: - subDirs = self.db.subDirectories(id, 1, False) - for subDir in subDirs: - subDir_parent_id = subDir[2] - self.assertEquals(id, subDir_parent_id) + self.assertEqual('rootDir2', rootDirsDict[dirA2_id]['dir_name']) + self.assertEqual(siteA_id, rootDirsDict[dirA2_id]['site_id']) + self.assertEqual('siteA', rootDirsDict[dirA2_id]['site_name']) - print '\n'.join([str(x) for x in self.db.filesInTree(rootDir_id)]) + self.assertEqual('path/to/rootDir3', rootDirsDict[dirA3_id]['dir_name']) + self.assertEqual(siteA_id, rootDirsDict[dirA3_id]['site_id']) + self.assertEqual('siteA', rootDirsDict[dirA3_id]['site_name']) - def testLeastRecentlyVisitedDirectory(self): - siteA_id = self.db.insertSite('siteA', 'srm://siteA.org') - - dir_ids = [] - for i in range(3): - dir_id = self.db.insertRootDirectory('siteA', 'rootDir_%d' % i) - dir_ids.append(dir_id) + self.assertEqual('rootDir1', rootDirsDict[dirB1_id]['dir_name']) + self.assertEqual(siteB_id, rootDirsDict[dirB1_id]['site_id']) + self.assertEqual('siteB', rootDirsDict[dirB1_id]['site_name']) - 
self.db.updateDirectoryLastVisitTime(dir_id, datetime.datetime.utcnow()) - time.sleep(0.002) + self.assertEqual('path/to/otherRootDir', rootDirsDict[dirB2_id]['dir_name']) + self.assertEqual(siteB_id, rootDirsDict[dirB2_id]['site_id']) + self.assertEqual('siteB', rootDirsDict[dirB2_id]['site_name']) - visitStats = self.db.visitStats() - self.assertTrue('siteA' in visitStats) - self.assertTrue('least_recent_visited_dir_id' in visitStats['siteA']) + root_dir_ids_siteA = set(d['root_dir_id'] for d in db.rootDirectoriesForSite(siteA_id)) + self.assertEqual(set([dirA1_id, dirA2_id, dirA3_id]), root_dir_ids_siteA) - lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id'] - self.assertEquals(dir_ids[0], lvr_dir_id) + root_dir_ids_siteB = set(d['root_dir_id'] for d in db.rootDirectoriesForSite(siteB_id)) + self.assertEqual(set([dirB1_id, dirB2_id]), root_dir_ids_siteB) - self.db.updateDirectoryLastVisitTime(dir_ids[0], datetime.datetime.utcnow()) - self.db.updateDirectoryLastVisitTime(dir_ids[1], datetime.datetime.utcnow()) + root_dirs_non_existing_site = db.rootDirectoriesForSite(999) + self.assertEqual([], root_dirs_non_existing_site) - visitStats = self.db.visitStats() - lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id'] - self.assertEquals(dir_ids[2], lvr_dir_id) + def testNonExistingDir(self): + with store.LTAStorageDb(self.dbcreds, True) as db: + dir = db.directoryByName('fjsdka;58432aek5843rfsjd8-sa') + self.assertEqual(None, dir) + def testLeastRecentlyVisitedDirectory(self): + with store.LTAStorageDb(self.dbcreds, True) as db: + db.insertSiteIfNotExists('siteA', 'srm://siteA.org') + + dir_ids = [] + for i in range(3): + dir_id = db.insertRootDirectory('siteA', 'rootDir_%d' % i) + dir_ids.append(dir_id) + + db.updateDirectoryLastVisitTime(dir_id, datetime.utcnow()) + time.sleep(0.002) + + visitStats = db.visitStats() + self.assertTrue('siteA' in visitStats) + self.assertTrue('least_recent_visited_dir_id' in visitStats['siteA']) + + lvr_dir_id = 
visitStats['siteA']['least_recent_visited_dir_id'] + self.assertEquals(dir_ids[0], lvr_dir_id) + + db.updateDirectoryLastVisitTime(dir_ids[0], datetime.utcnow()) + db.updateDirectoryLastVisitTime(dir_ids[1], datetime.utcnow()) + + visitStats = db.visitStats() + lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id'] + self.assertEquals(dir_ids[2], lvr_dir_id) + + def testDuplicateSubDirs(self): + with store.LTAStorageDb(self.dbcreds, True) as db: + db.insertSiteIfNotExists('siteA', 'srm://siteA.org') + db.insertSiteIfNotExists('siteB', 'srm://siteB.org') + + dirA_id = db.insertRootDirectory('siteA', 'rootDir1') + dirB_id = db.insertRootDirectory('siteB', 'rootDir1') + + subDirA1_id = db.insertSubDirectory('foo', dirA_id) + subDirA2_id = db.insertSubDirectory('bar', dirA_id) + subDirB1_id = db.insertSubDirectory('foo', dirB_id) + + self.assertNotEquals(None, subDirA1_id) + self.assertNotEquals(None, subDirA2_id) + self.assertNotEquals(None, subDirB1_id) + + subDirA1a_id = db.insertSubDirectory('foo', dirA_id) + self.assertEquals(None, subDirA1a_id) + + def _fill_test_db_with_sites_and_root_dirs(self, db): + """ + helper method to fill empty database with simple sites and root dirs + """ + db.insertSiteIfNotExists('siteA', 'srm://siteA.foo.bar:8443') + db.insertSiteIfNotExists('siteB', 'srm://siteB.foo.bar:8443') + + db.insertRootDirectory('siteA', '/root_dir_1') + db.insertRootDirectory('siteA', '/root_dir_2') + db.insertRootDirectory('siteA', '/long/path/to/root_dir_3') + db.insertRootDirectory('siteB', '/root_dir_1') + + + def test_insert_missing_directory_tree_if_needed(self): + """ Test core method _insertMissingDirectoryTreeIfNeeded for all known root dirs. + Should result in new directory entries in the database for the new sub directories only. 
+ """ + with store.LTAStorageDb(self.dbcreds, True) as db: + self._fill_test_db_with_sites_and_root_dirs(db) + + for site in db.sites(): + site_id = site['id'] + for root_dir in db.rootDirectoriesForSite(site_id): + root_dir_path = root_dir['dir_name'] + + # root dir should already exist + dir = db.directoryByName(root_dir_path, site_id) + self.assertIsNotNone(dir) + + # insert the not-so-missing root dir. + # nothing should happen, because the root dir already exists + new_dir_ids = db.insert_missing_directory_tree_if_needed(root_dir_path, site_id) + self.assertEqual(0, len(new_dir_ids)) + + # now insert some new subdirs, with multiple levels. + for subdir_path in ['/foo', '/bar/xyz']: + dir_path = root_dir_path + subdir_path + # dir should not exist yet + self.assertIsNone(db.directoryByName(dir_path, site_id)) + + # let the handler insert the missing dirs. + db.insert_missing_directory_tree_if_needed(dir_path, site_id) + + # dir should exist now + dir = db.directoryByName(dir_path, site_id) + self.assertIsNotNone(dir) + + # check if new dir has expected root dir + parents = db.parentDirectories(dir['dir_id']) + self.assertEqual(root_dir['root_dir_id'], parents[0]['id']) + + def test_insert_missing_directory_tree_if_needed_for_path_with_unknown_rootdir(self): + """ Test core method _insertMissingDirectoryTreeIfNeeded for a path with an unknown root dir + Should raise LookupError. + """ + with store.LTAStorageDb(self.dbcreds, True) as db: + self._fill_test_db_with_sites_and_root_dirs(db) + + for site in db.sites(): + site_id = site['id'] + with self.assertRaises(LookupError) as context: + incorrect_dir_path = '/fdjsalfja5h43535h3oiu/5u905u3f' + db.insert_missing_directory_tree_if_needed(incorrect_dir_path, site_id) + self.assertTrue('Could not find parent root dir' in context.exception.message) + + def testProjectsAndObservations(self): + with store.LTAStorageDb(self.dbcreds, True) as db: + #first insert a lot of data... 
+ db.insertSiteIfNotExists('juelich', 'srm://lofar-srm.fz-juelich.de:8443') + db.insertSiteIfNotExists('sara', 'srm://srm.grid.sara.nl:8443') + + juelich_root_dir_id = db.insertRootDirectory('juelich', '/pnfs/fz-juelich.de/data/lofar/ops/') + sara_root_dir_id = db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/ops') + + juelich_projects_dir_id = db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects', juelich_root_dir_id) + sara_projects_dir_id = db.insertSubDirectory('/pnfs/grid.sara.nl/data/lofar/ops/projects', sara_root_dir_id) + + for project_nr, project_name in enumerate(['lc8_001', '2017lofarobs', 'ddt5_001']): + # projects are sometimes stored at multiple sites + for projects_dir_id in [juelich_projects_dir_id, sara_projects_dir_id]: + project_dir_id = db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects/' + project_name, + projects_dir_id) + for obs_nr in range(3): + obs_name = 'L%06d' % ((project_nr+1)*1000 + obs_nr) + obs_dir_id = db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects/' + project_name + '/' + obs_name, + project_dir_id) + + for sb_nr in range(244): + file_name = '%s_SB%03d.MS.tar' % (obs_name, sb_nr) + db.insertFileInfo(file_name, 1, datetime.utcnow(), obs_dir_id, False) + db.commit() + + # then check the results + # TODO check the results + logger.info(pformat(db.executeQuery('select * from metainfo.project_directory', fetch=FETCH_ALL))) + logger.info(pformat(db.executeQuery('select * from metainfo.project_stats', fetch=FETCH_ALL))) + logger.info(pformat(db.executeQuery('select * from metainfo.project_observation_dataproduct', fetch=FETCH_ALL))) # run tests if main if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) + unittest.main() diff --git a/LTA/ltastorageoverview/test/test_store.run b/LTA/ltastorageoverview/test/test_store.run index b2e574673fcffe8730b24a19d2b3ccd8ba1a5be7..952bff78ec3045c9679e29a7b3b29d5ecefacc4c 
100755 --- a/LTA/ltastorageoverview/test/test_store.run +++ b/LTA/ltastorageoverview/test/test_store.run @@ -1,3 +1,4 @@ #!/bin/bash -python test_store.py +source python-coverage.sh +python_coverage_test "ltas*" test_store.py diff --git a/LTA/sip/CMakeLists.txt b/LTA/sip/CMakeLists.txt index 53622add96610abbb6ec6926dee9b9cbf28a27cd..2fbdfba82df502eac38b8e877e3bb81f91dd95ee 100644 --- a/LTA/sip/CMakeLists.txt +++ b/LTA/sip/CMakeLists.txt @@ -1,7 +1,7 @@ # $Id: CMakeLists.txt 32985 2015-11-26 11:10:57Z schaap $ lofar_find_package(Python 2.7 REQUIRED) -lofar_package(sip 0.1 DEPENDS PyCommon) +lofar_package(sip 0.1 DEPENDS PyCommon LTACommon) include(PythonInstall) diff --git a/LTA/sip/lib/CMakeLists.txt b/LTA/sip/lib/CMakeLists.txt index f0c9f79189c61ae6603d1e58f71c2b651856ed5d..c6b1cdbe38e21439d36b3ea169f8a9e792c0a30f 100644 --- a/LTA/sip/lib/CMakeLists.txt +++ b/LTA/sip/lib/CMakeLists.txt @@ -17,7 +17,6 @@ set(_py_files set(resource_files station_coordinates.conf - LTA-SIP.xsd ) diff --git a/LTA/sip/lib/query.py b/LTA/sip/lib/query.py index c00b253c1d03d4e2fdfbdff9081e419cc4ae3952..2c10f9b9f5402629bfeb0ac6553777af11e7b0ea 100644 --- a/LTA/sip/lib/query.py +++ b/LTA/sip/lib/query.py @@ -3,7 +3,7 @@ import urllib import requests -from os.path import expanduser +from os.path import expanduser, exists import xml.etree.ElementTree as ET import xmlrpclib import uuid @@ -15,6 +15,13 @@ passw = None #host = "lta-ingest-test.lofar.eu:19443" host = "lofar-ingest.target.rug.nl:9443" +if not exists(path): + # write default file + with open(path, 'w') as file: + file.write("user=\n") + file.write("password=\n") + file.write("host=\n") + with open(path,'r') as file: print "Parsing user credentials from",path for line in file: diff --git a/LTA/sip/lib/validator.py b/LTA/sip/lib/validator.py index 095063e8ddc6708284649739f8b3e63c3e2fed24..6a60919f6a942e1f82be370c0404b276adaac8d3 100644 --- a/LTA/sip/lib/validator.py +++ b/LTA/sip/lib/validator.py @@ -6,7 +6,9 @@ import ltasip d = 
os.path.dirname(os.path.realpath(__file__)) XSDPATH = d+"/LTA-SIP.xsd" -def validate(xmlpath, xsdpath=XSDPATH): +DEFAULT_SIP_XSD_PATH = os.path.join(os.environ.get('LOFARROOT', '/opt/lofar'), 'etc', 'lta', 'LTA-SIP.xsd') + +def validate(xmlpath, xsdpath=DEFAULT_SIP_XSD_PATH): '''validates given xml file against given xsd file''' print "validating", xmlpath, "against", xsdpath @@ -103,7 +105,7 @@ def main(xml): try: xml = xml - xsd = XSDPATH + xsd = DEFAULT_SIP_XSD_PATH valid = validate(xml, xsd) consistent = check_consistency(xml) return valid and consistent diff --git a/LTA/sip/test/test_feedback.py b/LTA/sip/test/test_feedback.py index 07f0158cd2ad04c466393b2fefd13e2ec7a3e5a1..4e7994e9eaea2b63ae7369afe9af0f3f3070040c 100755 --- a/LTA/sip/test/test_feedback.py +++ b/LTA/sip/test/test_feedback.py @@ -19,6 +19,13 @@ # $Id: $ +try: + import pyxb +except ImportError as e: + print str(e) + print 'Please install python package pyxb: sudo apt-get install python-pyxb' + exit(3) # special lofar test exit code: skipped test + import unittest from lofar.lta.sip import siplib from lofar.lta.sip import validator diff --git a/LTA/sip/test/test_siplib.py b/LTA/sip/test/test_siplib.py index dbc121391f743bcb933f497442bf99ef9c679c24..4b4ceef82b19b83500a91f16e97f72163766f02b 100755 --- a/LTA/sip/test/test_siplib.py +++ b/LTA/sip/test/test_siplib.py @@ -20,6 +20,14 @@ # $Id: $ import unittest + +try: + import pyxb +except ImportError as e: + print str(e) + print 'Please install python package pyxb: sudo apt-get install python-pyxb' + exit(3) # special lofar test exit code: skipped test + from lofar.lta.sip import siplib from lofar.lta.sip import validator from lofar.lta.sip import constants diff --git a/LTA/sip/test/test_validator.py b/LTA/sip/test/test_validator.py index 33077d7e7acb6c088666794bcb8d6e1422557404..cc678d9fa1507be90afbe1e885475b270d795c92 100644 --- a/LTA/sip/test/test_validator.py +++ b/LTA/sip/test/test_validator.py @@ -19,6 +19,13 @@ # $Id: $ +try: + import pyxb 
+except ImportError as e: + print str(e) + print 'Please install python package pyxb: sudo apt-get install python-pyxb' + exit(3) # special lofar test exit code: skipped test + import unittest from lofar.lta.sip import validator diff --git a/LTA/sip/test/test_visualizer.py b/LTA/sip/test/test_visualizer.py index 352077c4eaa6c906164c4d623d007d2bda4603b1..6ac71ab3487b7d712323cf2919c8f462460e13d0 100755 --- a/LTA/sip/test/test_visualizer.py +++ b/LTA/sip/test/test_visualizer.py @@ -19,6 +19,13 @@ # $Id: $ +try: + import pyxb +except ImportError as e: + print str(e) + print 'Please install python package pyxb: sudo apt-get install python-pyxb' + exit(3) # special lofar test exit code: skipped test + import unittest import time import os diff --git a/MAC/APL/APLCommon/src/swlevel b/MAC/APL/APLCommon/src/swlevel index 29946e869d3f319cbce23feeea30e77e7e059a50..724411e57847eaab6e83d4718b9b1c72551be221 100755 --- a/MAC/APL/APLCommon/src/swlevel +++ b/MAC/APL/APLCommon/src/swlevel @@ -549,7 +549,8 @@ goto_level() echo "set rcumode to 0 (power save)" timeout 5 $BINDIR/rspctl --rcumode=0 1>/dev/null 2>&1 # Wait for setting to take effect before killing RSPDriver - sleep 2 + # 3 seconds is menno's 'golden rule', less than 3 seconds causes issues sometimes + sleep 3 else echo "Beware: NOT going to rcumode 0 as images are still being initialized" fi diff --git a/MAC/APL/PAC/ITRFBeamServer/src/CMakeLists.txt b/MAC/APL/PAC/ITRFBeamServer/src/CMakeLists.txt index 45bf2de6d4fc2d9c5fbcb0ee051872574701ce69..e3e6453a0ea52d43380313da017c72022b407821 100644 --- a/MAC/APL/PAC/ITRFBeamServer/src/CMakeLists.txt +++ b/MAC/APL/PAC/ITRFBeamServer/src/CMakeLists.txt @@ -25,3 +25,6 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/BeamServer.conf ${CMAKE_CURRENT_BINARY_DIR}/beamctl.conf DESTINATION etc) + +lofar_add_sysconf_files(beamctl.log_prop) + diff --git a/MAC/APL/PAC/ITRFBeamServer/src/beamctl.cc b/MAC/APL/PAC/ITRFBeamServer/src/beamctl.cc index 
f33330f7c6454035a28508113ce3e7a55d34db0e..e75394fa83ada2c332b8cd343285bb22eeed81f0 100644 --- a/MAC/APL/PAC/ITRFBeamServer/src/beamctl.cc +++ b/MAC/APL/PAC/ITRFBeamServer/src/beamctl.cc @@ -528,7 +528,7 @@ void beamctl::usage() const " beamctl <rcuspec> <anapointing> [<anapointing> ...] [<dataspec> <digpointing> [<digpointing> ...]] FOR HBA ANTENNAS\n" " beamctl --calinfo\n" "where:\n" - " <rcuspec> = --antennaset --rcus --band \n" + " <rcuspec> = --antennaset --rcus --band (or --antennaset --rcus --rcumode)\n" " <dataspec> = --subbands --beamlets \n" " <digpointing> = --digdir \n" " <anapointing> = --anadir \n" @@ -550,7 +550,10 @@ void beamctl::usage() const " # SKYSCAN will scan the sky with a L x M grid in the (l,m) plane\n" " --anadir=longitude,latitude,type[,duration]\n" " # direction of the analogue HBA beam\n" - " --rcumode=0..7 # OBSOLETE, RCU mode to use (may not conflict with antennaset)\n" + " --rcumode=0..7 # Old-style RCU mode to use (DEPRECATED; only available for\n" + " compatibility with existing scripts. Please use antenna-\n" + " set + band selection. The rcumode selected here must not\n" + " conflict with the selected antennaset)\n" " --help # print this usage\n" "\n" "The order of the arguments is trivial.\n" diff --git a/MAC/APL/PAC/ITRFBeamServer/src/beamctl.log_prop b/MAC/APL/PAC/ITRFBeamServer/src/beamctl.log_prop new file mode 100644 index 0000000000000000000000000000000000000000..e0137d61002e5aace460dff3cb509012251c249e --- /dev/null +++ b/MAC/APL/PAC/ITRFBeamServer/src/beamctl.log_prop @@ -0,0 +1,48 @@ +# +# Adapted log_prop file for beamctl. All runs of beamctl will add logging to +# an existing Rolling logfile, instead of creating a new one each run. +# As the buffer was limited, many older files were overwritten which +# made debugging or finding out what happened in case of a problem impossible. 
+# +# setup the right levels for logging and tracing +# +# Note: In production you don't want to loose any information so a daily rolling-file +# is used and tracing is switched off. +# For debugging purposes the daily rolling-file becomes too large so a size-based +# rolling file is used and tracing is switched on. +# +# Please do not change the logger lines below, only comment them in or out. + +# For PRODUCTION: +# - Select the appropriate log-level for the rootLogger (DEBUG or INFO) +# - Leave the TRC logger on DEBUG,DUMP +# - Comment out the rootLogger and the TRC logger in the TEST section of this file +#log4cplus.rootLogger=DEBUG, DAILYFILE +log4cplus.rootLogger=INFO, ROLFILE +log4cplus.logger.TRC=DEBUG, DUMP + +# For TESTING: +# - Select the appropriate trace level for the TRC logger +# - Leave the rootLogger on DEBUG, ROLFILE +# - Comment out the rootLogger and the TRC logger in the PRODUCTION section of this file +#log4cplus.rootLogger=DEBUG, ROLFILE +#log4cplus.logger.TRC=TRACE5, ROLFILE + + +# The next line should always be active. 
+log4cplus.additivity.TRC=FALSE + +# Definitions of the output channels +log4cplus.appender.STDOUT=log4cplus::ConsoleAppender +log4cplus.appender.STDOUT.layout=log4cplus::PatternLayout +log4cplus.appender.STDOUT.layout.ConversionPattern=%D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n + +log4cplus.appender.ROLFILE=log4cplus::RollingFileAppender +log4cplus.appender.ROLFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log +log4cplus.appender.ROLFILE.MaxFileSize=10MB +log4cplus.appender.ROLFILE.MaxBackupIndex=999 +log4cplus.appender.ROLFILE.layout=log4cplus::PatternLayout +log4cplus.appender.ROLFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n + +log4cplus.appender.DUMP=log4cplus::NullAppender + diff --git a/MAC/APL/PIC/RSP_Driver/src/CMakeLists.txt b/MAC/APL/PIC/RSP_Driver/src/CMakeLists.txt index 9c6cb265b4d8db28a22ff063ba9f13f5cd103b7e..55239b312398d59904f96973281a3d06a69ebf10 100644 --- a/MAC/APL/PIC/RSP_Driver/src/CMakeLists.txt +++ b/MAC/APL/PIC/RSP_Driver/src/CMakeLists.txt @@ -128,3 +128,5 @@ lofar_add_bin_program(RSPDriver SDOWrite.cc SDORead.cc) +lofar_add_sysconf_files(rspctl.log_prop) + diff --git a/MAC/APL/PIC/RSP_Driver/src/rspctl.log_prop b/MAC/APL/PIC/RSP_Driver/src/rspctl.log_prop new file mode 100644 index 0000000000000000000000000000000000000000..ee871ac86a28ebc40f87d1eed5c5b02c6c1142c6 --- /dev/null +++ b/MAC/APL/PIC/RSP_Driver/src/rspctl.log_prop @@ -0,0 +1,49 @@ +# +# Adapted log_prop file for rspctl. All runs of rspctl will add logging to +# an existing Rolling logfile, instead of creating a new one each run. +# As rspctl is often called during observations, this would create too many +# instances of rspctl.log files, and many older files were overwritten which +# made debugging or finding out what happened in case of a problem impossible. 
+# +# setup the right levels for logging and tracing +# +# Note: In production you don't want to loose any information so a daily rolling-file +# is used and tracing is switched off. +# For debugging purposes the daily rolling-file becomes too large so a size-based +# rolling file is used and tracing is switched on. +# +# Please do not change the logger lines below, only comment them in or out. + +# For PRODUCTION: +# - Select the appropriate log-level for the rootLogger (DEBUG or INFO) +# - Leave the TRC logger on DEBUG,DUMP +# - Comment out the rootLogger and the TRC logger in the TEST section of this file +#log4cplus.rootLogger=DEBUG, DAILYFILE +log4cplus.rootLogger=INFO, ROLFILE +log4cplus.logger.TRC=DEBUG, DUMP + +# For TESTING: +# - Select the appropriate trace level for the TRC logger +# - Leave the rootLogger on DEBUG, ROLFILE +# - Comment out the rootLogger and the TRC logger in the PRODUCTION section of this file +#log4cplus.rootLogger=DEBUG, ROLFILE +#log4cplus.logger.TRC=TRACE5, ROLFILE + + +# The next line should always be active. 
+log4cplus.additivity.TRC=FALSE + +# Definitions of the output channels +log4cplus.appender.STDOUT=log4cplus::ConsoleAppender +log4cplus.appender.STDOUT.layout=log4cplus::PatternLayout +log4cplus.appender.STDOUT.layout.ConversionPattern=%D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n + +log4cplus.appender.ROLFILE=log4cplus::RollingFileAppender +log4cplus.appender.ROLFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log +log4cplus.appender.ROLFILE.MaxFileSize=10MB +log4cplus.appender.ROLFILE.MaxBackupIndex=999 +log4cplus.appender.ROLFILE.layout=log4cplus::PatternLayout +log4cplus.appender.ROLFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n + +log4cplus.appender.DUMP=log4cplus::NullAppender + diff --git a/MAC/APL/PIC/TBB_Driver/src/CMakeLists.txt b/MAC/APL/PIC/TBB_Driver/src/CMakeLists.txt index 823d72f203dc82a9c44b1ea37d8fc53651aeab31..f6cca805155ff444d82e2481af59008bd8e90b1e 100644 --- a/MAC/APL/PIC/TBB_Driver/src/CMakeLists.txt +++ b/MAC/APL/PIC/TBB_Driver/src/CMakeLists.txt @@ -77,4 +77,4 @@ target_link_libraries(tbbdriver tp_protocol) lofar_add_bin_program(versiontbb_driver versiontbb_driver.cc) lofar_add_bin_program(tbbctl tbbctl.cc) lofar_add_bin_program(TBBDriver TBBDriver.cc) - +lofar_add_sysconf_files(tbbctl.log_prop) diff --git a/MAC/APL/PIC/TBB_Driver/src/tbbctl.log_prop b/MAC/APL/PIC/TBB_Driver/src/tbbctl.log_prop new file mode 100644 index 0000000000000000000000000000000000000000..96fbb859d92924752bbef542b8a41f3cdf873a88 --- /dev/null +++ b/MAC/APL/PIC/TBB_Driver/src/tbbctl.log_prop @@ -0,0 +1,49 @@ +# +# Adapted log_prop file for tbbctl. All runs of tbbctl will add logging to +# an existing Rolling logfile, instead of creating a new one each run. +# As tbbctl is often called during observations, this would create too many +# instances of tbbctl.log files, and many older files were overwritten which +# made debugging or finding out what happened in case of a problem impossible. 
+# +# setup the right levels for logging and tracing +# +# Note: In production you don't want to loose any information so a daily rolling-file +# is used and tracing is switched off. +# For debugging purposes the daily rolling-file becomes too large so a size-based +# rolling file is used and tracing is switched on. +# +# Please do not change the logger lines below, only comment them in or out. + +# For PRODUCTION: +# - Select the appropriate log-level for the rootLogger (DEBUG or INFO) +# - Leave the TRC logger on DEBUG,DUMP +# - Comment out the rootLogger and the TRC logger in the TEST section of this file +#log4cplus.rootLogger=DEBUG, DAILYFILE +log4cplus.rootLogger=INFO, ROLFILE +log4cplus.logger.TRC=DEBUG, DUMP + +# For TESTING: +# - Select the appropriate trace level for the TRC logger +# - Leave the rootLogger on DEBUG, ROLFILE +# - Comment out the rootLogger and the TRC logger in the PRODUCTION section of this file +#log4cplus.rootLogger=DEBUG, ROLFILE +#log4cplus.logger.TRC=TRACE5, ROLFILE + + +# The next line should always be active. 
+log4cplus.additivity.TRC=FALSE + +# Definitions of the output channels +log4cplus.appender.STDOUT=log4cplus::ConsoleAppender +log4cplus.appender.STDOUT.layout=log4cplus::PatternLayout +log4cplus.appender.STDOUT.layout.ConversionPattern=%D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n + +log4cplus.appender.ROLFILE=log4cplus::RollingFileAppender +log4cplus.appender.ROLFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log +log4cplus.appender.ROLFILE.MaxFileSize=10MB +log4cplus.appender.ROLFILE.MaxBackupIndex=999 +log4cplus.appender.ROLFILE.layout=log4cplus::PatternLayout +log4cplus.appender.ROLFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n + +log4cplus.appender.DUMP=log4cplus::NullAppender + diff --git a/MAC/Deployment/data/Coordinates/CoordMenu.py b/MAC/Deployment/data/Coordinates/CoordMenu.py index 717b37f801163be634f1cee32b5a8a36a0052c63..dd5c125600a62ea8c699cb5be5678f0cf30b2be5 100755 --- a/MAC/Deployment/data/Coordinates/CoordMenu.py +++ b/MAC/Deployment/data/Coordinates/CoordMenu.py @@ -1,5 +1,6 @@ #!/usr/bin/python # P.Donker ASTRON +# and Arno Schoenmakers the Great import sys,pgdb,pg from subprocess import * @@ -10,7 +11,7 @@ from database import * dbName=getDBname() dbHost=getDBhost() -VERSION = '0.0.1' # version of this script +VERSION = '0.0.2' # version of this script default_targetdate='2009.5' def menu(): diff --git a/MAC/Deployment/data/Coordinates/CoordMenu_Arno.py b/MAC/Deployment/data/Coordinates/CoordMenu_Arno.py new file mode 100755 index 0000000000000000000000000000000000000000..717b37f801163be634f1cee32b5a8a36a0052c63 --- /dev/null +++ b/MAC/Deployment/data/Coordinates/CoordMenu_Arno.py @@ -0,0 +1,211 @@ +#!/usr/bin/python +# P.Donker ASTRON + +import sys,pgdb,pg +from subprocess import * +import os,sys,time +from database import * + +# get info from database.py +dbName=getDBname() +dbHost=getDBhost() + +VERSION = '0.0.1' # version of this script +default_targetdate='2009.5' + +def menu(): + print 
""" + |=====================================| + | Coordinates menu | + |=====================================| + | 0 do all (1,2,3,4,5,6,7,9,11) | + | 1 destroy and create CDB | + | 2 create CDB objects | + | 3 load all normal-vectors | + | 4 load all rotation matrices | + | 5 load all hba_rotations | + | 6 calculate all HBADeltas | + | 7 load all ETRF(expected) files | + | 8 load one measurement file | + | 9 transform all ETRF to ITRF | + | 10 transform one ETRF to ITRF | + | 11 make all conf files | + | 12 make one conf file | + | Q quit | + |_____________________________________| + """ + +def getInputWithDefault(prompt, defaultValue): + answer = defaultValue + answer = raw_input(prompt+" ["+str(defaultValue)+"]: ") + if (len(answer)==0): answer=defaultValue + return answer + +def create_CDB(): + print 'Creating new database' + res = Popen('./create_CDB.sh').wait() + print res + +def create_CDB_objects(): + print 'Creating database objects' + res = Popen('./create_CDB_objects.py').wait() + print res + +def load_normal_vectors(): + print 'Loading normal vectors' + filename = getInputWithDefault("enter filename to load","data/normal_vectors.dat") + if len(filename) == 0: + print 'Error, No filename given' + sys.exit() + if not os.path.exists(filename): + print "File does not exist" + sys.exit() + res = Popen(['./load_normal_vectors.py',filename]).wait() + if (res != 0): sys.exit(1) + #time.sleep(3) + +def load_rotation_matrices(): + print 'Loading rotation matrices' + filename = getInputWithDefault("enter filename to load","data/rotation_matrices.dat") + if len(filename) == 0: + print 'Error, No filename given' + sys.exit() + if not os.path.exists(filename): + print "File does not exist" + sys.exit() + res = Popen(['./load_rotation_matrices.py',filename]).wait() + if (res != 0): sys.exit(1) + #time.sleep(3) + +def load_hba_rotations(): + print 'Loading hba field rotations' + filename = getInputWithDefault("enter filename to load","data/hba-rotations.csv") + if 
len(filename) == 0: + print 'Error, No filename given' + sys.exit() + if not os.path.exists(filename): + print "File does not exist" + sys.exit() + res = Popen(['./load_hba_rotations.py',filename]).wait() + if (res != 0): sys.exit(1) + #time.sleep(3) + +def calculate_hba_deltas(): + print 'calculating hba-deltas' + #time.sleep(3) + res = Popen(['./calc_hba_deltas.py']).wait() + if (res != 0): sys.exit(1) + +def load_all_ETRF(): + print 'loading all ETRF files from .//ETRF_FILES' + os.chdir(os.curdir+'/ETRF_FILES') + dirs = os.listdir(os.curdir) + for dir in dirs: + os.chdir(os.curdir+'/'+dir) + files = os.listdir(os.curdir) + for filename in files: + if not os.path.exists(filename): + print "File ",filename,"does not exist" + sys.exit() + res = Popen(['../../load_expected_pos.py',filename]).wait() + if (res != 0): sys.exit(1) + os.chdir(os.pardir) + os.chdir(os.pardir) + +def load_measurement(): + print 'load one measurement file' + filename = getInputWithDefault("enter filename to load","") + if len(filename) == 0: + print 'Error, No filename given' + sys.exit() + if not os.path.exists(filename): + print "File ",filename,"does not exist" + sys.exit() + res = Popen(['./load_measurementfile.py',filename]).wait() + if (res != 0): sys.exit(1) + +def transform_all(): + db = pg.connect(user="postgres", host=dbHost, dbname=dbName) + print 'Transform all ETRF coordinates to ITRF coordinates for given date' + target = getInputWithDefault("Enter target_date",default_targetdate) + all_stations=db.query("select distinct o.stationname from object o inner join field_rotations r on r.id = o.id").getresult(); + ref_stations=db.query("select distinct o.stationname from object o inner join reference_coord r on r.id = o.id").getresult(); + + for stationname in ref_stations: + station = stationname[0] + if 0 != Popen(['./calc_coordinates.py',station,"LBA",target]).wait(): sys.exit(1) + if 0 != Popen(['./calc_coordinates.py',station,"CLBA",target]).wait(): sys.exit(1) + #if 
station[:1] == 'C': # core station + if 0 != Popen(['./calc_coordinates.py',station,"HBA0",target]).wait(): sys.exit(1) + if 0 != Popen(['./calc_coordinates.py',station,"CHBA0",target]).wait(): sys.exit(1) + if 0 != Popen(['./calc_coordinates.py',station,"HBA1",target]).wait(): sys.exit(1) + if 0 != Popen(['./calc_coordinates.py',station,"CHBA1",target]).wait(): sys.exit(1) + #else: #remote or international station + if 0 != Popen(['./calc_coordinates.py',station,"HBA",target]).wait(): sys.exit(1) + if 0 != Popen(['./calc_coordinates.py',station,"CHBA",target]).wait(): sys.exit(1) + + db.close() + missing_stations=list(set(all_stations) - set(ref_stations)) + for stationname in missing_stations: + station = stationname[0] + print "Station with known HBA rotation but no ETRF: ",station + + +def transform_one(): + print 'Transform ETRF coordinates to ITRF coordinates for given station and date' + station = getInputWithDefault("Enter station ","") + anttype = getInputWithDefault("Enter type (LBA|HBA|HBA0|HBA1|CLBA|CHBA0|CHBA1|CHBA)","") + target = getInputWithDefault("Enter target_date ",default_targetdate) + res = Popen(['./calc_coordinates.py',station,anttype,target]).wait() + if (res != 0): sys.exit(1) + +def make_all_conf_files(): + db = pg.connect(user="postgres", host=dbHost, dbname=dbName) + print 'Make all AntennaField.conf and iHBADeltas.conf files for given date' + target = getInputWithDefault("Enter target_date",default_targetdate) + for stationname in db.query("select distinct o.stationname from object o inner join reference_coord r on r.id = o.id").getresult(): + station = stationname[0] + res = Popen(['./make_conf_files.py',station,target]).wait() + if (res != 0): sys.exit(1) + res = Popen(['./make_all_station_file.py',target]).wait() + if (res != 0): sys.exit(1) + db.close() + +def make_one_conf_file(): + print 'Make one AntennaField.conf and iHBADeltas.conf file for given date' + station = getInputWithDefault("Enter station ","") + target = 
getInputWithDefault("Enter target_date",default_targetdate) + res = Popen(['./make_conf_files.py',station,target]).wait() + if (res != 0): sys.exit(1) + + +if __name__ == "__main__": + while(1): + menu() + sel = raw_input('Enter choice :') + if sel.upper() == 'Q': sys.exit(1) + if sel == '1': create_CDB() + if sel == '2': create_CDB_objects() + if sel == '3': load_normal_vectors() + if sel == '4': load_rotation_matrices() + if sel == '5': load_hba_rotations() + if sel == '6': calculate_hba_deltas() + if sel == '7': load_all_ETRF() + if sel == '8': load_measurement() + if sel == '9': transform_all() + if sel == '10': transform_one() + if sel == '11': make_all_conf_files() + if sel == '12': make_one_conf_file() + if sel == '0': + create_CDB() + create_CDB_objects() + load_normal_vectors() + load_rotation_matrices() + load_hba_rotations() + calculate_hba_deltas() + load_all_ETRF() + transform_all() + make_all_conf_files() + + + diff --git a/MAC/Deployment/data/OTDB/DPPP.comp b/MAC/Deployment/data/OTDB/DPPP.comp index e9cb7f00021d7d3ec4cc91f6ab65a0b75fd86c17..e94ace2ce62834918bc67761c80f2065822b8e0f 100644 --- a/MAC/Deployment/data/OTDB/DPPP.comp +++ b/MAC/Deployment/data/OTDB/DPPP.comp @@ -31,6 +31,9 @@ par startchan I text - 10 0 "nchan/32" - "First channel to use in each #par starttime I text - 10 0 "" - "Format: 19Feb2010/14:01:23.817" par useflag I bool - 10 0 TRUE - "Use the current flags in the MS" +node storagemanager 4.0.0 development 'node constraint' "StorageManager settings" +par name I ptext - 10 0 "|dysco;dysco" - "What storage manager to use. When empty (DPPP default), the data will be stored uncompressed. When set to 'dysco' (OTDB default), the data will be compressed." 
+ node msout 4.0.0 development 'node constraint' "Output MeasurementSet" #par clusterdesc I text - 10 0 "-" - "If not empty, create the VDS file using this ClusterDesc file" #par datacolumn I text - 10 0 "DATA" - "The column in which to write the data" @@ -40,6 +43,7 @@ par tilenchan I int - 10 0 0 - "For expert user: maximum number of par tilesize I int - 10 0 4096 - "For expert user: tile size (in Kbytes) for the data columns in the output MS" par vdsdir I text - 10 0 "A" - "Directory where to put the VDS file; if empty, the MS directory is used." par writefullresflag I bool - 10 0 T - "Write the full resolution flags" +uses storagemanager 4.0.0 development 1 "StorageManager" node counter 4.0.0 development 'node constraint' "Count flags" par type I text - 10 0 "counter" - "Type of the flagger, do not change" diff --git a/MAC/Deployment/data/OTDB/feedBackNodes/Output_Beamformed_.comp b/MAC/Deployment/data/OTDB/feedBackNodes/Output_Beamformed_.comp index dfb7a5cbd1cfbb49c3327fc2b14eae7d854425a2..cad2ca5ba12f5c2cdd52e8e1836110f888205a8f 100644 --- a/MAC/Deployment/data/OTDB/feedBackNodes/Output_Beamformed_.comp +++ b/MAC/Deployment/data/OTDB/feedBackNodes/Output_Beamformed_.comp @@ -46,6 +46,8 @@ par nrOfCoherentStokesBeams O uint - 10 0 0 - '' par nrOfIncoherentStokesBeams O uint - 10 0 0 - '' par nrOfFlysEyeBeams O uint - 10 0 0 - '' par beamTypes O vtext - 10 0 '[]' - 'vector that indicates the beamtype order' +par storageWriter O ptext - 10 0 'CASA|LOFAR|DYSCO|HDF5DEFAULT|UNKNOWN;HDF5DEFAULT' - '' +par storageWriterVersion O text - 10 0 '' - '' # name vers qual constr. descr. 
diff --git a/MAC/Deployment/data/OTDB/feedBackNodes/Output_Correlated_.comp b/MAC/Deployment/data/OTDB/feedBackNodes/Output_Correlated_.comp index 7737da3af56e0669c62ffc64d5d9c7ca2e19cfa0..004e5d059f1c2cb571e723ca6e5078f23bce9748 100644 --- a/MAC/Deployment/data/OTDB/feedBackNodes/Output_Correlated_.comp +++ b/MAC/Deployment/data/OTDB/feedBackNodes/Output_Correlated_.comp @@ -51,4 +51,6 @@ par channelWidth O dbl Hz 10 0 0 - 'In Hz. Using base par channelsPerSubband O uint - 10 0 0 - '' par subband O uint - 10 0 0 - 'Index given by OLAP, purely administative' par stationSubband O uint - 10 0 0 - 'Index of subband on the station, selects freq. range' -par SAP O uint - 10 0 0 - 'Index of SubArrayPointing' \ No newline at end of file +par SAP O uint - 10 0 0 - 'Index of SubArrayPointing' +par storageWriter O ptext - 10 0 'CASA|LOFAR|DYSCO|HDF5DEFAULT|UNKNOWN' - '' +par storageWriterVersion O text - 10 0 '' - '' \ No newline at end of file diff --git a/MAC/Deployment/data/OTDB/feedBackNodes/Output_InstrumentModel_.comp b/MAC/Deployment/data/OTDB/feedBackNodes/Output_InstrumentModel_.comp index 4f9f25be05f6d96577dd6677ffd7594aab0d8f7a..b7041e7d4423f3ab67b621373aee08dd59f3f75e 100644 --- a/MAC/Deployment/data/OTDB/feedBackNodes/Output_InstrumentModel_.comp +++ b/MAC/Deployment/data/OTDB/feedBackNodes/Output_InstrumentModel_.comp @@ -42,4 +42,5 @@ par fileFormat O ptext - 10 0 'FITS|AIPS++/CASA| par filename O text - 10 0 '' - '' par location O text - 10 0 '' - '' par percentageWritten O uint - 10 0 0 - '' - +par storageWriter O ptext - 10 0 'CASA|LOFAR|DYSCO|HDF5DEFAULT|UNKNOWN;CASA' - '' +par storageWriterVersion O text - 10 0 '' - '' diff --git a/MAC/Deployment/data/OTDB/feedBackNodes/Output_Pulsar_.comp b/MAC/Deployment/data/OTDB/feedBackNodes/Output_Pulsar_.comp index 34a0c07db6eb5de71fc4e55ae617c0ecefa7b011..83e482e9aacfd43f91e52348da585e66b8d7565b 100644 --- a/MAC/Deployment/data/OTDB/feedBackNodes/Output_Pulsar_.comp +++ 
b/MAC/Deployment/data/OTDB/feedBackNodes/Output_Pulsar_.comp @@ -44,6 +44,8 @@ par location O text - 10 0 '' - ' par percentageWritten O uint - 10 0 0 - '' par datatype O ptext - 10 0 'CoherentStokes|IncoherentStokes|ComplexVoltages|SummaryCoherentStokes|SummaryIncoherentStokes|SummaryComplexVoltages' - '' par fileContent 0 vtext - 10 0 '' - 'A list of files in the tar ball' +par storageWriter O ptext - 10 0 'CASA|LOFAR|DYSCO|HDF5DEFAULT|UNKNOWN;UNKNOWN' - '' +par storageWriterVersion O text - 10 0 '' - '' # Note: for summary information no beam will be present below. # Depending on the type of TAB, _one_ of the structs below should be present if it's not Summary information. @@ -53,3 +55,4 @@ uses CoherentStokesBeam 4.0.0 development '1' "CoherentStokesBeam uses IncoherentStokesBeam 4.0.0 development '1' "IncoherentStokesBeam specification" uses FlysEyeBeam 4.0.0 development '1' "FlysEyeBeam specification" + diff --git a/MAC/Deployment/data/OTDB/feedBackNodes/Output_SkyImages_.comp b/MAC/Deployment/data/OTDB/feedBackNodes/Output_SkyImages_.comp index 21df9d53361017083466b31daf4373c66360199b..c1fb408edf450e641619d054bf8a3939bfaa8527 100644 --- a/MAC/Deployment/data/OTDB/feedBackNodes/Output_SkyImages_.comp +++ b/MAC/Deployment/data/OTDB/feedBackNodes/Output_SkyImages_.comp @@ -54,6 +54,8 @@ par restoringBeamMinorValue O dbl - 10 0 '' par restoringBeamMinorUnit O text - 10 0 '' - 'From imageinfo:restoringbeam.minor' par rmsNoiseValue O dbl - 10 0 0 - 'Average StokesI RMS noise' par rmsNoiseUnit O text - 10 0 0 - 'Average StokesI RMS noise' +par storageWriter O ptext - 10 0 'CASA|LOFAR|DYSCO|HDF5DEFAULT|UNKNOWN' - '' +par storageWriterVersion O text - 10 0 '' - '' # name vers qual constr. descr. 
#-------------------------------------------------------------------------------------------------------- diff --git a/MAC/Deployment/data/StaticMetaData/RSPConnections_local.dat b/MAC/Deployment/data/StaticMetaData/RSPConnections_local.dat index 27f5bb66ca09145c25d78c525efc78642d40a02a..c4f3ee971564d58487312da919ac2db5afd257be 100644 --- a/MAC/Deployment/data/StaticMetaData/RSPConnections_local.dat +++ b/MAC/Deployment/data/StaticMetaData/RSPConnections_local.dat @@ -35,8 +35,8 @@ DE605_02 172.20.101.116 90:1b:0e:43:1b:d5 lofarD2-10-GbE DE605_03 172.20.101.114 90:1b:0e:43:1c:15 lofarD1-10-GbE FR606_00 10.211.6.2 00:25:90:92:58:CC ALLEGRO1-FR606 -FR606_01 10.212.6.2 00:25:90:61:77:40 ALLEGRO2-FR606 -FR606_02 10.213.6.2 00:25:90:61:78:14 ALLEGRO3-FR606 +FR606_01 10.212.6.2 0C:C4:7A:2B:02:8D ALLEGRO2-FR606-since-2018-07 +FR606_02 10.213.6.2 00:25:90:61:77:40 ALLEGRO3-FR606-since-2018-07 FR606_03 10.214.6.2 00:25:90:61:77:86 ALLEGRO4-FR606 SE607_00 10.211.7.2 00:60:dd:45:66:67 Dvalin-eth10 diff --git a/MAC/Deployment/data/StaticMetaData/StationInfo.dat b/MAC/Deployment/data/StaticMetaData/StationInfo.dat index 059a6d4ab52af2b45802fec1b7534090e9838ced..8d42cf36723b5cc25cec745b50263d9403d1c7ae 100644 --- a/MAC/Deployment/data/StaticMetaData/StationInfo.dat +++ b/MAC/Deployment/data/StaticMetaData/StationInfo.dat @@ -153,6 +153,10 @@ CCU199 84 ## spare (90-99) ## 90 .. 
99 +## Concentrator node +CN001 0 C 6.8666964 52.9102978 58.60 0 0 0 0 0 No No No + +## ## Test systems # name ID ring long lat height nrRSP nrTBB nrLBA nrHBA nrPowec HBAsplit LBAcal Aartfaac #---------------------------------------------------------------------------------------------------------------- diff --git a/MAC/GCF/TM/src/mac.log_prop b/MAC/GCF/TM/src/mac.log_prop index 3fdbd9935578d0f234880d200c624126f7c1ae9e..2e73cf7b86b1f3e45e54bf6619a1a55d99391a26 100644 --- a/MAC/GCF/TM/src/mac.log_prop +++ b/MAC/GCF/TM/src/mac.log_prop @@ -35,14 +35,14 @@ log4cplus.appender.STDOUT.layout.ConversionPattern=%D{%Y-%m-%d %H:%M:%S.%q} %-5p log4cplus.appender.DAILYFILE=log4cplus::DailyRollingFileAppender log4cplus.appender.DAILYFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log log4cplus.appender.DAILYFILE.Schedule=DAILY -log4cplus.appender.DAILYFILE.MaxBackupIndex=14 +log4cplus.appender.DAILYFILE.MaxBackupIndex=99 log4cplus.appender.DAILYFILE.layout=log4cplus::PatternLayout log4cplus.appender.DAILYFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n log4cplus.appender.ROLFILE=log4cplus::RollingFileAppender log4cplus.appender.ROLFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log log4cplus.appender.ROLFILE.MaxFileSize=10MB -log4cplus.appender.ROLFILE.MaxBackupIndex=9 +log4cplus.appender.ROLFILE.MaxBackupIndex=999 log4cplus.appender.ROLFILE.layout=log4cplus::PatternLayout log4cplus.appender.ROLFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n diff --git a/MAC/GCF/TM/src/mac_debug.log_prop b/MAC/GCF/TM/src/mac_debug.log_prop index 8b5d96975323571a9ef3008a40bc0de653c7b746..74aa4297b6daaaf795daedf3f0b0a101fc82af2a 100644 --- a/MAC/GCF/TM/src/mac_debug.log_prop +++ b/MAC/GCF/TM/src/mac_debug.log_prop @@ -35,14 +35,14 @@ log4cplus.appender.STDOUT.layout.ConversionPattern=%D{%Y-%m-%d %H:%M:%S.%q} %-5p log4cplus.appender.DAILYFILE=log4cplus::DailyRollingFileAppender 
log4cplus.appender.DAILYFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log log4cplus.appender.DAILYFILE.Schedule=DAILY -log4cplus.appender.DAILYFILE.MaxBackupIndex=14 +log4cplus.appender.DAILYFILE.MaxBackupIndex=99 log4cplus.appender.DAILYFILE.layout=log4cplus::PatternLayout log4cplus.appender.DAILYFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n log4cplus.appender.ROLFILE=log4cplus::RollingFileAppender log4cplus.appender.ROLFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log log4cplus.appender.ROLFILE.MaxFileSize=10MB -log4cplus.appender.ROLFILE.MaxBackupIndex=9 +log4cplus.appender.ROLFILE.MaxBackupIndex=999 log4cplus.appender.ROLFILE.layout=log4cplus::PatternLayout log4cplus.appender.ROLFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n diff --git a/MAC/GCF/TM/src/mac_nopvss.log_prop b/MAC/GCF/TM/src/mac_nopvss.log_prop index f13c38b9a7c954d126d3f0c85de76ef18bc6eaeb..35d061ada0f53aead772c17443fd2635b95009ec 100644 --- a/MAC/GCF/TM/src/mac_nopvss.log_prop +++ b/MAC/GCF/TM/src/mac_nopvss.log_prop @@ -39,14 +39,14 @@ log4cplus.appender.STDOUT.layout.ConversionPattern=%D{%Y-%m-%d %H:%M:%S.%q} %-5p log4cplus.appender.DAILYFILE=log4cplus::DailyRollingFileAppender log4cplus.appender.DAILYFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log log4cplus.appender.DAILYFILE.Schedule=DAILY -log4cplus.appender.DAILYFILE.MaxBackupIndex=14 +log4cplus.appender.DAILYFILE.MaxBackupIndex=99 log4cplus.appender.DAILYFILE.layout=log4cplus::PatternLayout log4cplus.appender.DAILYFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n log4cplus.appender.ROLFILE=log4cplus::RollingFileAppender log4cplus.appender.ROLFILE.File=${LOFARROOT}/var/log/${LOG4CPLUS_LOGFILENAME}.log log4cplus.appender.ROLFILE.MaxFileSize=10MB -log4cplus.appender.ROLFILE.MaxBackupIndex=9 +log4cplus.appender.ROLFILE.MaxBackupIndex=999 log4cplus.appender.ROLFILE.layout=log4cplus::PatternLayout 
log4cplus.appender.ROLFILE.layout.ConversionPattern=%x %D{%Y-%m-%d %H:%M:%S.%q} %-5p %c{3} - %m [%.25l]%n diff --git a/MAC/Navigator2/panels/Alerts/alarmsWinCCOA_Filtering.pnl b/MAC/Navigator2/panels/Alerts/alarmsWinCCOA_Filtering.pnl new file mode 100644 index 0000000000000000000000000000000000000000..3eba37b99ded14e534e07a45b6805359f111e2a9 --- /dev/null +++ b/MAC/Navigator2/panels/Alerts/alarmsWinCCOA_Filtering.pnl @@ -0,0 +1,691 @@ +V 13 +1 +LANG:1 0 +PANEL,-1 -1 1050 974 N "_3DFace" 1 +"$SYSTEM" +"main() +{ + Init(); +}" 0 + E E E E 1 -1 -1 0 -350 1100 +""0 1 +E "bool g_alertRow; +dyn_string g_systemSelections, g_closedIdf; +bool g_checkAll; +int g_tabType = AESTAB_TOP; +int g_screenType = AESTYPE_EVENTS; +string g_propDpName = aes_getPropDpName(AES_DPTYPE_PROPERTIES, true ); + + +void Init() +{ + dyn_string dsSystemNames; + dyn_int diSystemIds; + + FILT_BEGIN.dateTime = getCurrentTime() - ( 1 * 86400 ); + FILT_END.dateTime = getCurrentTime() + 3600; + + // Default filter on 'Broken' + FILT_PRIO.state(5) = true; + + getSystemNamesConnected( dsSystemNames, diSystemIds ); + dynAppend( dsSystemNames, getSystemName() ); + + for( int i=1; i<=dynlen(dsSystemNames); i++ ) + { + strreplace( dsSystemNames[i], \":\", \"\" ); + } + + dynSort( dsSystemNames ); + dynInsertAt( dsSystemNames, \"All\", 1 ); // No filtering on all systems! 
+ + FILT_SYSTEM.items = dsSystemNames; + + if( isDollarDefined( \"$SYSTEM\") ) + { + FILT_SYSTEM.text = $SYSTEM; + } + + FILT_STATECOMMENT.items = makeDynString( \"\", + \"REPAIRED (REPAIRED)\", + \"OSCILLATION (OSC)\", + \"FLAT (FLAT)\", + \"HIGH NOISE (HN)\", + \"SHORT (SHORT)\", + \"SPURIOUS (SPUR)\", + \"LOW NOISE (LN)\", + \"DOWN (DOWN)\", + \"SUMMATOR NOISE (SN)\", + \"C-SUMMATOR (CSUM)\", + \"JITTER > 3dB (JIT)\", + \"RF element FAIL (E_FAIL)\", + \"MODEM element FAIL (MODEM)\", + \"MISSING (MISSING)\" ); +} + +void FilterSet( bool bStart = false) +{ + dyn_string dsFiltSystems, dsPrios; + +// DebugTN( \"g_propDpName = \" + g_propDpName ); + + aes_doStop( g_propDpName ); + delay(0,100); + + // FILTER MODE AND TIMERANGE ==================================================== + if( FILT_MODE.number == 0 ) + { + // Actual alerts + dpSetCache( g_propDpName + \".Both.Timerange.Type\", AES_MODE_CURRENT, + g_propDpName + \".Both.Timerange.Type\", 0, + g_propDpName + \".Both.Timerange.Begin\", FILT_BEGIN.dateTime, + g_propDpName + \".Both.Timerange.End\", FILT_END.dateTime ); + } + else + { + // Historical alerts + dpSetCache( g_propDpName + \".Both.Timerange.Type\", AES_MODE_CLOSED, + g_propDpName + \".Both.Timerange.Type\", 2, + g_propDpName + \".Both.Timerange.Selection\", 6, + g_propDpName + \".Both.Timerange.Begin\", FILT_BEGIN.dateTime, + g_propDpName + \".Both.Timerange.End\", FILT_END.dateTime ); + } + + + // FILTER SYSTEMS =============================================================== + g_checkAll = ( FILT_SYSTEM.selectedText == \"All\"); + + // Set selected system + if( g_checkAll ) + { +// g_systemSelections = dsFiltSystems; + dpSetCache( g_propDpName + \".Both.Systems.CheckAllSystems\", true, + g_propDpName + \".Both.Systems.Selections\", \"MCU001\" ); + } + else + { + dsFiltSystems = makeDynString( FILT_SYSTEM.selectedText ); + g_systemSelections = dsFiltSystems; + dpSetCache( g_propDpName + \".Both.Systems.CheckAllSystems\", false, + g_propDpName + 
\".Both.Systems.Selections\", dsFiltSystems ); + } + + + // Now set DPE's to filter selected systems + + + + + // FILTER PRIO's/STATES ========================================================= + if( FILT_PRIO.state(0) ) + dynAppend( dsPrios, \"1\" ); + + if( FILT_PRIO.state(1) ) + dynAppend( dsPrios, \"10\" ); + + if( FILT_PRIO.state(2) ) + dynAppend( dsPrios, \"20\" ); + + if( FILT_PRIO.state(3) ) + dynAppend( dsPrios, \"30\" ); + + if( FILT_PRIO.state(4) ) + dynAppend( dsPrios, \"40\" ); + + if( FILT_PRIO.state(5) ) + dynAppend( dsPrios, \"50\" ); + + if( FILT_PRIO.state(6) ) + dynAppend( dsPrios, \"60\" ); + +// DebugN( \" g_checkAll: \" + g_checkAll ); +// DebugN( \" dsFiltSystems: \" + dynStringToString(dsFiltSystems ) ); +// DebugN( \" dsPrios: \" + dynStringToString(dsPrios) ); + + dpSetCache( g_propDpName + \".Alerts.Filter.Prio\", dynStringToString( dsPrios, \",\" ) ); + + + // FILTER ALERT TEXT ========================================================= +// dpSetCache( g_propDpName + \".Alerts.Filter.AlertText\", FILT_ALERTTEXT.text ); + string strStateComment = FILT_STATECOMMENT.selectedText; + DebugTN(\"strStateComment: \" + strStateComment ); + if( strStateComment != \"\" ) + { + + if( patternMatch( \"*(*)*\", strStateComment ) ) + { + strStateComment = strip_comment( strStateComment ); + } + + dpSetCache( g_propDpName + \".Alerts.Filter.Add_Value_Index\", makeDynInt( 5 ), + g_propDpName + \".Alerts.Filter.Add_Value_Combine\", makeDynString( \"\" ), + g_propDpName + \".Alerts.Filter.Add_Value_Compare\", makeDynString( \"LIKE\" ), + g_propDpName + \".Alerts.Filter.Add_Value\", makeDynString( strStateComment ) ); + } + else + { + dpSetCache( g_propDpName + \".Alerts.Filter.Add_Value_Index\", makeDynInt(), + g_propDpName + \".Alerts.Filter.Add_Value_Combine\", makeDynString(), + g_propDpName + \".Alerts.Filter.Add_Value_Compare\", makeDynString(), + g_propDpName + \".Alerts.Filter.Add_Value\", makeDynString() ); + } + + delay(1,100); + + // GO !!!!!!! 
=================================================================== + if( bStart ) + { + // aes_doStop( g_propDpName ); + aes_doStart( g_propDpName ); + } +} + + + + +string strip_comment(string txt) { + string code = substr(txt,strpos(txt,\"(\")+1,(strpos(txt,\")\")) - (strpos(txt,\"(\")+1)); + return code; +} + + +" 0 + 3 +"CBRef" "1" +"EClose" E +"dpi" "96" +0 0 0 +"" +DISPLAY_LAYER, 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 +LAYER, 0 +1 +LANG:1 0 +1 51 0 "" 26 +0 +30 17 +"FRAME1" +"" +1 5 804 E E E 1 E 1 E N "_WindowText" E N {0,0,0} E E + E E +0 0 0 0 0 0 +E E E +1 +1 +LANG:1 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E 1.068558423320904 0 1.781249999999999 4.592897323714042 -1432.124999999999 0 E 5 804 971 901 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 9 Filtering +13 18 +"FILT_GO" +"" +1 20 146.0000000000002 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +1 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 18 140 112 164 + +T +1 +LANG:1 5 Go ! 
+ +"main() +{ + FilterSet( true ); +} +" 0 + E E E +2 37 +"PRIMITIVE_TEXT1" +"" +1 380 21.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +4 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 380 21.00000000000023 431 36 +0 2 2 "0s" 0 0 0 192 0 0 380 21.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 7 System: +2 38 +"PRIMITIVE_TEXT2" +"" +1 520 21.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +5 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 520 21.00000000000023 584 36 +0 2 2 "0s" 0 0 0 192 0 0 520 21.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 11 Prio/State: +20 39 +"FILT_PRIO" +"" +1 520 36.00000000000046 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +6 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 518 34 652 158 +7 +T +1 +LANG:1 7 Off (0) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 16 Operational (10) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 16 Maintenance (20) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 9 Test (30) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 15 Suspicious (40) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 11 Broken (50) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 15 DP Offline (60) + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +0 +EE2 41 +"PRIMITIVE_TEXT3" +"" +1 670 21.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +8 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 670 21.00000000000023 765 36 +0 2 2 "0s" 0 0 0 192 0 0 670 21.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 14 State comment: +2 42 +"PRIMITIVE_TEXT4" +"" +1 20 21.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +9 
0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 20 21.00000000000023 58 36 +0 2 2 "0s" 0 0 0 192 0 0 20 21.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 5 Mode: +19 43 +"FILT_MODE" +"" +1 20 36.00000000000023 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +10 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 18 34 107 73 +2 +T +1 +LANG:1 6 Actual + +1 +1 +LANG:1 0 +E E +0 0 0 0 0 +T +1 +LANG:1 10 Historical + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +1 +E "main(int button) +{ + + if( FILT_MODE.number == 0 ) + { + FILT_BEGIN.enabled = false; + FILT_END.enabled = false; + } + else + { + FILT_BEGIN.enabled = true; + FILT_END.enabled = true; + } +}" 0 + +29 44 +"FILT_BEGIN" +"" +1 165 41.00000000000023 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +11 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"displayFormat" "string yyyy-M-d hh:mm:ss" +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 165 41 315 61 +16 DateTimeEdit.ewo +0 +E29 45 +"FILT_END" +"" +1 165 66.00000000000023 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +12 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"displayFormat" "string yyyy-M-d hh:mm:ss" +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 165 66 315 86 +16 DateTimeEdit.ewo +0 +E2 46 +"PRIMITIVE_TEXT5" +"" +1 130 21.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +13 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 130 21.00000000000023 196 36 +0 2 2 "0s" 0 0 0 192 0 0 130 21.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 10 Timerange: +2 47 +"PRIMITIVE_TEXT6" +"" +1 130 43.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +14 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 130 43.00000000000023 169 
58 +0 2 2 "0s" 0 0 0 192 0 0 130 43.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 6 Begin: +2 48 +"PRIMITIVE_TEXT7" +"" +1 130 68.00000000000023 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +15 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 130 68.00000000000023 159 83 +0 2 2 "0s" 0 0 0 192 0 0 130 68.00000000000023 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 4 End: +13 49 +"PUSH_BUTTON1" +"" +1 320 43.58181818181822 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +16 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 318 37 362 64 + +T +1 +LANG:1 3 Now +"main() +{ + FILT_BEGIN.dateTime = getCurrentTime(); +} +" 0 + E E E +13 50 +"PUSH_BUTTON2" +"" +1 320 69.8090909090912 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +17 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 318 63 362 89 + +T +1 +LANG:1 3 Now +"main() +{ + FILT_END.dateTime = getCurrentTime(); +} +" 0 + E E E +22 67 +"FILT_SYSTEM" +"" +1 379.9999999999999 44 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +18 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 378 38 502 62 +0 + +E +E +E + 0 0 +22 68 +"FILT_STATECOMMENT" +"" +1 668 44 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +19 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 666 38 922 62 +0 + +E +E +E + 1 0 +0 +LAYER, 1 +1 +LANG:1 0 +0 +LAYER, 2 +1 +LANG:1 0 +0 +LAYER, 3 +1 +LANG:1 0 +0 +LAYER, 4 +1 +LANG:1 0 +0 +LAYER, 5 +1 +LANG:1 0 +0 +LAYER, 6 +1 +LANG:1 0 +0 +LAYER, 7 +1 +LANG:1 0 +0 +3 0 "PANEL_REF0" -1 +"" "" +"vision/aes/AEScreen.pnl" -350 1100 T 0 1 0 1 418 311 +3 +"$ACTION""0" +"$FILENAME""" +"$SCREENTYPE""aes_alerts_LOFAR" +0 diff --git a/MAC/Navigator2/panels/Hardware/Station.pnl b/MAC/Navigator2/panels/Hardware/Station.pnl index 
6d6af281f5c8ea86250adbe7a6b6b48697fda555..4c93becf04881d127ec8ebe733593a806ce34306 100644 --- a/MAC/Navigator2/panels/Hardware/Station.pnl +++ b/MAC/Navigator2/panels/Hardware/Station.pnl @@ -473,7 +473,7 @@ E E 0 1 1 2 1 E U 1 E 1002 23 1077 35 LANG:1 33 MS Shell Dlg,-1,11,5,75,0,0,0,0,0 0 1 LANG:1 13 Select Array: -1 167 5 "" 2190 +1 194 5 "" 2190 0 19 53 "arrayList" @@ -833,7 +833,7 @@ LANG:1 11 ShiftFactor LAYER, 1 1 LANG:1 6 Layer2 -1 145 2 "" 40 +1 172 2 "" 40 0 0 LAYER, 2 diff --git a/MAC/Navigator2/panels/Hardware/Station_List.pnl b/MAC/Navigator2/panels/Hardware/Station_List.pnl index d91fd47116d8bf7b2339be745f507d7ae493d75e..085fb72fc89444c5d14141996d26b2b24d07f3ab 100644 --- a/MAC/Navigator2/panels/Hardware/Station_List.pnl +++ b/MAC/Navigator2/panels/Hardware/Station_List.pnl @@ -62,6 +62,7 @@ PANEL,-1 -1 1200 823 N "_3DFace" 0 } } } + } // @@ -148,6 +149,10 @@ float HBA1Angle=0; string strPanelName; string strModuleName; + + + + void clearRefNames() { // remove all symbols because new ones will be added for (int i=1; i <= dynlen(refNames); i++) { @@ -299,6 +304,10 @@ void updateField() { } } + + + + " 0 3 "CBRef" "1" @@ -311,7 +320,7 @@ DISPLAY_LAYER, 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 LAYER, 0 1 LANG:1 6 Layer1 -1 1 5 "" 2190 +1 27 5 "" 2190 0 2 5 "PRIMITIVE_TEXT1" @@ -355,6 +364,41 @@ E E 0 1 3 2 1 E 2.666666666666665 0 2.625000000000001 233.0303030303033 -152.499 LANG:1 35 MS Shell Dlg 2,-1,11,5,75,0,0,0,0,0 0 1 LANG:1 3 HBA +29 26 +"EMBEDDED_MODULE_ALARMS" +"" +1 0 680 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +18 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +11 +"ModuleName" "string StationAlarms" +"oldPanelAnimType" "string None" +"newPanelAnimType" "string None" +"oldPanelAnimateSize" "bool TRUE" +"newPanelAnimateSize" "bool TRUE" +"oldPanelAnimateOpacity" "bool TRUE" +"newPanelAnimateOpacity" "bool TRUE" +"oldPanelAnimDuration" "int 0" +"newPanelAnimDuration" "int 0" +"horizontalScrollBarPolicy" "enum 1" +"verticalScrollBarPolicy" "enum 1" +1 
+LANG:1 35 MS Shell Dlg 2,-1,11,5,75,0,0,0,0,0 +0 0 680 1200 820 +15 Canvas.Canvas.1 +0 +"main() +{ + RootPanelOnModule( \"objects/Alerts/Station_Alerts.pnl\", + \"StationAlerts\", + this.ModuleName(), + makeDynString() ); +}" 0 0 LAYER, 1 1 diff --git a/MAC/Navigator2/panels/Popups/Popup_SetState.pnl b/MAC/Navigator2/panels/Popups/Popup_SetState.pnl index d0adbdb1f3cfc0510807c3033d40c2206e988e9a..5d27f8f841d71a7eeeb7bfca789cf083a3bd5546 100644 --- a/MAC/Navigator2/panels/Popups/Popup_SetState.pnl +++ b/MAC/Navigator2/panels/Popups/Popup_SetState.pnl @@ -1,7 +1,7 @@ V 13 1 LANG:1 0 -PANEL,-1 -1 555 360 N "_3DFace" 1 +PANEL,-1 -1 504 576 N "_3DFace" 1 "$baseDP" "main() { @@ -97,6 +97,7 @@ void Init() _mStateNr[stateText] = stateNr; } + GetHistory(); } void fill_cb_comment() @@ -244,6 +245,58 @@ delay(0,100); PanelOff(); } + + + +void GetHistory() +{ + string strQuery; + dyn_dyn_anytype ddaHist; + time tNow = getCurrentTime(); + + DebugTN( __FUNCTION__, _baseDP ); + + strQuery = \"SELECT ALERT '_alert_hdl.._prior', '_alert_hdl.._add_value_5', '_alert_hdl.._direction', '_alert_hdl.._sum' \" + + \" FROM '\" + _baseDP + \".status.state' \" + + \" REMOTE '\" + dpSubStr( _baseDP, DPSUB_SYS ) + \"'\" + + \" WHERE ('_alert_hdl.._direction' == 1) AND ('_alert_hdl.._sum' == 0)\" + + \" TIMERANGE(\\\"\" + (string)tNow + \"\\\",\\\"\" + (string)tNow + \"\\\",1,50)\"; + + dpQuery( strQuery, ddaHist ); + +// DebugTN( strQuery, ddaHist ); + + for( int i=2; i<=dynlen(ddaHist); i++ ) + { + int iPrio = ddaHist[i][3]; + string strColor = ( iPrio == 1 ) ? getStateColor( 0 ) : getStateColor( iPrio ); // Prio state 1 means state 0 + time tCameTime = (time)ddaHist[i][2]; + string strStateText = ddaHist[i][4]; + + TABLEHIST.appendLine( \"PRIO\", makeDynString( iPrio, strColor ), + \"TIME\", (string)tCameTime, + \"STATE\", strStateText, + \"STATETYPE\", ( iPrio <= 10 ) ? 
\"NORMAL\" : \"\" ); + } + + TABLEHIST.sort( true, \"TIME\" ); + + ShowAllStates( false ); +} + + +void ShowAllStates( bool bShowAll ) +{ + if( bShowAll ) + { + TABLEHIST.filterRows( makeDynString( \"STATETYPE\" ), makeDynString( \"*\" ), true ); + } + else + { + TABLEHIST.filterRows( makeDynString( \"STATETYPE\" ), makeDynString( \"NORMAL\" ), false ); + } +} + " 0 3 "CBRef" "1" @@ -259,7 +312,7 @@ LANG:1 0 22 0 "cb_state" "" -1 20 35 E E E 1 E 1 E N "_WindowText" E N "_Window" E E +1 10 270 E E E 1 E 1 E N "_WindowText" E N "_Window" E E E E 0 0 0 0 0 0 E E E @@ -270,7 +323,7 @@ LANG:1 0 0 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 -0 18 35 242 64 +0 8 270 232 299 0 E @@ -284,7 +337,7 @@ E 2 2 "PRIMITIVE_TEXT2" "" -1 20 15 E E E 1 E 1 E N "_WindowText" E N "_Window" E E +1 10 250 E E E 1 E 1 E N "_WindowText" E N "_Window" E E E E 2 0 0 0 0 0 E E E @@ -296,8 +349,8 @@ LANG:1 0 "sizePolicy" "Fixed Fixed" "dashclr"N "_Transparent" "antiAliased" "0" -E E 0 2 3 2 1 E U 0 E 20 15 133 30 -0 2 2 "0s" 0 0 0 192 0 0 20 15 1 +E E 0 2 3 2 1 E U 0 E 10 250 123 265 +0 2 2 "0s" 0 0 0 192 0 0 10 250 1 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 0 1 @@ -305,7 +358,7 @@ LANG:1 20 Select state to set: 20 3 "check_recursive" "" -1 20 70 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E +1 10 305 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E E E 3 0 0 0 0 0 E E E @@ -316,7 +369,7 @@ LANG:1 0 0 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 -0 18 68 237 92 +0 8 303 227 327 1 T 1 @@ -335,7 +388,7 @@ E"main(int button, int state) 13 4 "nok" "" -1 405 300.5 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E +1 395 535.5 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E E E 5 0 0 0 27 0 E E E @@ -352,7 +405,7 @@ LANG:1 26 arial,-1,13,5,50,0,0,0,0,0 LANG:10000 26 arial,-1,13,5,50,0,0,0,0,0 LANG:10001 26 arial,-1,13,5,50,0,0,0,0,0 LANG:10027 26 arial,-1,13,5,50,0,0,0,0,0 -0 405 301 503 329 +0 395 536 493 564 T 4 @@ -369,7 +422,7 @@ LANG:10027 16 Отменить 13 5 "ok" "" -1 295 300.5 E E E 0 E 1 E N "_ButtonText" 
E N "_Button" E E +1 285 535.5 E E E 0 E 1 E N "_ButtonText" E N "_Button" E E E E 6 0 0 0 16777221 0 E E E @@ -386,7 +439,7 @@ LANG:1 26 arial,-1,13,5,50,0,0,0,0,0 LANG:10000 26 arial,-1,13,5,50,0,0,0,0,0 LANG:10001 26 arial,-1,13,5,50,0,0,0,0,0 LANG:10027 26 arial,-1,13,5,50,0,0,0,0,0 -0 295 301 394 329 +0 285 536 384 564 T 4 @@ -400,12 +453,12 @@ LANG:10027 2 OK } " 0 E E E -1 19 0 "" 0 +1 23 0 "" 0 0 2 9 "PRIMITIVE_TEXT3" "" -1 21.99999999999994 113 E E E 1 E 1 E N "_WindowText" E N "_Window" E E +1 11.99999999999994 348 E E E 1 E 1 E N "_WindowText" E N "_Window" E E E E 8 0 0 0 0 0 E E E @@ -417,8 +470,8 @@ LANG:1 0 "sizePolicy" "Fixed Fixed" "dashclr"N "_Transparent" "antiAliased" "0" -E E 0 2 3 2 1 E U 0 E 21.99999999999994 113 173 128 -0 2 2 "0s" 0 0 0 192 0 0 21.99999999999994 113 1 +E E 0 2 3 2 1 E U 0 E 11.99999999999994 348 163 363 +0 2 2 "0s" 0 0 0 192 0 0 11.99999999999994 348 1 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 0 1 @@ -426,7 +479,7 @@ LANG:1 22 Enter comment/message: 29 12 "message" "" -1 19.99999999999989 156 E E E 1 E 1 E N "_WindowText" E N "_Window" E E +1 9.999999999999998 372 E E E 1 E 1 E N "_WindowText" E N "_Window" E E E E 11 0 0 0 0 0 E E "main(bool enter) @@ -443,13 +496,13 @@ LANG:1 0 "layoutAlignment" "AlignTop" 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 -0 20 156 495 271 +0 10 372 490 487 15 PVSS.TextEdit.1 0 E22 15 "cb_comment" "" -1 281 35 E E E 1 E 1 E N "_WindowText" E N "_Window" E E +1 271 270 E E E 1 E 1 E N "_WindowText" E N "_Window" E E E E 12 0 0 0 0 0 E E E @@ -460,7 +513,7 @@ LANG:1 0 0 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 -0 279 35 503 64 +0 269 270 493 299 0 E @@ -474,7 +527,7 @@ E 2 18 "txt_ant" "" -1 283 18 E E E 1 E 1 E N "_WindowText" E N "_Window" E E +1 273 253 E E E 1 E 1 E N "_WindowText" E N "_Window" E E E E 13 0 0 0 0 0 E E E @@ -486,12 +539,108 @@ LANG:1 0 "sizePolicy" "Fixed Fixed" "dashclr"N "_Transparent" "antiAliased" "0" -E E 0 2 3 2 1 E U 0 E 283 18 481 33 -0 2 2 "0s" 0 0 0 192 0 0 283 18 1 +E E 0 2 
3 2 1 E U 0 E 273 253 471 268 +0 2 2 "0s" 0 0 0 192 0 0 273 253 1 1 LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 0 1 LANG:1 30 Select LBA/HBA comment to set: +25 21 +"TABLEHIST" +"" +1 10 30 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +14 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 8 28 492 222 +EE 1 0 1 4 0 "PRIO" 2 1 0 "s" 1 +LANG:1 4 Prio +E +1 +LANG:1 0 + +40 "TIME" 14 1 0 "s" 1 +LANG:1 4 Time +E +1 +LANG:1 0 + +162 "STATE" 24 1 0 "s" 1 +LANG:1 13 State comment +E +1 +LANG:1 0 + +277 "STATETYPE" 6 0 0 "s" 1 +LANG:1 2 #1 +E +1 +LANG:1 0 + +277 +17 17 10 0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 0 1 1 1 7 +1 0 +2 22 +"PRIMITIVE_TEXT4" +"" +1 12 13 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +15 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 2 3 2 1 E U 0 E 12 13 83 28 +0 2 2 "0s" 0 0 0 192 0 0 12 13 1 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1 +LANG:1 12 Last states: +20 25 +"CHECK_SHOWALL" +"" +1 383 7 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +16 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 381 5 505 30 +1 +T +1 +LANG:1 15 show all states + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +0 +E"main(int button, int state) +{ + ShowAllStates( (bool)state ); +}" 0 0 LAYER, 1 1 @@ -523,6 +672,6 @@ LANG:1 0 0 3 0 "PANEL_REF0" -1 "" "" -"objects_parts/STD_OBJECTS/ButtonBarBackground.pnl" 10 60 T 4 1.038076152304609 0 1.25 -4.380761523046093 212 +"objects_parts/STD_OBJECTS/ButtonBarBackground.pnl" 10 60 T 4 1.038076152304609 0 1.25 -14.38076152304609 447 0 0 diff --git a/MAC/Navigator2/panels/navigator.pnl b/MAC/Navigator2/panels/navigator.pnl index 73dea10b86fe01945a7a8faf8b4119fb612dca39..ef10d39821e689e1f38c79157d70c4a9a5836386 100644 --- a/MAC/Navigator2/panels/navigator.pnl +++ b/MAC/Navigator2/panels/navigator.pnl @@ -7,9 +7,7 @@ PANEL,-1 -1 1503 998 N "_3DFace" 0 // 
LOG_TRACE(\"navigator.pnl:Initialize| init start baseDP: \"+$baseDP); // initialize Navigator (instance) navigator_handleEventInitialize(); - - - + // Wait till initialising process ended while (g_initializing) { } @@ -87,6 +85,7 @@ PANEL,-1 -1 1503 998 N "_3DFace" 0 + setValue( \"PANEL_REF21.PANEL_REF0..table_top\", \"rowHeight\", 17 ); @@ -156,6 +155,7 @@ void fw_fastJumperEvent(string dp, string event) { navCtrl_handleFastJumperEvent(dp,event); } } + " 0 "main() { diff --git a/MAC/Navigator2/panels/navigator_iltswitch.pnl b/MAC/Navigator2/panels/navigator_iltswitch.pnl new file mode 100644 index 0000000000000000000000000000000000000000..7d01b49dfe1170cbc85db76f40bf1c198bf5f2c0 --- /dev/null +++ b/MAC/Navigator2/panels/navigator_iltswitch.pnl @@ -0,0 +1,502 @@ +V 13 +1 +LANG:1 9 Navigator +PANEL,-1 -1 1503 998 N "_3DFace" 0 +"main() +{ + // LOG_TRACE(\"navigator.pnl:Initialize| init start baseDP: \"+$baseDP); + // initialize Navigator (instance) + navigator_handleEventInitialize(); + + + + // Wait till initialising process ended + while (g_initializing) { + } + + // connect to all event DP's from the objects so when something happens it can be + // seen by the framework and the suited actions can be taken + // Only the framework knows the connections between it's different objects. And decides + // what each of them has to do. 
+ + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_viewBox\")) { + if (dpConnect(\"fw_viewBoxEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_viewBox.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_viewBox.event \"+getLasteError()); + } + } else { + setValue(fw_viewBox,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_viewSelection\")) { + if (dpConnect(\"fw_viewSelectionEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_viewSelection.event\") == -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_viewSelection.event \"+getLasteError()); + } + } else { + setValue(fw_viewSelection,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_topDetailSelection\")) { + if (dpConnect(\"fw_topDetailSelectionEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_topDetailSelection.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_topDetailSelection.event \"+getLasteError()); + } + } else { + setValue(fw_topDetailSelection,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_bottomDetailSelection\")) { + if (dpConnect(\"fw_bottomDetailSelectionEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_bottomDetailSelection.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_bottomDetailSelection.event \"+getLasteError()); + } + } else { + setValue(fw_bottomDetailSelection,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_locator\")) { + if (dpConnect(\"fw_locatorEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_locator.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_locator.event 
\"+getLasteError()); + } + } else { + setValue(fw_locator,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_progressBar\")) { + if (dpConnect(\"fw_progressBarEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_progressBar.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_progressBar.event \"+getLasteError()); + } + } else { + setValue(fw_progressBar,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_headLines\")) { + if (dpConnect(\"fw_headLinesEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_headLines.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_headLines.event \"+getLasteError()); + } + } else { + setValue(fw_headLines,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + /* + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_alerts\")) { + if (dpConnect(\"fw_alertsEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_alerts.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_alerts.event \"+getLasteError()); + } + } else { + setValue(fw_alerts,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + */ + if (dpExists(DPNAME_NAVIGATOR + g_navigatorID + \".fw_fastJumper\")) { + if (dpConnect(\"fw_fastJumperEvent\",false,DPNAME_NAVIGATOR + g_navigatorID + \".fw_fastJumper.event\")== -1) { + DebugTN(\"Navigator.pnl:main|Couldn't connect to \"+DPNAME_NAVIGATOR + g_navigatorID + \".fw_fastJumper.event \"+getLasteError()); + } + } else { + setValue(fw_fastJumper,\"backCol\",\"Lofar_dpdoesnotexist\"); + } + + + + + + + LOG_TRACE(\"navigator.pnl:Initialize|end\"); +} + + +void fw_viewBoxEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_viewBoxEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleViewBoxEvent(dp,event); + + } +} + + +void fw_viewSelectionEvent(string dp, 
string event) { + LOG_TRACE(\"navigator.pnl:fw_viewSelectionEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleViewSelectionEvent(dp,event); + } +} + +void fw_topDetailSelectionEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_topDetailSelectionEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleTopDetailSelectionEvent(dp,event); + } +} + +void fw_bottomDetailSelectionEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_bottomDetailSelectionEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleBottomDetailSelectionEvent(dp,event); + } +} + +void fw_locatorEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_locatorEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleLocatorEvent(dp,event); + } +} + +void fw_progressBarEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_progressBarEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleProgressBarEvent(dp,event); + } +} + +void fw_headLinesEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_headLinesEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleHeadLinesEvent(dp,event); + } +} +void fw_alertsEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_alertsEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleAlertsEvent(dp,event); + } +} +void fw_fastJumperEvent(string dp, string event) { + LOG_TRACE(\"navigator.pnl:fw_fastJumperEvent| trigger: \" + event); + if (event != \"\") { + navCtrl_handleFastJumperEvent(dp,event); + } +} +" 0 + "main() +{ + navigator_handleEventTerminate(); +}" 0 + E E E 1 -1 -1 0 325 170 +""0 1 +E "#uses \"navigator.ctl\" + + +string progressBarDPE;" 0 + 3 +"CBRef" "1" +"EClose" "main() +{ + navigator_handleEventClose(); +}" 0 + +"dpi" "96" +0 0 0 +"" +NC +DISPLAY_LAYER, 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 +LAYER, 0 +1 +LANG:1 6 Layer1 +1 156 5 "" 0 +0 +1 158 9 "" 2 +0 +1 146 3 "" 12 +0 +1 151 4 "" 12 +0 +1 157 6 "" 2 +0 
+1 164 12 "" 18 +0 +1 170 16 "" 1 +0 +15 33 +"CLOCK1" +"" +1 780 1 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +26 0 0 0 0 0 +E E E +1 +1 +LANG:1 0 + +0 +1 +LANG:1 33 MS Shell Dlg,-1,11,5,50,0,0,0,0,0 +0 780 1 900 23 +E 1 1 "%H:%M:%S" "%d/%m/%y" +1 1 1 1 +1 +LANG:1 33 MS Shell Dlg,-1,11,5,50,0,0,0,0,0 +0 1 +LANG:1 33 MS Shell Dlg,-1,11,5,50,0,0,0,0,0 +0 +1 172 20 "" 1 +0 +13 62 +"undock" +"" +1 950 5 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +31 0 0 0 0 0 +E E E +0 +1 +LANG:1 16 Undock this view + +0 +1 +LANG:1 35 MS Shell Dlg 2,-1,11,5,50,0,0,0,0,0 +0 948 3 972 27 + +P +4294967295 +"pictures/undock.gif" +1 +LANG:1 12 PUSH_BUTTON1 +"main() +{ + navFunct_handleUndockClick(); +}" 0 + E E E +13 93 +"b_iltSwitch" +"" +1 1010.405 11 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +32 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 35 MS Shell Dlg 2,-1,11,5,75,0,0,0,0,0 +0 991 3 1062 27 + +T +1 +LANG:1 9 iltSwitch +"main() +{ + bool localmode; + g_involved_stations = makeDynString(\"\"); + + dpGet(\"MCU001:LOFAR_PIC_Europe_DE601.localMode.stationSwitch\",localmode); + localmode = !localmode; + dpSet(\"MCU001:LOFAR_PIC_Europe_DE601.localMode.stationSwitch\",localmode); + + dynAppend(g_involved_stations,\"DE601:\"); + navCtrl_handleNavigatorEvent(\"\",\"ILTSwitched\",\"GCFCWD.ctl\"); + +}" 0 + E E E +1 173 21 "0.0" 18 +0 +1 187 21 "0.0" 1 +31 "transform" 0 0 0.9999999999999999 0 0.8921739130434782 -0.4999999999999947 96.31826086956554 +0 +32 136 +"SPACER1" +"" +1 380 0 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +33 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Ignored Expanding" + 380 0 430 10 2 +32 137 +"SPACER2" +"" +1 1490 180 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +34 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Expanding Ignored" + 1490 180 1501.996428571429 230 1 +32 138 +"SPACER3" +"" +1 1390 8.881784197001252e-016 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +35 0 0 0 0 0 +E E E +0 +1 +LANG:1 
0 + +1 +"sizePolicy" "Ignored Expanding" + 1390 8.881784197001252e-016 1440 20 2 +32 139 +"SPACER4" +"" +1 1200 210 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +36 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Expanding Ignored" + 1200 210 1210 260 1 +32 140 +"SPACER5" +"" +1 1380 440 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +37 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Ignored Expanding" + 1380 440 1430 450 2 +32 141 +"SPACER6" +"" +1 1200 660 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +38 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Expanding Ignored" + 1200 660 1210 710 1 +32 142 +"SPACER7" +"" +1 1490 643 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +39 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Expanding Ignored" + 1490 643 1498 693 1 +32 143 +"SPACER8" +"" +1 1190 950 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +40 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Expanding Ignored" + 1190 950 1200 1000 1 +32 144 +"SPACER9" +"" +1 770 860 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +41 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Ignored Expanding" + 770 860 820 880 2 +32 145 +"SPACER10" +"" +1 1500 926.0175879396984 E E E 1 E 1 E N {0,0,0} E N {255,255,255} E E + E E +42 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +1 +"sizePolicy" "Expanding Ignored" + 1500 926.0175879396984 1502 976.0175879396984 1 +0 +LAYER, 1 +1 +LANG:1 6 Layer2 +0 +LAYER, 2 +1 +LANG:1 6 Layer3 +0 +LAYER, 3 +1 +LANG:1 6 Layer4 +0 +LAYER, 4 +1 +LANG:1 6 Layer5 +0 +LAYER, 5 +1 +LANG:1 6 Layer6 +0 +LAYER, 6 +1 +LANG:1 6 Layer7 +0 +LAYER, 7 +1 +LANG:1 6 Layer8 +0 +3 3 "fw_topDetailSelection" -1 +"" "" +"objects/navigator_typeSelector.pnl" 1002 89 T 16 0.9964285714285714 0 1.002364066193853 -67.42142857142859 -119.2104018912529 +2 +"$name""fw_topDetailSelection" +"$selection""Hardware" +3 4 "fw_bottomDetailSelection" -1 +"" "" +"objects/navigator_typeSelector.pnl" 1084 468 T 17 1 0 1 -154 -73 +2 
+"$name""fw_bottomDetailSelection" +"$selection""Processes" +3 5 "fw_headLines" -1 +"" "" +"objects/navigator_headlines.pnl" 87 765 T 19 1 0 1 -87 92 +1 +"$name""fw_headLines" +3 6 "fw_locator" -1 +"" "" +"objects/navigator_locator.pnl" 1065 798 T 20 1.00104275286757 0 0.6884422110552761 153.8894681960378 315.0251256281408 +1 +"$name""fw_locator" +3 9 "fw_progressBar" -1 +"" "" +"objects/navigator_progressBar.pnl" 815 767 T 23 1.305 0 1 46.42499999999998 90.00000000000003 +1 +"$name""fw_progressBar" +3 12 "fw_viewSelection" -1 +"" "" +"objects/navigator_viewSelection.pnl" -6 -6.99999999999989 T 25 1.000234110183463 0 1.003854010097507 6.001404661100779 6.02312406058504 +1 +"$name""fw_viewSelection" +3 16 "PANEL_REF17" -1 +"" "" +"objects/navigator_fastJumper.pnl" 580 10 T 26 1 0 1 17 -5 +1 +"$name""fw_fastJumper" +3 20 "PANEL_REF21" -1 +"" "" +"objects/show_legenda.pnl" 500 110 T 28 1 0 1 20 -4 +0 +3 21 "PANEL_REF21" -1 +"" "" +"objects/navigator_alerts.pnl" 15.50000000000001 926.8747307968413 T 33 1 0 1 0.4999999999999929 -9.874730796841277 +1 +"$name""fw_alerts" +0 diff --git a/MAC/Navigator2/panels/objects/Alerts/Station_Alerts.pnl b/MAC/Navigator2/panels/objects/Alerts/Station_Alerts.pnl new file mode 100644 index 0000000000000000000000000000000000000000..c0cfba963b383bd7e5d6b293c86cfa587632e843 --- /dev/null +++ b/MAC/Navigator2/panels/objects/Alerts/Station_Alerts.pnl @@ -0,0 +1,174 @@ +V 13 +1 +LANG:1 0 +PANEL,-1 -1 1246 130 N "_3DFace" 0 +"main() +{ + Init(); +}" 0 + E E E E 1 -1 -1 0 1 1 +""0 1 +E "#uses \"navPanel.ctl\" + +string baseDP=\"\"; +string sysName = \"\"; + +// Variable for alert table +string _propDpName = \"_AESPropertiesRTRow_\" + myUiNumber() + \"_\" + myModuleName() + \"_Top\"; + + + +void Init() +{ +// DebugTN( __FILE__, myModuleName() ); +// DebugTN( __FUNCTION__, _propDpName, sysName ); + + sysName = dpSubStr(g_currentDatapoint,DPSUB_SYS); + strreplace( sysName, \":\", \"\" ); + + dpConnect( \"CallbackAesRow\", false, _propDpName + 
\".Both.Systems.Selections\" ); +} + + + +void CallbackAesRow( string strDPE1, dyn_string dsDistSelections ) +{ + +// DebugTN( __FUNCTION__, dsDistSelections ); + + // If callback contains our system: do nothing.... + // otherwise init + if( dynContains( dsDistSelections, sysName ) >= 1 ) + { + return; + } + + setValue( \"PANEL_REF6..table_top\", \"rowHeight\", 17 ); + setValue( \"PANEL_REF6..table_top\", \"columnHeaderVisibility\", true ); + + dpSet( _propDpName + \".Both.Systems.CheckAllSystems\", false, + _propDpName + \".Both.Systems.Selections\", sysName ); + + aes_doStop( _propDpName ); + aes_doStart( _propDpName ); +} + + +void OpenAlarmFilter() +{ + string strModuleName, strPanelName; + dyn_float df; + dyn_string ds; + + strModuleName = \"AlarmFilterStation\"; + strPanelName = sysName; + + if( isModuleOpen( strModuleName ) ) + { + ChildPanelOnCentralModalReturn( \"vision/MessageWarning\", + \"Popup already open\", + makeDynString( \"$1:Alarmfilter already opened!\" ), + df, ds ); + + moduleRestore( strModuleName ); + moduleRaise( strModuleName ); + return; + } + + ModuleOnWithPanel( strModuleName, -1, -1, + 0, 0, + 1, 1, + \"Scale\", + \"Alerts/alarmsWinCCOA_Filtering.pnl\", + strPanelName, + makeDynString( \"$SYSTEM:\" + sysName ) ); +} + +" 0 + 3 +"CBRef" "1" +"EClose" E +"dpi" "96" +0 0 0 +"" +DISPLAY_LAYER, 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 +LAYER, 0 +1 +LANG:1 0 +1 17 0 "0" 18 +31 "transform" 0 0 1 0 1 120.2500000000002 0.9686956521738921 +0 +1 18 0 "0" 19 +31 "transform" 0 0 1 0 1 120.2500000000002 0.9686956521738921 +0 +1 19 0 "0" 24 +31 "transform" 0 0 1 0 1 122.2500000000002 0.9686956521738921 +0 +1 28 0 "0" 21 +31 "transform" 0 0 1 0 1 120.2500000000002 0.9686956521738921 +0 +1 29 0 "0" 22 +31 "transform" 0 0 1 0 1 120.2500000000002 0.9686956521738921 +0 +1 31 0 "0" 1 +31 "transform" 0 0 1.111363636363637 0 4.249999999999984 0.1113636363636381 -3.249999999999549 +0 +13 16 +"PUSH_BUTTON1" +"" +1 1110.25 99 E E E 1 E 1 E N "_ButtonText" E N "_Button" 
E E + E E +0 0 0 0 0 0 +E E E +0 +1 +LANG:1 0 + +0 +1 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +0 1108 88 1192 122 + +T +1 +LANG:1 11 Alarmfilter +"main() +{ + OpenAlarmFilter(); +}" 0 + E E E +0 +LAYER, 1 +1 +LANG:1 0 +0 +LAYER, 2 +1 +LANG:1 0 +0 +LAYER, 3 +1 +LANG:1 0 +0 +LAYER, 4 +1 +LANG:1 0 +0 +LAYER, 5 +1 +LANG:1 0 +0 +LAYER, 6 +1 +LANG:1 0 +0 +LAYER, 7 +1 +LANG:1 0 +0 +3 0 "PANEL_REF6" -1 +"" "" +"objects/STD_PANELS/AESRow.pnl" 175.7272727272728 411.5 T 0 1 0 1 -187.7272727272728 -441.5 +1 +"$AESREGDOLLAR_SCREENTYPE""aes_alertRow_Station" +0 diff --git a/MAC/Navigator2/panels/vision/aes/AESComments.pnl b/MAC/Navigator2/panels/vision/aes/AESComments.pnl new file mode 100644 index 0000000000000000000000000000000000000000..e4a6ea330f2ff890a448470e1482831fc4851d91 --- /dev/null +++ b/MAC/Navigator2/panels/vision/aes/AESComments.pnl @@ -0,0 +1,594 @@ +V 13 +4 +LANG:1 8 Comments +LANG:10000 13 Kommentierung +LANG:10001 8 Comments +LANG:10027 22 Комментарии +PANEL,-1 -1 586 492 N "_3DFace" 9 +"$comment" +"$count" +"$detailPanel" +"$direction" +"$dp" +"$dpid" +"$mode" +"$text" +"$time" +"main() +{ + dyn_string dsComments, dsParts; + dyn_string dsTime, dsDP, dsText, dsDirection; + + if(isDollarDefined(\"$mode\") && strtoupper($mode) == \"MULTI\") + { + g_bMulti = TRUE; + + lblHeader.text = getCatStr(\"aes\", \"commentHeaderMulti\"); + + tblSelectedAlerts.alternatingRowColors(makeDynString(\"white\", \"lightgrey\")); + + if(isDollarDefined(\"$time\")) + dsTime = stringToDynString($time); + + if(isDollarDefined(\"$dp\")) + dsDP = stringToDynString($dp); + + if(isDollarDefined(\"$text\")) + dsText = stringToDynString($text); + + if(isDollarDefined(\"$direction\")) + dsDirection = stringToDynString($direction); + + tblSelectedAlerts.appendLines(dynlen(dsTime), \"Time\", dsTime, \"DP\", dsDP, \"Alarmtext\", dsText, \"Direction\", dsDirection); + } + else + { + g_bMulti = FALSE; + + setInputFocus(myModuleName(), myPanelName(), \"tblHistory\"); + + lblHeader.text = 
getCatStr(\"aes\", \"commentHeaderSingle\"); + + tblHistory.tableMode = TABLE_SELECT_BROWSE; + tblHistory.selectByClick = TABLE_SELECT_LINE; + tblHistory.alternatingRowColors(makeDynString(\"white\", \"lightgrey\")); + + if(isDollarDefined(\"$comment\") && $comment != \"\") + dsComments = aes_splitComment($comment); + + for(int i = 1; i <= dynlen(dsComments); i++) + { + dsParts = strsplit(dsComments[i], \"|\"); + + // MvdJagt 18-jun-2018: to correct wrong comments (contains only 1 field instead of 3): correct them with time as 1970 and unknown user... + if( dynlen(dsParts) == 1 ) + { + DebugTN( __FILE__ + \"(): commentaar is fout! wordt nu door ons aangepast...\" ); + dsParts[3] = dsParts[1]; + + dsParts[1] = \"?\"; + dsParts[2] = (string)(time)0; + DebugTN( dsParts ); + } // End of change + + + if(dynlen(dsParts) == 3) + { + tblHistory.appendLine(\"Time\", dsParts[2], + \"User\", strltrim(dsParts[1], \" \"), + \"Comment\", dsParts[3], + \"...\", (strpos(dsParts[3], \"\\n\") >= 0 || strlen(dsParts[3]) > 35) ? 
\"...\" : \"\"); + + gNumberOfStoredEntries += 1; + } + else //IM 118148 alert comment does not work + { + ChildPanelOnCentralModal(\"vision/MessageInfo1\",myModuleName()+\" Warning\",makeDynString(getCatStr(\"aesExtension\",\"invalidComment\"))); + + setMultiValue(\"tblHistory\", \"enabled\", FALSE, + \"text\", \"enabled\", FALSE, + \"buOK\", \"enabled\", FALSE, + \"insert\", \"enabled\", FALSE); + + tblHistory.deleteAllLines(); //If invalid format was detected do not display inconsistent information in table + break; + } + } + } + + setMultiValue(\"tblSelectedAlerts\", \"visible\", g_bMulti, + \"tblHistory\", \"visible\", !g_bMulti, + \"delete\", \"visible\", !g_bMulti, + \"insert\", \"visible\", !g_bMulti, + \"buDetails\", \"visible\", !g_bMulti); +} + +" 0 + E E E E 1 -1 -1 0 10 10 +""0 1 +E "bool g_bMulti; +int gNumberOfStoredEntries = 0; //It should not be possible to delete previously stored entries +" 0 + 3 +"CBRef" "0" +"EClose" E +"dpi" "96" +0 0 0 +"" +DISPLAY_LAYER, 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 +LAYER, 0 +1 +LANG:1 6 Layer1 +13 4 +"buDetails" +"" +1 309 456 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +7 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,40,0,0,0,0,0 +0 309 456 397 484 + +T +4 +LANG:1 10 Details... +LANG:10000 10 Details... +LANG:10001 10 Details... +LANG:10027 16 Детали ... +"/* +$-Parameter: + dpid (string) ... DP-Element + time (time as string) ... time of alert + count (int) ... count of alert + detailPanel (string) ... 
panel for details +*/ +main() +{ + // start child panel for detail information + ChildPanelOnParentModal($detailPanel, \"\", rootPanel(), + makeDynString(\"$dpid:\" + $dpid, + \"$time:\" + $time, + \"$count:\" + $count), 10, 10); +} +" 0 + E E E +13 3 +"buOK" +"" +1 399 456 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +4 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,40,0,0,0,0,0 +0 399 456 487 484 + +T +4 +LANG:1 2 OK +LANG:10000 2 OK +LANG:10001 2 OK +LANG:10027 2 OK +"main() +{ + aes_addCommentOK(); +}" 0 + E E E +13 2 +"buCancel" +"" +1 489 456 E E E 1 E 1 E N {0,0,0} E N "_Button" E E + E E +5 0 0 0 27 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,40,0,0,0,0,0 +0 489 456 577 484 + +T +4 +LANG:1 6 Cancel +LANG:10000 9 Abbrechen +LANG:10001 6 Cancel +LANG:10027 16 Отменить +"main() +{ + aes_addCommentNOK(); +}" 0 + E E E +13 6 +"delete" +"" +1 -91 446 E E E 0 E 1 E N "_ButtonText" E N "_Button" E E + E E +8 0 0 0 0 0 +E E E +0 +4 +LANG:1 25 Delete not stored entries +LANG:10000 48 Löschen von noch nicht gespeicherten Einträgen +LANG:10001 25 Delete not stored entries +LANG:10027 54 Удалить неÑохраненные запиÑи + +0 +4 +LANG:1 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,40,0,0,0,0,0 +0 129 456 217 484 + +T +4 +LANG:1 6 Delete +LANG:10000 8 Löschen +LANG:10001 6 Delete +LANG:10027 14 Удалить +"main() +{ + dyn_int diSelectedLines = tblHistory.getSelectedLines; + + if(dynlen(diSelectedLines) == 1) + tblHistory.deleteLineN(diSelectedLines[1]); + + diSelectedLines = tblHistory.getSelectedLines; + + 
if(dynlen(diSelectedLines) == 1) + text.text = tblHistory.cellValueRC(diSelectedLines[1], \"Comment\"); +} +" 0 + E E E +1 24 0 "" 0 +0 +25 9 +"tblHistory" +"" +1 7.999040307101723 28 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +9 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +1 +"selectionChangedCB" "synchronized main() +{ + dyn_int diSelectedLines = tblHistory.getSelectedLines; + int iPos = (dynlen(diSelectedLines)==1 ? diSelectedLines[1]+1 : -1); //Note that index of table rows starts at zero + + delete.enabled = (iPos > gNumberOfStoredEntries); //It should only be possible to delete entries which have not been saved yet +}" +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,11,5,50,0,0,0,0,0 +0 6 26 576 327 +E"main(int row, string column, string value) +{ + text.text = tblHistory.cellValueRC(row, \"Comment\"); +}" 0 + 1 0 1 4 0 "Time" 12 1 0 "s" 4 +LANG:1 4 Time +LANG:10000 4 Zeit +LANG:10001 4 Time +LANG:10027 10 Ð’Ñ€ÐµÐ¼Ñ +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +150 "User" 6 1 0 "s" 4 +LANG:1 4 User +LANG:10000 8 Benutzer +LANG:10001 4 User +LANG:10027 24 Пользователь +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +80 "Comment" 26 1 0 "s" 4 +LANG:1 7 Comment +LANG:10000 9 Kommentar +LANG:10001 7 Comment +LANG:10027 22 комментарий +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +296 "..." 1 1 0 "s" 4 +LANG:1 3 ... +LANG:10000 3 ... +LANG:10001 3 ... +LANG:10027 3 ... 
+E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +21 +16 16 10 0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,11,5,50,0,0,0,0,0 +0 0 2 1 1 7 +1 0 +29 13 +"text" +"" +1 8.000000000000004 357 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +3 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 8 357 574 434 +15 PVSS.TextEdit.1 +0 +E2 16 +"lblHeader" +"" +1 10 7.999999999999943 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +11 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 10 7.999999999999943 206 23 +0 2 2 "0s" 0 0 0 192 0 0 10 7.999999999999943 1 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 34 History or list of selected alerts +LANG:10000 40 Historie oder Liste ausgewählter Alarme +LANG:10001 34 History or list of selected alerts +LANG:10027 68 ИÑÑ‚Ð¾Ñ€Ð¸Ñ Ð¸Ð»Ð¸ ÑпиÑок выбранных алармов +2 19 +"lblComment" +"" +1 10 337 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +12 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 10 337 77 352 +0 2 2 "0s" 0 0 0 192 0 0 10 337 1 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 8 Comment: +LANG:10000 10 Kommentar: +LANG:10001 8 Comment: +LANG:10027 17 ОпиÑание: +13 23 +"insert" +"" +1 
-1 446 E E E 1 E 1 E N "_ButtonText" E N "_Button" E E + E E +14 0 0 0 0 0 +E E E +0 +4 +LANG:1 16 Insert new entry +LANG:10000 23 Neuen Eintrag einfügen +LANG:10001 16 Insert new entry +LANG:10027 40 Ð’Ñтавить новую запиÑÑŒ + +0 +4 +LANG:1 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,40,0,0,0,0,0 +0 219 456 307 484 + +T +4 +LANG:1 6 Insert +LANG:10000 9 Einfügen +LANG:10001 6 Insert +LANG:10027 16 Ð’Ñтавить +"main() +{ + aes_addCommentApply(); + setInputFocus(myModuleName(), myPanelName(), \"text\"); +}" 0 + E E E +25 10 +"tblSelectedAlerts" +"" +1 -13 27.51551724137926 E E E 1 E 1 E N "_WindowText" E N "_Window" E E + E E +10 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,11,5,50,0,0,0,0,0 +0 6 26 576 327 +EE 1 0 1 4 0 "Time" 12 1 0 "s" 4 +LANG:1 4 Time +LANG:10000 4 Zeit +LANG:10001 4 Time +LANG:10027 10 Ð’Ñ€ÐµÐ¼Ñ +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +150 "DP" 16 1 0 "s" 4 +LANG:1 9 Datapoint +LANG:10000 10 Datenpunkt +LANG:10001 9 Datapoint +LANG:10027 23 Точка данных +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +191 "Alarmtext" 12 1 0 "s" 4 +LANG:1 10 Alert text +LANG:10000 9 Alarmtext +LANG:10001 10 Alert text +LANG:10027 23 ТекÑÑ‚ аларма +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +145 "Direction" 4 1 0 "s" 4 +LANG:1 9 Direction +LANG:10000 8 Richtung +LANG:10001 9 Direction +LANG:10027 22 Ðаправление +E +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +62 +16 16 10 0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,11,5,50,0,0,0,0,0 +0 0 2 1 1 7 +1 0 +0 +LAYER, 1 +1 +LANG:1 6 Layer2 +0 +LAYER, 2 +1 +LANG:1 6 Layer3 +0 
+LAYER, 3 +1 +LANG:1 6 Layer4 +0 +LAYER, 4 +1 +LANG:1 6 Layer5 +0 +LAYER, 5 +1 +LANG:1 6 Layer6 +0 +LAYER, 6 +1 +LANG:1 6 Layer7 +0 +LAYER, 7 +1 +LANG:1 6 Layer8 +0 +3 0 "PANEL_REF0" -1 +"" "" +"objects_parts/STD_OBJECTS/ButtonBarBackground.pnl" 35 415 T 8 1.192096940739648 0 1 -51.65157964042989 32 +0 +0 diff --git a/MAC/Navigator2/panels/vision/aes/AS_detail_DP.pnl b/MAC/Navigator2/panels/vision/aes/AS_detail_DP.pnl new file mode 100644 index 0000000000000000000000000000000000000000..4c0ac8bee98bc9f09f346d62e3009aa3bd3a8f5a --- /dev/null +++ b/MAC/Navigator2/panels/vision/aes/AS_detail_DP.pnl @@ -0,0 +1,1824 @@ +V 13 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 +PANEL,-1 -1 664 623 N "_3DFace" 3 +"$count" +"$dpid" +"$time" +"/* +$-Parameters: + dpid (string) ... DP-ID of alert + time (time) ... Time of alert + count (int) ... count of alert +*/ + +main() +{ + int aesMode = AES_MODE_CURRENT; + + if (isDollarDefined(\"$aesMode\")) + { + aesMode = getDollarValue(\"$aesMode\"); + } + + dyn_int counts; + dyn_string dpes1, dpes2; + dyn_time times; + string dpe = $dpid; + string dirStr, stateStr; + time ti = $time, td; + int i, prio, count = $count, days, hours, minutes, partnidx; + int msgNum; + string msgKey, seconds; + + dyn_string abbr, panel, comment; + dyn_bool direction, ackable, ackOblig, oldestAck, singleAck, isSum; + langString alerttext; + dyn_anytype value; + dyn_time ackTime, partner; + dyn_int prior, ackState, ackUser; + dyn_string sClass; + bool filtered, force_filtered, visibleAlertReduction=FALSE; + dyn_string fore=makeDynString(\"_3DText\",\"_Window\"), back=makeDynString(\"_3DFace\",\"_ButtonShadow\"); + dyn_dyn_anytype tab; + string select; + string st1, st2; + time t; + bit64 b64UserBits; + + string dpWithoutDetail=\"_alert_hdl.\"; + + // MvdJagt 18-jun-2018: due to the fact that all systems/servers are configured at UTC time without time-zone, but the client runs on a PC with timezone + // the query didn't find the correct alert.... 
So we now first subtract the timezone and the additional hour when we are in summertime + time tiCorrected = ti - timeFromGMT(); + if( daylightsaving( ti ) ) + { + tiCorrected = tiCorrected - 3600; + } + + st1 = tiCorrected; +// st1 = ti; + + if (useRDBArchive()) //Problem with RDB IM 80422 + { +// count = 0; // counter must work in RDB +// time ti2 = ti + 0.001; // we have to use a time-range + time ti2 = tiCorrected + 0.001; // we have to use a time-range + st2 = ti2; + } + else + { + st2 = st1; + } + + // MvdJagt: end of changes + + bool bVisibleDiscretAttributes = true; + setMultiValue(\"ptUserBits\", \"visible\", bVisibleDiscretAttributes, + \"userBits\", \"visible\", bVisibleDiscretAttributes, + \"ptStateBits\", \"visible\", bVisibleDiscretAttributes, + \"frBits\", \"visible\", bVisibleDiscretAttributes); + + select = \"SELECT ALERT \"+ + \"'\"+dpWithoutDetail+\"._abbr','\"+dpWithoutDetail+\"._prior','\"+dpWithoutDetail+\"._text','\"+dpWithoutDetail+\"._direction','\"+ + dpWithoutDetail+\"._value','\"+dpWithoutDetail+\"._panel','\"+dpWithoutDetail+\"._comment','\"+dpWithoutDetail+\"._ack_time','\"+ + dpWithoutDetail+\"._partner','\"+dpWithoutDetail+\"._ackable','\"+dpWithoutDetail+\"._ack_state','\"+dpWithoutDetail+\"._ack_oblig','\"+ + dpWithoutDetail+\"._oldest_ack','\"+dpWithoutDetail+\"._single_ack','\"+dpWithoutDetail+\"._sum','\"+dpWithoutDetail+\"._ack_user','\"+ + dpWithoutDetail+\"._class','\"+dpWithoutDetail+\"._obsolete','\"+dpWithoutDetail+\"._value_status64','\"+dpWithoutDetail+\"._partn_idx' \" + + \" FROM \" + \"'\"+dpSubStr(dpe,DPSUB_SYS_DP_EL) +\"'\" + + (dpSubStr(dpe, DPSUB_SYS) != getSystemName()+\":\"?\" REMOTE '\"+dpSubStr(dpe, DPSUB_SYS)+\"'\":\"\")+ + \" TIMERANGE(\\\"\" + st1 + \"\\\",\\\"\" + st2 + \"\\\",1,0)\"; +DebugTN( select ); + dpQuery( select, tab ); + + if (useRDBArchive()) //because in RDB we have to ask for a timeperiod -> we must remove the wrong entries + { + for (int i=dynlen(tab); i>=2; i--) //only to second row, 
because first row is description + { + if (((time)((atime)tab[i][2])) != ti) + { + dynRemove(tab, i); + } + } + } + + int therange=aes_getRangeOfTab(tab, dpe, ti, count); + + int upper = 20; + if ( therange > 0 && dynlen( tab ) > 1 && dynlen(tab[therange]) >=upper ) + { + abbr = tab [therange][3]; + prior = tab [therange][4]; + alerttext = (langString) tab [therange][5]; + direction = tab [therange][6]; + value = tab [therange][7]; + panel = tab [therange][8]; + comment = tab [therange][9]; + ackTime = tab [therange][10]; + partner = tab [therange][11]; + ackable = tab [therange][12]; + ackState = tab [therange][13]; + ackOblig = tab [therange][14]; + oldestAck = tab [therange][15]; + singleAck = tab [therange][16]; + isSum = tab [therange][17]; + ackUser = tab [therange][18]; + sClass = tab [therange][19]; +// new feature userdepending alarm display --> wie have to replace the attributes _class _abbr _prio and _ackable if there is a alert class mapping configured IM #117931 + if(isFunctionDefined(\"aes_prepareAlertDetailDisplay\")) + aes_prepareAlertDetailDisplay(abbr,prior,ackable,sClass); +//// + visibleAlertReduction = !tab [therange][20]; //obsolete + b64UserBits = tab [therange][21]; + partnidx = tab [therange][22]; + + if(aesMode==AES_MODE_CURRENT) + { + if ( visibleAlertReduction ) + visibleAlertReduction = isAlertFilteringActive(); + if ( visibleAlertReduction ) + aes_getFiltered_and_ForceFiltered_Attribute( $dpid, dpWithoutDetail+aes_getDpDetail(dpe), ti, count, filtered, force_filtered ); + } + else + visibleAlertReduction=FALSE; + } + else + { + string sTemp = getCatStr(\"sc\",\"noDetailsForThisAlert\");; + ChildPanelOnCentralModal(\"vision/MessageWarning\", \"\", makeDynString(sTemp)); + return; + } + + i=1; + + if ( direction[i] ) + dirStr = getCatStr(\"sc\", \"entered\"); + else + dirStr = getCatStr(\"sc\", \"left\"); + + if ( ackState[i] == DPATTR_ACKTYPE_MULTIPLE ) + stateStr = getCatStr(\"sc\", \"multipleAck\"); + else if ( ackState[i] == 
DPATTR_ACKTYPE_SINGLE ) + stateStr = getCatStr(\"sc\", \"singleAck\"); + + // calculate time between the partners + if ( period(partner[i]) ) + { + unsigned diff; + + if ( partner[i] > ti ) + td = partner[i] - ti; + else + td = ti - partner[i]; + + diff = period(td); + + days = diff / 86400; diff -= days * 86400; + hours = diff / 3600; diff -= hours * 3600; + minutes = diff / 60; diff -= minutes * 60; + sprintf(seconds, \"%2u.%03d\", diff, milliSecond(td)); + } + + // get explanation text + if ( ackState[i] == DPATTR_ACKTYPE_MULTIPLE ) + msgNum = 0; + else if ( ackState[i] == DPATTR_ACKTYPE_SINGLE ) + msgNum = 1; + else + { + + switch ( ackable[i]*8 + ackOblig[i]*4 + oldestAck[i]*2 + singleAck[i] ) + { + case 0: // sum=0: not ackable alert + case 1: // sum=1: not ackable sum-alert + case 2: + case 3: msgNum = isSum[i] ? 2 : 3; break; + + case 4: + case 6: msgNum = 4; break; // oblig ack. alert, not yet ack. + + case 5: + case 7: msgNum = 5; break; // oblig single-ack alert, not yet ack. + + case 8: msgNum = 6; break; // ackable if oldest ack. + case 9: msgNum = 7; break; // single-ackable if oldest ack. + + case 10: msgNum = 8; break; // ackable, oldest + case 11: msgNum = 9; break; // single-ack, oldest + + case 12: msgNum = 10; break; // ack oblig if oldest ack + case 13: msgNum = 11; break; // single-ack oblig if oldest ack + + case 14: msgNum = 12; break; // ack oblig, oldest + case 15: msgNum = 13; break; // single-ack oblig, oldest + } + } + sprintf(msgKey, \"explain%d\", msgNum); + + string sPriorAndClass; + sprintf(sPriorAndClass, \"%03d\", prior[i]); + sPriorAndClass += \"_\"+dpSubStr(sClass[i], DPSUB_DP_EL); + setMultiValue(\"abbr\", \"text\", abbr[i], + \"prior\", \"text\", sPriorAndClass, + \"timeStr\", \"text\", alarmCountedTime(ti), + \"text\", \"text\", alerttext, + \"direction\", \"text\", dirStr, + \"value\", \"text\", (getType(value[i]) == STRING_VAR) ? 
value[i] : dpValToString(dpe,value[i],true), //for usage with 2.10 only + \"ackTypeStr\", \"text\", getCatStr(\"sc\", msgKey), + \"ack_time\", \"text\", alarmCountedTime(ackTime[i]), + \"partner\", \"text\", alarmCountedTimePartnIdx(partner[i], partnidx), + \"comment\", \"items\", as_splitComment(comment[i]), + \"panel\", \"text\", panel[i], + \"ackable\", \"state\", 0, ackable[i], + \"oldestAck\", \"state\", 0, oldestAck[i], + \"ackOblig\", \"state\", 0, ackOblig[i], + \"ackSingle\", \"state\", 0, singleAck[i], + \"ack_user\", \"text\", (ackState[i] == DPATTR_ACKTYPE_NOT) ? \"\" : getUserName(ackUser[i]), + \"ack_state\", \"text\", stateStr, + \"days\", \"text\", days, + \"hours\", \"text\", hours, + \"minutes\", \"text\", minutes, + \"seconds\", \"text\", seconds, + \"cb_force_filter\", \"state\", \"0\", force_filtered, + \"_alert___filtered_bad\",\"backCol\",back[filtered+1], + \"_alert___filtered_bad\",\"foreCol\",fore[filtered+1], + \"b1\",\"foreCol\",fore[filtered+1], + \"cb_force_filter\", \"visible\", visibleAlertReduction, + \"_alert___filtered_bad\", \"visible\", visibleAlertReduction, + \"b1\",\"visible\", visibleAlertReduction); + + displayUserBits(b64UserBits); +} + +displayUserBits(bit64 b64UserBits) +{ + paDisplayAlertBits(b64UserBits, false); +} + +string alarmCountedTime(time t) +{ + string res = (t) ? formatTime(\"%c\", t, \" (%03d \") : \"\"; + return (res != \"\") ? res + $count + \")\" : \"(\" + $count + \")\"; +} + +string alarmCountedTimePartnIdx(time t, int idx) +{ + string res = (t) ? formatTime(\"%c\", t, \" (%03d \") : \"\"; + return (res != \"\") ? 
res + idx + \")\" : \"(\" + idx + \")\"; +} + + + + + + + +" 0 + E E E E 1 -1 -1 0 0 0 +""0 1 +E E 3 +"CBRef" "1" +"EClose" E +"dpi" "96" +0 0 0 +"" +DISPLAY_LAYER, 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 +LAYER, 0 +1 +LANG:1 0 +2 0 +"Text3" +"" +1 93 67.00000000000003 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +1 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 64 67.00000000000003 95 82 +0 2 2 "0s" 0 0 0 194 0 0 95 67.00000000000003 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 6 Alert: +LANG:10000 6 Alarm: +LANG:10001 6 Alert: +LANG:10027 11 Ðларм: +2 1 +"Text4" +"" +1 93 37 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +2 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 32 37 95 52 +0 2 2 "0s" 0 0 0 194 0 0 95 37 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 11 Short sign: +LANG:10000 12 Kurzzeichen: +LANG:10001 11 Short sign: +LANG:10027 22 Сокр. 
обозн.: +2 2 +"Text5" +"" +1 93 97.00000000000003 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +3 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 63 97.00000000000003 95 112 +0 2 2 "0s" 0 0 0 194 0 0 95 97.00000000000003 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 5 Time: +LANG:10000 10 Zeitpunkt: +LANG:10001 5 Time: +LANG:10027 11 ВремÑ: +2 3 +"Text8" +"" +1 93 127 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +4 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 38 127 95 142 +0 2 2 "0s" 0 0 0 194 0 0 95 127 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 11 Alert text: +LANG:10000 10 Meldetext: +LANG:10001 11 Alert text: +LANG:10027 24 ТекÑÑ‚ аларма: +2 4 +"Text9" +"" +1 95 157 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +5 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 40 157 95 172 +0 2 2 "0s" 0 0 0 194 0 0 95 157 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 10 Direction: +LANG:10000 9 Richtung: +LANG:10001 10 Direction: +LANG:10027 23 Ðаправление: +2 5 +"Text10" +"" +1 93 187 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +6 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N 
"_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 58 187 95 202 +0 2 2 "0s" 0 0 0 194 0 0 95 187 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 6 Value: +LANG:10000 5 Wert: +LANG:10001 6 Value: +LANG:10027 17 Значение: +2 6 +"Text11" +"" +1 95 217 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +7 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 58 217 95 232 +0 2 2 "0s" 0 0 0 194 0 0 95 217 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 6 Panel: +LANG:10000 6 Panel: +LANG:10001 6 Panel: +LANG:10027 13 Панель: +2 7 +"Text12" +"" +1 443 35 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +8 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 370 35 445 50 +0 2 2 "0s" 0 0 0 194 0 0 445 35 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 13 Time of ack.: +LANG:10000 22 Quittierungszeitpunkt: +LANG:10001 13 Time of ack.: +LANG:10027 36 Ð’Ñ€ÐµÐ¼Ñ ÐºÐ²Ð¸Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ: +2 8 +"Text13" +"" +1 443 187 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +9 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 370 187 445 202 +0 2 2 "0s" 0 0 0 194 0 0 445 187 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 
+LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 13 Partner time: +LANG:10000 12 Partnerzeit: +LANG:10001 13 Partner time: +LANG:10027 28 Ð’Ñ€ÐµÐ¼Ñ Ð¿Ð°Ñ€Ñ‚Ð½ÐµÑ€Ð°: +2 9 +"Text14" +"" +1 443 68 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +10 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 340 68 445 83 +0 2 2 "0s" 0 0 0 194 0 0 445 68 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 16 Acknowledgeable: +LANG:10000 12 Quittierbar: +LANG:10001 16 Acknowledgeable: +LANG:10027 23 Квитируемый: +2 10 +"Text15" +"" +1 443 127 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +11 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 374 127 445 142 +0 2 2 "0s" 0 0 0 194 0 0 445 127 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 12 Ack. status: +LANG:10000 20 Quittierungszustand: +LANG:10001 12 Ack. 
status: +LANG:10027 38 Ð¡Ñ‚Ð°Ñ‚ÑƒÑ ÐºÐ²Ð¸Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ: +2 11 +"Text16" +"" +1 443 157 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +12 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 339 157 445 172 +0 2 2 "0s" 0 0 0 194 0 0 445 157 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 16 Acknowledged by: +LANG:10000 14 Quittiert von: +LANG:10001 16 Acknowledged by: +LANG:10027 23 Квитировано: +2 12 +"Text17" +"" +1 443 217 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +13 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 393 217 445 232 +0 2 2 "0s" 0 0 0 194 0 0 445 217 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 9 Duration: +LANG:10000 6 Dauer: +LANG:10001 9 Duration: +LANG:10027 25 ДлительноÑÑ‚ÑŒ: +2 13 +"Text18" +"" +1 443 99.00000000000003 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +14 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 361 99.00000000000003 445 114 +0 2 2 "0s" 0 0 0 194 0 0 445 99.00000000000003 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 14 Requires ack.: +LANG:10000 18 Quittierpflichtig: +LANG:10001 14 Requires ack.: +LANG:10027 35 Треб. 
квитированиÑ: +2 14 +"Text19" +"" +1 562 99.00000000000003 E E E 1 E 1 E N {0,0,0} E N "_Transparent" E E + E E +15 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 0 E 498 99.00000000000003 564 114 +0 2 2 "0s" 0 0 0 194 0 0 564 99.00000000000003 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 13 Individually: +LANG:10000 8 Einzeln: +LANG:10001 13 Individually: +LANG:10027 17 Отдельно: +2 15 +"Text20" +"" +1 562 68.00000000000003 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +16 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 522 68.00000000000003 564 83 +0 2 2 "0s" 0 0 0 194 0 0 564 68.00000000000003 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 7 Oldest: +LANG:10000 9 Älteste: +LANG:10001 7 Oldest: +LANG:10027 19 Старейший: +2 16 +"Text21" +"" +1 460 217 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +17 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 453 217 460 232 +0 2 2 "0s" 0 0 0 194 0 0 460 217 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 1 d +LANG:10000 1 d +LANG:10001 1 d +LANG:10027 1 d +2 17 +"Text22" +"" +1 505 217 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +18 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed 
Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 498 217 505 232 +0 2 2 "0s" 0 0 0 194 0 0 505 217 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 1 h +LANG:10000 1 h +LANG:10001 1 h +LANG:10027 1 h +2 18 +"Text23" +"" +1 560 217 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +19 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 549 217 560 232 +0 2 2 "0s" 0 0 0 194 0 0 560 217 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 1 m +LANG:10000 1 m +LANG:10001 1 m +LANG:10027 1 m +2 19 +"Text24" +"" +1 604 217 E E E 1 E 1 E N "_3DText" E N "_3DFace" E E + E E +20 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E U 1 E 597 217 604 232 +0 2 2 "0s" 0 0 0 194 0 0 604 217 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 1 s +LANG:10000 1 s +LANG:10001 1 s +LANG:10027 1 s +30 20 +"Frame1" +"" +1 10 586 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +21 0 0 0 0 0 +E E E +1 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E 1 0 1.03804347826087 0 -103.2934782608696 0 E 10 402 650 587 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 10 Comment(s) +LANG:10000 12 Kommentar(e) +LANG:10001 10 Comment(s) 
+LANG:10027 29 Комментарий(-ии) +30 21 +"Frame2" +"" +1 10 392 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +22 0 0 0 0 0 +E E E +1 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E 1 0 1 0 -87.99999999999997 0 E 10 339 650 393 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 6 Notice +LANG:10000 7 Hinweis +LANG:10001 6 Notice +LANG:10027 20 Примечание +30 22 +"Frame3" +"" +1 10 329 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +23 0 0 0 0 0 +E E E +1 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E 1 0 1.043668122270743 0 -102.7161572052402 0 E 10 108 300 338 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 16 Alert attributes +LANG:10000 17 Meldungsattribute +LANG:10001 16 Alert attributes +LANG:10027 29 Ðтрибуты аларма +30 23 +"Frame4" +"" +1 310 333 E E E 1 E 1 E N "_3DText" E N "_Transparent" E E + E E +24 0 0 0 0 0 +E E E +1 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E 1 0 1.043668122270743 0 -102.7161572052402 0 E 310 108 650 338 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 26 Acknowledgement attributes +LANG:10000 20 Quttierungsattribute +LANG:10001 26 Acknowledgement attributes +LANG:10027 41 Ðтрибуты ÐºÐ²Ð¸Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ +14 25 +"abbr" +"" +1 98 34.00000000000003 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +26 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 
Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 33 290 59 +2 "0s" 0 0 0 0 0 -1 E E E +14 26 +"prior" +"" +1 98 64.00000000000003 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +27 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 63 290 89 +2 "0s" 0 0 0 0 0 -1 E E E +14 27 +"timeStr" +"" +1 98 94.00000000000003 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +28 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 93 290 119 +2 "0s" 0 0 0 0 0 -1 E E E +14 28 +"text" +"" +1 98 124 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +29 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 123 290 149 +2 "0s" 0 0 0 0 0 -1 E E E +14 29 +"direction" +"" +1 98 154 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +30 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 153 290 179 +2 "0s" 0 0 0 0 0 -1 E E E +14 30 +"value" +"" +1 98 184 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +31 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 
+LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 183 290 209 +2 "0s" 0 0 0 0 0 -1 E E E +14 31 +"panel" +"" +1 98 214 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +32 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 98 213 290 239 +2 "0s" 0 0 0 0 0 -1 E E E +14 32 +"ack_time" +"" +1 451.0430107526882 32.00000000000003 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +33 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 451 31 637 57 +2 "0s" 0 0 0 0 0 -1 E E E +20 33 +"ackable" +"" +1 449 67.00000000000003 E E E 0 E 1 E N "_3DText" E N "_3DFace" E E + E E +34 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 449 67 480 87 +1 +T +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +1 +E E +EE20 34 +"oldestAck" +"" +1 567 67.00000000000003 E E E 0 E 1 E N "_3DText" E N "_3DFace" E E + E E +35 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 567 67 598 87 +1 +T +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +1 +E E +EE20 35 +"ackOblig" +"" +1 449 97.00000000000003 E E E 0 E 1 E N "_3DText" E N "_3DFace" E E + E E +36 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 
+LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 449 97 481 118 +1 +T +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +1 +E E +EE20 36 +"ackSingle" +"" +1 567 98.00000000000003 E E E 0 E 1 E N "_3DText" E N "_3DFace" E E + E E +37 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 567 98 597 117 +1 +T +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +1 +E E +EE14 37 +"ack_state" +"" +1 448 124 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +38 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 448 123 638 149 +2 "0s" 0 0 0 0 0 -1 E E E +14 38 +"ack_user" +"" +1 448 154 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +39 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 448 153 638 179 +2 "0s" 0 0 0 0 0 -1 E E E +14 39 +"partner" +"" +1 448 184 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +40 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 448 183 638 209 +2 "0s" 0 0 0 0 0 -1 E E E +14 40 +"days" +"" +1 463 214 E E 
E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +41 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 463 213 493 239 +2 "2d" 2 1 1 2 0 -1 E E E +14 41 +"hours" +"" +1 513.3225806451613 214 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +42 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 513 213 543 239 +2 "2d" 2 1 1 2 0 -1 E E E +14 42 +"minutes" +"" +1 563.3225806451613 214 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +43 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 563 213 593 239 +2 "2d" 2 1 1 2 0 -1 E E E +14 43 +"seconds" +"" +1 608.3225806451612 214 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +44 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 608 213 638 239 +2 "2d" 2 1 1 2 0 -1 E E E +14 44 +"ackTypeStr" +"" +1 22 269 E E E 1 E 1 E N "_WindowText" E N "_3DFace" E E + E E +45 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 20 268 642 294 +2 "0s" 0 0 0 0 0 -1 E E E +17 45 +"comment" +"" +1 20 333 E E E 1 E 1 E N {0,0,0} E N "_3DFace" E E + 
E E +46 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 18 333 638 492 +0 + +E +E +E + +0 0 +13 46 +"buDetails" +"" +1 350 565 E E E 1 E 0 E N "_ButtonText" E N "_Button" E E + E E +47 0 0 0 0 0 +E E E +0 +4 +LANG:1 17 Sum alert details +LANG:10000 19 Summenalarm Details +LANG:10001 17 Sum alert details +LANG:10027 37 Детали групп. аларма + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 348 563 436 591 + +T +4 +LANG:1 7 Details +LANG:10000 7 Details +LANG:10001 7 Details +LANG:10027 12 Детали +"// SimpleCtrlScriptStart {valid} +main() +{ + EP_childPanelOn(); +} + +EP_childPanelOn() +{ + ChildPanelOnCentral(\"vision/aes/AESSumAlertDetails.pnl\", + \"\", + makeDynString(\"$dpid:\" + $dpid)); +} + +// SimpleCtrlScript {EP_childPanelOn} +// Function {ChildPanelOnCentral} +// File {vision/aes/AESSumAlertDetails.pnl} +// Panel {} +// Parent {} +// Module {} +// OffsetX {0} +// OffsetY {0} +// PanelPara1 {$dpid:$dpid} +// SimpleCtrlScriptEnd {EP_childPanelOn} +" 0 + E E "main() +{ + int iType; + + dpGet(dpSubStr($dpid,DPSUB_SYS_DP_EL_CONF_DET)+\"._type\", iType); + this.visible = (iType==DPCONFIG_SUM_ALERT); + +}" 0 + +6 47 +"_alert___filtered_bad" +"" +1 576 348 E E E 1 E 0 E N "_3DText" E N "_3DFace" E E + E E +48 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 0 1 E 1 0 1 -79 184 1 E 576 348 596 368 +2 48 +"b1" +"" +1 723 288 E E E 1 E 0 E N "_3DText" E N "_Transparent" E E + E E +49 0 0 0 0 0 +E E E +0 +4 +LANG:1 13 Filter active +LANG:10000 12 Filter aktiv +LANG:10001 13 Filter active +LANG:10027 29 Фильтр применен + +3 +"sizePolicy" "Fixed Fixed" 
+"dashclr"N "_Transparent" +"antiAliased" "0" +"main() +{ + +}" 0 + E 0 1 1 0 1 E U 0 E 503 534 513 549 +0 0 0 "0s" 0 0 0 192 0 0 503 534 1 +4 +LANG:1 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,40,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,40,0,0,0,0,0 +0 4 +LANG:1 1 F +LANG:10000 1 F +LANG:10001 1 F +LANG:10027 1 F +20 49 +"cb_force_filter" +"" +1 350 532 E E E 1 E 0 E N "_3DText" E N "_3DFace" E E + E E +50 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +0 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 348 530 492 557 +1 +T +4 +LANG:1 12 force Filter +LANG:10000 18 erzwinge Filterung +LANG:10001 12 force Filter +LANG:10027 27 Фильтр активен + +0 +1 +LANG:1 0 +E E +0 0 0 0 0 +1 +E E +E"main(int button, int state) +{ + int aSet = alertSet( ((time)$time), $count,dpSubStr($dpid,DPSUB_SYS_DP_EL_CONF_DET)+\"._force_filtered\", ((bool)state)); + + if ( aSet != -1 ) + { + bool filtered, force_filtered; + string dpdetail; // get Dp-detail + dpdetail = strrtrim ( dpSubStr ( $dpid, DPSUB_CONF_DET ), dpSubStr ( $dpid, DPSUB_CONF ) ); + dpdetail = substr ( dpdetail, 1); + if (aes_getFiltered_and_ForceFiltered_Attribute( $dpid, dpdetail, $time, $count, filtered, force_filtered )) + { + dyn_string fore=makeDynString(\"_3DText\",\"_Window\"), back=makeDynString(\"_3DFace\",\"_ButtonShadow\"); + string originalBack; + + if (state == 1) + filtered = true; //because Updating DP is not so fast + setMultiValue(\"_alert___filtered_bad\",\"backCol\",back[filtered+1], + \"_alert___filtered_bad\",\"foreCol\",fore[filtered+1], + \"b1\",\"foreCol\",fore[filtered+1]); + } + } +}" 0 +1 148 0 "" 93 +0 +2 144 +"ptStateBits" +"" +1 75 550 E E E 1 E 0 E N "_WindowText" E N "_Window" E E + E E +51 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" 
"Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 40 550 75 565 +0 2 2 "0s" 0 0 0 194 0 0 75 550 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 6 State: +LANG:10000 7 Status: +LANG:10001 6 State: +LANG:10027 10 СоÑÑ‚.: +2 146 +"ptUserBits" +"" +1 75 582 E E E 1 E 0 E N "_WindowText" E N "_Window" E E + E E +52 0 0 0 0 0 +E E E +0 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +3 +"sizePolicy" "Fixed Fixed" +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 1 1 2 1 E U 0 E 44 582 75 597 +0 2 2 "0s" 0 0 0 194 0 0 75 582 1 +4 +LANG:1 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,50,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,50,0,0,0,0,0 +0 4 +LANG:1 5 User: +LANG:10000 9 Benutzer: +LANG:10001 5 User: +LANG:10027 16 Пользов.: +30 147 +"frBits" +"" +1 10 592 E E E 1 E 0 E N "_WindowText" E N {0,0,0} E E + E E +53 0 0 0 0 0 +E E E +1 +4 +LANG:1 0 +LANG:10000 0 +LANG:10001 0 +LANG:10027 0 + +2 +"dashclr"N "_Transparent" +"antiAliased" "0" +E E 0 0 1 0 1 E 0.9411764705882353 0 1 0.5882352941176521 -79.99999999999997 0 E 10 592 351 691 +4 +LANG:1 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10000 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10001 26 Arial,-1,13,5,75,0,0,0,0,0 +LANG:10027 26 Arial,-1,13,5,75,0,0,0,0,0 +0 4 +LANG:1 4 Bits +LANG:10000 4 Bits +LANG:10001 4 Bits +LANG:10027 8 Биты +0 +LAYER, 1 +1 +LANG:1 0 +0 +LAYER, 2 +1 +LANG:1 0 +0 +LAYER, 3 +1 +LANG:1 0 +0 +LAYER, 4 +1 +LANG:1 0 +0 +LAYER, 5 +1 +LANG:1 0 +0 +LAYER, 6 +1 +LANG:1 0 +0 +LAYER, 7 +1 +LANG:1 0 +0 +3 0 "userBits" -1 +"" "" +"para/userBits64.pnl" 229 590 T 0 1 0 1 -150 -61.99999999999997 +1 +"$BOOL_ALLOWCLICK""FALSE" +0 diff --git a/MAC/Navigator2/scripts/monitorStateChanges.ctl b/MAC/Navigator2/scripts/monitorStateChanges.ctl index 
77c37faa064f5fb234eccf0cf03e4f2b107ac728..a70a13125e80dcce8ac507e9e27efb5ee0f9f834 100644 --- a/MAC/Navigator2/scripts/monitorStateChanges.ctl +++ b/MAC/Navigator2/scripts/monitorStateChanges.ctl @@ -295,6 +295,8 @@ void SetAlertComment( string dpe, string msg, int delaymsec = 10 ) strAlertComment += message + "\uA7"; // Store new comment - alertSet( (time)atLastAlert, getACount(atLastAlert), dpSubStr( getAIdentifier(atLastAlert), DPSUB_SYS_DP_EL_CONF_DET ) + "._comment", strAlertComment ); +// alertSet( (time)atLastAlert, getACount(atLastAlert), dpSubStr( getAIdentifier(atLastAlert), DPSUB_SYS_DP_EL_CONF_DET ) + "._comment", strAlertComment ); + alertSet( (time)atLastAlert, getACount(atLastAlert), dpSubStr( getAIdentifier(atLastAlert), DPSUB_SYS_DP_EL_CONF_DET ) + "._comment", strAlertComment, + (time)atLastAlert, getACount(atLastAlert), dpSubStr( getAIdentifier(atLastAlert), DPSUB_SYS_DP_EL_CONF_DET ) + "._add_value_5", msg ); } diff --git a/MAC/Services/src/ObservationControl2.py b/MAC/Services/src/ObservationControl2.py index 8e1de22dd474b5bebc09b95b880740aea2418ca8..e1a2d4c2581c681ad74177f38f2bf403bc44f2c6 100644 --- a/MAC/Services/src/ObservationControl2.py +++ b/MAC/Services/src/ObservationControl2.py @@ -24,8 +24,9 @@ from optparse import OptionParser from fabric.exceptions import NetworkError try: + # WARNING: This code only works with Fabric Version 1 from fabric import tasks - from fabric.api import env, run + from fabric.api import env, run, settings except ImportError as e: print str(e) print 'Please install python package fabric: sudo apt-get install fabric' @@ -63,15 +64,16 @@ class ObservationControlHandler(MessageHandlerInterface): killed = False - pid_line = run('pidof ObservationControl') - pids = pid_line.split(' ') + with settings(warn_only=True): + pid_line = run('pidof ObservationControl') + pids = pid_line.split(' ') - for pid in pids: - pid_sas_id = run("ps -p %s --no-heading -o command | awk -F[{}] '{ printf $2; }'" % pid) - if 
str(pid_sas_id) == str(sas_id): - logger.info("Killing ObservationControl with PID: %s for SAS ID: %s", pid, sas_id) - run('kill -SIGINT %s' % pid) - killed = True + for pid in pids: + pid_sas_id = run("ps -p %s --no-heading -o command | awk -F[{}] '{ printf $2; }'" % pid) + if str(pid_sas_id) == str(sas_id): + logger.info("Killing ObservationControl with PID: %s for SAS ID: %s", pid, sas_id) + run('kill -SIGINT %s' % pid) + killed = True return killed diff --git a/MAC/Services/test/tObservationControl2.py b/MAC/Services/test/tObservationControl2.py index fef8ecf3e145f27297d14169d4a1ea6323317a85..334cf8cdd5598cbd266fa73582e2863cd1845abe 100644 --- a/MAC/Services/test/tObservationControl2.py +++ b/MAC/Services/test/tObservationControl2.py @@ -40,6 +40,10 @@ class TestObservationControlHandler(unittest.TestCase): self.addCleanup(fabric_env_pathcher.stop) self.fabric_env_mock = fabric_env_pathcher.start() + fabric_settings_pathcher = mock.patch('lofar.mac.ObservationControl2.settings') + self.addCleanup(fabric_settings_pathcher.stop) + self.fabric_settings_mock = fabric_settings_pathcher.start() + logger_patcher = mock.patch('lofar.mac.ObservationControl2.logger') self.addCleanup(logger_patcher.stop) self.logger_mock = logger_patcher.start() @@ -55,15 +59,20 @@ class TestObservationControlHandler(unittest.TestCase): self.observation_control_handler._abort_observation_task(self.sas_id) self.fabric_run_mock.assert_any_call( - "ps -p %s --no-heading -o command | awk -F[{}] '{ print $2; }'" % self.pid1) + "ps -p %s --no-heading -o command | awk -F[{}] '{ printf $2; }'" % self.pid1) self.fabric_run_mock.assert_any_call( - "ps -p %s --no-heading -o command | awk -F[{}] '{ print $2; }'" % self.pid2) + "ps -p %s --no-heading -o command | awk -F[{}] '{ printf $2; }'" % self.pid2) def test_abort_observation_task_should_run_kill_when_sas_id_matches(self): self.observation_control_handler._abort_observation_task(self.sas_id) self.fabric_run_mock.assert_any_call('kill -SIGINT %s' 
% self.pid1) + def test_abort_observation_should_set_run_settings_with_warn_only_as_true(self): + self.observation_control_handler._abort_observation_task(self.sas_id) + + self.fabric_settings_mock.assert_called_with(warn_only=True) + @mock.patch.dict(os.environ, {'LOFARENV': 'TEST'}) def test_observation_control_should_select_test_host_if_lofar_environment_is_test(self): ObservationControlHandler() diff --git a/QA/CMakeLists.txt b/QA/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b17b024a033265167696087c70ebeab3a0ddd00 --- /dev/null +++ b/QA/CMakeLists.txt @@ -0,0 +1,21 @@ +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +lofar_add_package(QA_Common) +lofar_add_package(QA_Service) diff --git a/QA/QA_Common/CMakeLists.txt b/QA/QA_Common/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..3590fb7ee54afa6ea45bc9999cf68c33af785d67 --- /dev/null +++ b/QA/QA_Common/CMakeLists.txt @@ -0,0 +1,26 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +lofar_package(QA_Common 1.0 DEPENDS pyparameterset PyCommon) + +include(PythonInstall) + +add_subdirectory(lib) +add_subdirectory(bin) +add_subdirectory(test) diff --git a/QA/QA_Common/bin/CMakeLists.txt b/QA/QA_Common/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f696cc0dcba6026d49ac4adbcba9e862df57fe5 --- /dev/null +++ b/QA/QA_Common/bin/CMakeLists.txt @@ -0,0 +1,22 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id$ + +lofar_add_bin_scripts(show_hdf5_info + find_hdf5 + create_test_hypercube) diff --git a/QA/QA_Common/bin/create_test_hypercube b/QA/QA_Common/bin/create_test_hypercube new file mode 100755 index 0000000000000000000000000000000000000000..e8da0881f5d73dc29afc0e394bdc49d163a54125 --- /dev/null +++ b/QA/QA_Common/bin/create_test_hypercube @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +import os +from optparse import OptionParser, OptionGroup +from lofar.qa.utils import * +from lofar.qa.hdf5_io import write_hypercube + +import logging +logger = logging.getLogger(__name__) + +def main(): + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + ## Check the invocation arguments + parser = OptionParser(usage='create_test_hypercube [options] <path_to_new_h5_file>', + description='creates a test h5 hypercube with random data for the given number of stations, saps, subbands, timestamps.') + group = OptionGroup(parser, 'Dimensions') + group.add_option('-S', '--stations', dest='stations', type='int', default=3, help='number of stations to create (min=2), default: %default') + group.add_option('-s', '--subbands', dest='subbands', type='int', default=244, help='number of subbands (per sap) to create, default: %default') + group.add_option('-t', '--timestamps', dest='timestamps', type='int', default=128, help='number of timestamps to create, default: %default') + group.add_option('--saps', dest='saps', type='int', default=1, help='number of saps to create, default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, 'General signal options') + group.add_option('--snr', dest='snr', type='float', default=0.9, help='signal to noise ratio. 
The signal is a test image with a full sweep through all phase and amplitudes from [0..1], and the noise is just random complex numbers, default: %default') + group.add_option('-a', '--amplitude', dest='max_signal_amplitude', type='float', default=100, help='the max signal amplitude, default: %default') + group.add_option('-p', '--pol-ratio', dest='parallel_to_cross_polarization_ratio', type='float', default=1, help='the amplitude ratio between parallel and cross polarization, default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, 'Specific signal options') + group.add_option('--pw', '--num_phase_wraps', dest='num_phase_wraps', type='float', default=1, help='the number of times the phase wraps around 2pi along the freq/sb axis, default: %default') + group.add_option('--tsp', '--num_time_sawtooth_periods', dest='num_time_sawtooth_periods', type='float', default=1, help='the number of periods for the sawtooth signal along the time axis, default: %default') + group.add_option('--ssp', '--num_subband_sawtooth_periods', dest='num_subband_sawtooth_periods', type='float', default=0, help='the number of periods for the sawtooth signal along the subband/frequency axis, default: %default') + group.add_option('--tcp', '--num_time_cos_periods', dest='num_time_cos_periods', type='float', default=0, help='the number of periods for the cosine signal along the time axis, default: %default') + group.add_option('--scp', '--num_subband_cos_periods', dest='num_subband_cos_periods', type='float', default=0, help='the number of periods for the cosine signal along the subband/frequency axis, default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, 'Miscellaneous') + group.add_option('-o', '--otdb_id', dest='otdb_id', type='int', default=None, help='optional (fake/test) otdb id, default: %default') + group.add_option('-V', '--verbose', dest='verbose', action='store_true', help='Verbose logging') + parser.add_option_group(group) + + 
(options, args) = parser.parse_args() + + if len(args) != 1: + print 'Please provide a file name for the h5 file which you want to create...' + print + parser.print_help() + exit(1) + + logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.INFO) + + if options.stations < 2: + print 'setting number of stations to minimum of 2' + options.stations = 2 + + cube = create_hypercube(num_stations=options.stations, + num_saps=options.saps, + num_subbands_per_sap={sap:options.subbands for sap in range(options.saps)}, + num_timestamps=options.timestamps, + snr=options.snr, + max_signal_amplitude = options.max_signal_amplitude, + parallel_to_cross_polarization_ratio= options.parallel_to_cross_polarization_ratio, + num_phase_wraps=options.num_phase_wraps, + num_time_sawtooth_periods=options.num_time_sawtooth_periods, + num_subband_sawtooth_periods=options.num_subband_sawtooth_periods, + num_time_cos_periods=options.num_time_cos_periods, + num_subband_cos_periods=options.num_subband_cos_periods) + + write_hypercube(args[0], cube, sas_id=options.otdb_id) + +if __name__ == '__main__': + main() diff --git a/QA/QA_Common/bin/find_hdf5 b/QA/QA_Common/bin/find_hdf5 new file mode 100755 index 0000000000000000000000000000000000000000..19ca4a0cf650c96a76637515c2b182d4c358e9d7 --- /dev/null +++ b/QA/QA_Common/bin/find_hdf5 @@ -0,0 +1,182 @@ +#!/usr/bin/env python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +if __name__ == '__main__': + import logging + logger = logging.getLogger(__name__) + + import os + import os.path + import sys + import fnmatch + import glob + from optparse import OptionParser, OptionGroup + from datetime import datetime, timedelta + + from lofar.parameterset import * + from lofar.qa.hdf5_io import * + from lofar.common.datetimeutils import parseDatetime + + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + # Check the invocation arguments + parser = OptionParser(usage='find_hdf5 [options] <path_or_current_dir_if_omitted>', + description='find all h5 files in <path> matching the given filter options.') + + group = OptionGroup(parser, "Type", "Filter by observation/pipeline type. If all type options are omitted, then all types are selected") + group.add_option('-o', '--observation', dest='observation', action='store_true', default=False, + help='find observations. default: %default') + group.add_option('-p', '--pipeline', dest='pipeline', action='store_true', default=False, + help='find pipelines. default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, "Antenna", "Filter by antenna type (LBA/HBA). If all antenna options are omitted, then all types are selected. If an antenna option is given, then only observations are selected.") + group.add_option('--lba', dest='lba', action='store_true', default=False, + help='find LBA observations. default: %default') + group.add_option('--hba', dest='hba', action='store_true', default=False, + help='find HBA observations. 
default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, "Name/Project", "Filter by observation/project name. Wildcards are allowed.") + group.add_option('--name', dest='name', type='string', default=None, + help='find by observation name (use quotes when using wildcards). default: %default') + group.add_option('--project', dest='project', type='string', default=None, + help='find by project name/description (use quotes when using wildcards). default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, "Date/Duration", "Filter by starttime/endtime date and/or duration.") + group.add_option('-d', '--date', dest='date', type='string', default=None, + help='find by observations/pipelines by date (YYYY-MM-DD). default: %default') + group.add_option('--min_duration', dest='min_duration', type='string', default=None, + help='find by observations/pipelines which are at least this duration long (HH:MM). default: %default') + group.add_option('--max_duration', dest='max_duration', type='string', default=None, + help='find by observations/pipelines which are at most this duration long (HH:MM). default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, "Clusters", "Filter by cluster options.") + group.add_option('-c', '--clusters', dest='clusters', action="store_true", default=False, + help='find clustered h5 files. default: %default') + group.add_option('-C', '--no_clusters', dest='no_clusters', action="store_true", default=False, + help='find non-clustered h5 files. default: %default') + parser.add_option_group(group) + + group = OptionGroup(parser, "Output/Display", "Output format and display options. Display list of matching files by default.") + group.add_option('-i', '--info', dest='info', action='store_true', default=False, + help='show info for each matching file. 
default: %default') + group.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False, + help='verbose logging. default: %default') + parser.add_option_group(group) + + (options, args) = parser.parse_args() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.WARN) + + if not options.observation and not options.pipeline: + options.observation = True + options.pipeline = True + + if options.lba or options.hba: + options.observation = True + options.pipeline = False + + path = os.path.dirname(os.path.expanduser(args[0]) if len(args) == 1 else os.getcwd()) + + files = glob.glob(os.path.join(path, '*.h*5')) + + if path == os.getcwd(): + files = [os.path.basename(file) for file in files] + + files = sorted(files) + + info_dicts = {} + + for file in files: + try: + info_dict = read_info_dict(file) + if info_dict: + info_dicts[file] = info_dict + except: + pass + + if not (options.observation and options.pipeline): + if options.observation: + files = [f for f in files + if f in info_dicts and 'observation' in info_dicts[f].get('type', '').lower()] + + if options.pipeline: + files = [f for f in files + if f in info_dicts and 'pipeline' in info_dicts[f].get('type', '').lower()] + + + if not (options.lba and options.hba): + if options.lba: + files = [f for f in files + if f in info_dicts and 'lba' in info_dicts[f].get('antenna_array', '').lower()] + + if options.hba: + files = [f for f in files + if f in info_dicts and 'hba' in info_dicts[f].get('antenna_array', '').lower()] + + if options.name: + files = [f for f in files if f in info_dicts and + fnmatch.fnmatch(info_dicts[f].get('name', '').lower(), options.name.lower())] + + if options.project: + files = [f for f in files if f in info_dicts and + (fnmatch.fnmatch(info_dicts[f].get('project', '').lower(), options.project.lower()) or + fnmatch.fnmatch(info_dicts[f].get('project_description', '').lower(), 
options.project.lower()))] + + if options.date: + options.date = datetime.strptime(options.date, '%Y-%m-%d').date() + files = [f for f in files if f in info_dicts and + 'start_time' in info_dicts[f] and info_dicts[f]['start_time'].date() == options.date] + + if options.min_duration: + hours, sep, minutes = options.min_duration.partition(':') + options.min_duration = timedelta(hours=int(hours), minutes=int(minutes)) + files = [f for f in files if f in info_dicts and + 'stop_time' in info_dicts[f] and info_dicts[f]['stop_time'].date() == options.date] + + if options.max_duration: + hours, sep, minutes = options.max_duration.partition(':') + options.max_duration = timedelta(hours=int(hours), minutes=int(minutes)) + files = [f for f in files + if f in info_dicts and info_dicts[f].get('duration', timedelta()) <= options.max_duration] + + if options.clusters or options.no_clusters: + def has_clusters(h5_path): + with h5py.File(h5_path, "r+") as file: + return len(file.get('clustering',{})) + + if options.clusters: + files = [f for f in files if has_clusters(f)] + + if options.no_clusters: + files = [f for f in files if not has_clusters(f)] + + if options.info: + for file in files: + print read_info_from_hdf5(file, read_data_info=False) + else: + print '\n'.join(files) + + + diff --git a/QA/QA_Common/bin/show_hdf5_info b/QA/QA_Common/bin/show_hdf5_info new file mode 100755 index 0000000000000000000000000000000000000000..8974053c36133689eadd859fa5d26d6d91cd4e6f --- /dev/null +++ b/QA/QA_Common/bin/show_hdf5_info @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +if __name__ == '__main__': + import logging + logger = logging.getLogger(__name__) + + import os + import os.path + from optparse import OptionParser + + from lofar.qa.hdf5_io import * + + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + # Check the invocation arguments + parser = OptionParser(usage='show_hdf5_info <input_MS_extract_hdf5_file> [options]', + description='show the meta data for the given MS_extract hdf5 file.') + parser.add_option('-d', '--data', dest='data', action='store_true', default=False, help='show data info (SAPs, #baselines, #subbands, #timeslots etc). (warning, slow!) 
default: %default') + parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='Verbose logging') + + (options, args) = parser.parse_args() + + if len(args) != 1: + print parser.print_help() + exit(-1) + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.DEBUG if options.verbose else logging.WARNING) + + hdf_path = os.path.expanduser(args[0]) + + print read_info_from_hdf5(hdf_path, read_data_info=options.data, read_parset_info=True) diff --git a/QA/QA_Common/lib/CMakeLists.txt b/QA/QA_Common/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..b72e082de94987bf0a4d19d95d5ed7490fc54040 --- /dev/null +++ b/QA/QA_Common/lib/CMakeLists.txt @@ -0,0 +1,26 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id$ + +python_install( + __init__.py + hdf5_io.py + geoconversions.py + utils.py + DESTINATION lofar/qa) + diff --git a/QA/QA_Common/lib/__init__.py b/QA/QA_Common/lib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4f54da1af6a2548fa7ac163d34990380f2139bf9 --- /dev/null +++ b/QA/QA_Common/lib/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + diff --git a/QA/QA_Common/lib/geoconversions.py b/QA/QA_Common/lib/geoconversions.py new file mode 100644 index 0000000000000000000000000000000000000000..08fdf805d6b1afa04691a028eb91b149e32b5118 --- /dev/null +++ b/QA/QA_Common/lib/geoconversions.py @@ -0,0 +1,134 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from numpy import sqrt, sin, cos, arctan2, array, cross, dot, ones +from numpy.linalg.linalg import norm +from scipy.interpolate import Rbf # Radial basis function interpolation. +from numpy.linalg import lstsq + +__all__ = ['geographic_from_xyz', 'pqr_cs002_from_xyz'] + + +def normalized_earth_radius(latitude_rad): + wgs84_f = 1./298.257223563 + return 1.0/sqrt(cos(latitude_rad)**2 + ((1.0 - wgs84_f)**2)*(sin(latitude_rad)**2)) + + +def geographic_from_xyz(xyz_m): + ''' + convert xyz coordinates to wgs84 coordinates + :param xyz_m: 1D array/list/tuple of x,y,z in meters + :return: tuple of lat_rad, lon_rad, height_m + ''' + wgs84_a = 6378137.0 + wgs84_f = 1./298.257223563 + wgs84_e2 = wgs84_f*(2.0 - wgs84_f) + + x_m, y_m, z_m = xyz_m + lon_rad = arctan2(y_m, x_m) + r_m = sqrt(x_m**2 + y_m**2) + # Iterate to latitude solution + phi_previous = 1e4 + phi = arctan2(z_m, r_m) + while abs(phi -phi_previous) > 1.6e-12: + phi_previous = phi + phi = arctan2(z_m + wgs84_e2*wgs84_a*normalized_earth_radius(phi)*sin(phi), + r_m) + lat_rad = phi + height_m = r_m*cos(lat_rad) + z_m*sin(lat_rad) - wgs84_a*sqrt(1.0 - wgs84_e2*sin(lat_rad)**2) + return lat_rad, lon_rad, height_m + + +def xyz_from_geographic(lon_rad, lat_rad, height_m): + c = normalized_earth_radius(lat_rad) + wgs84_f = 1./298.257223563 + wgs84_a = 6378137.0 + s = c*((1 - wgs84_f)**2) + return array([ + ((wgs84_a*c) + height_m)*cos(lat_rad)*cos(lon_rad), + ((wgs84_a*c) + height_m)*cos(lat_rad)*sin(lon_rad), + ((wgs84_a*s) + height_m)*sin(lat_rad)]) + + + +def normal_vector_ellipsoid(lon_rad, lat_rad): + 
return array([cos(lat_rad)*cos(lon_rad), + cos(lat_rad)*sin(lon_rad), + sin(lat_rad)]) + +def normal_vector_meridian_plane(xyz_m): + x_m, y_m, _ = xyz_m + return array([y_m, -x_m, 0.0])/sqrt(x_m**2 + y_m**2) + +def projection_matrix(xyz0_m, normal_vector): + r_unit = normal_vector + meridian_normal = normal_vector_meridian_plane(xyz0_m) + q_unit = cross(meridian_normal, r_unit) + q_unit /= norm(q_unit) + p_unit = cross(q_unit, r_unit) + p_unit /= norm(p_unit) + return array([p_unit, q_unit, r_unit]).T + +def transform(xyz_m, xyz0_m, mat): + offsets = xyz_m - xyz0_m + return array([dot(mat, offset) for offset in offsets]) + +LOFAR_XYZ0_m = array([3826574.0, 461045.0, 5064894.5]) +LOFAR_REF_MERIDIAN_NORMAL = normal_vector_meridian_plane(LOFAR_XYZ0_m) +LOFAR_PQR_TO_ETRS_MATRIX = array([[ -1.19595105e-01, -7.91954452e-01, 5.98753002e-01], + [ 9.92822748e-01, -9.54186800e-02, 7.20990002e-02], + [ 3.30969000e-05, 6.03078288e-01, 7.97682002e-01]]) + + + +def pqr_from_xyz(xyz_m, xyz0_m=LOFAR_XYZ0_m, matrix=LOFAR_PQR_TO_ETRS_MATRIX): + return transform(xyz_m, xyz0_m, matrix.T) + +def interpolation_function(pqr): + ''' + Return an interpolation function fn(x, y, z), which returns the value at x, y. 
+ ''' + rbfi = Rbf(pqr[:,0], pqr[:,1], 0.0*pqr[:,2], pqr[:,2], function='linear') + def interpolator(x_m, y_m): + return rbfi(x_m, y_m, y_m*0.0) + return interpolator + + +def fit_plane(xyz): + # data_model z = ax +by +c + # M colvec(a, b, c) = colvec(z) + # M row i = (x_i, y_i, 1.0) + mean_position = xyz.mean(axis=0) + + mat = array([xyz[:,0]- mean_position[0], + xyz[:,1]- mean_position[1], + ones(len(xyz[:,2]))]).T + a, b, c = lstsq(mat, xyz[:,2] - mean_position[2])[0] + normal_vector = array([-a, -b, 1.0]) + normal_vector /= norm(normal_vector) + return {'mean': mean_position, 'normal': normal_vector} + + +def pqr_cs002_from_xyz(xyz_m): + ''' + convert xyz coordinates to lofar pqr coordinates with origin in CS002 + :param xyz_m: 1D array/list/tuple of x,y,z in meters + :return: tuple of pqr coords in meters + ''' + pqr = pqr_from_xyz(array([xyz_m]), + xyz0_m=array([ 3826577.462, 461022.624, 5064892.526])) + return pqr[0][0], pqr[0][1], pqr[0][2] diff --git a/QA/QA_Common/lib/hdf5_io.py b/QA/QA_Common/lib/hdf5_io.py new file mode 100644 index 0000000000000000000000000000000000000000..a3cdf069d7c3bcad31c8de858caa7e5f998e8b25 --- /dev/null +++ b/QA/QA_Common/lib/hdf5_io.py @@ -0,0 +1,1453 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +"""Module hdf5_io offers various methods to read/write/modify hdf5 files containing lofar measurement data. +Such an h5 file is usually generated from Lofar Measurement Sets (MS/casacore format) using the ms2hdf5 conversion tool. + +Since the data is stored in hdf (hierchical data format) and we use python, it makes sense that we use (nested) dicts as data holders. +The file contents is as follows: +- TODO + +External developers using this api whill primarily use the read_hypercube. +If you would like to do your own clustering, then use write_clusters and read_clusters as well. + +:Example: + + from lofar.qa.hdf5_io import * + + # read the data + h5_path = '/my/path/to/myfile.h5' + data = read_hypercube(h5_path, visibilities_in_dB=True, python_datetimes=False, read_flagging=False) + + # do your own processing, for example make clusters (see write_clusters for dict format) + my_clusters = .... #results of your algorithm + + # write your clusters into the same h5 file + # in this case they are stored under 'my_fancy_clustering_attempt_1', and a 'latest' symlink is made to these clustering results. + # multiple clustering results can all be stored in the same file, each with a different algo_name. + write_clusters(h5_path, clusters, algo_name='my_fancy_clustering_attempt_1') +""" + +import os.path +from datetime import datetime, timedelta + +import os +# prevent annoying h5py future/deprecation warnings +os.environ["TF_CPP_MIN_LOG_LEVEL"]="3" + +import h5py +import errno +import numpy as np +from time import sleep + +import logging +logger = logging.getLogger(__name__) + +np.set_printoptions(precision=1) + +class SharedH5File(): + """ + Wrapper class aroung h5py.File to open an hdf5 file in read, write, or read/write mode safely, + even when the file might be used simultanously by other processes. 
+ It waits for <timeout> seconds until the file becomes available. + + Example usage: + + with SharedH5File("foo.h5", 'r') as file: + file["foo"] = "bar" + + """ + def __init__(self, path, mode='a', timeout=900): + self._path = path + self._mode = mode + self._timeout = timeout + self._file = None + + def open(self): + start_timestamp = datetime.utcnow() + while self._file is None: + try: + self._file = h5py.File(self._path, self._mode) + except IOError as e: + if not os.path.exists(self._path): + raise + + logger.warning("Cannot open file '%s' with mode '%s'. Trying again in 1 sec...", + self._path, self._mode) + sleep(max(0, min(1, self._timeout))) + if datetime.utcnow() - start_timestamp > timedelta(seconds=self._timeout): + logger.error("Cannot open file '%s' with mode '%s', even after trying for %s seconds", + self._path, self._mode, self._timeout) + raise + + return self._file + + def close(self): + self._file.close() + self._file = None + + def __enter__(self): + return self.open() + + def __exit__(self, exc_type, exc_val, exc_tb): + return self.close() + +def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_compress=True, **kwargs): + """ + write a hypercube of visibility/flagging data for all saps of an observation/pipeline. + + :param str path: full path of the resulting h5 file. 
By convention we advise to use <observation_id>.MS_exctract.h5 + where observation_id is L<sas_id> for lofar and WSRTA<wsrta_id> for apertif + :param dict saps: each key is the id of a sap, and holds per sap a dict with the following key/value pairs: + + baselines: [string], list of stationpairs (tuples) (these are the ticks on the baseline axis of the visibilities) + + timestamps: [np.double], list of Modified Julian Date (these are the ticks on the time axis of the visibilities) + + central_frequencies: [np.double], list of central frequencies of the subbands (these are the ticks on the frequency axis of the visibilities) + + subbands: [np.int], list of subbands numbers (each subband has a corresponding central_frequency) + + polarizations: [string], list of polarization, one up to four, any of 'XX', 'XY', 'YX', 'YY' + + visibilities: numpy.array, the 4D array of visibilities. In the file these are reduced from doubles to chars by taking the 10.log10 and normalizing the result to fit in the [-128..127] range. + + flagging: numpy.array, the 4D array of flagging booleans. + :param parameterset parset: the optional paramaterset with all the settings which were used for this observation/pipeline + :param int sas_id: the optional observation/pipeline sas_id (the main id to track lofar observations/pipelines) + :param int wsrta_id: the optional observation wsrta_id (the main id to track wsrt apertif observations) + :param bool do_compress: compress the visibilities and flagging data (with lzf compression, slower but smaller output size) + :param dict kwargs: optional extra arguments + :return None + """ + logger.info('writing hypercube to file: %s', path) + + save_dir = os.path.dirname(path) + if not os.path.isabs(save_dir): + save_dir = os.path.join(os.getcwd(), save_dir) + + if not os.path.exists(save_dir): + os.makedirs(save_dir) + + with SharedH5File(path, "w") as file: + version = '1.4' + # 1.1 -> 1.2 change is not backwards compatible by design. 
+ # 1.2 -> 1.3 change is almost backwards compatible, it just needs a dB/linear correction. see convert_12_to_13 + # 1.3 -> 1.4 storing scale factors per baseline per subband per pol, see convert_13_to_14 + ds = file.create_dataset('version', (1,), h5py.special_dtype(vlen=str), version) + ds.attrs['description'] = 'version of this hdf5 MS extract file' + + measurement_group = file.create_group('measurement') + measurement_group.attrs['description'] = 'all data (visibilities, flagging, parset, ...) for this measurement (observation/pipeline)' + + if parset is not None: + parset_str = str(parset) + ds = file.create_dataset('measurement/parset', (1,), h5py.special_dtype(vlen=str), + [parset_str], + compression="lzf") + ds.attrs['description'] = 'the parset of this observation/pipeline with all settings how this data was created' + + if sas_id is not None: + ds = file.create_dataset('measurement/sas_id', data=[sas_id]) + ds.attrs['description'] = 'lofar observation/pipeline sas id' + + if wsrta_id is not None: + ds = file.create_dataset('measurement/wsrta_id', data=[wsrta_id]) + ds.attrs['description'] = 'apertif observation wsrta id' + + saps_group = file.create_group('measurement/saps') + saps_group.attrs['description'] = 'the data (visibilities, flagging, ...) 
is stored per sub-array-pointing (sap)' + + for sap_nr in sorted(saps.keys()): + sap_dict = saps[sap_nr] + baselines = sap_dict['baselines'] + timestamps = sap_dict['timestamps'] + central_frequencies = sap_dict['central_frequencies'] + subbands = sap_dict['subbands'] + polarizations = sap_dict['polarizations'] + visibilities = sap_dict['visibilities'] + flagging = sap_dict['flagging'] + antenna_locations = sap_dict.get('antenna_locations') + + sap_group = file.create_group('measurement/saps/%d' % sap_nr) + ds = sap_group.create_dataset('polarizations', (len(polarizations),), h5py.special_dtype(vlen=str), polarizations) + ds.attrs['description'] = 'polarizations of the visibilities' + + ds = sap_group.create_dataset('baselines', (len(baselines),2), h5py.special_dtype(vlen=str), + [[str(bl[0]), str(bl[1])] for bl in baselines]) + ds.attrs['description'] = 'pairs of baselines between stations' + + if any(isinstance(t, datetime) for t in timestamps): + # try to import lofar.common.datetimeutils here and not at the top of the file + # to make this hdf5_io module as loosly coupled to other lofar code as possible + # do raise the possible ImportError, because we cannot proceed without converted datetimes. 
+ from lofar.common.datetimeutils import to_modified_julian_date_in_seconds + timestamps = [to_modified_julian_date_in_seconds(t) if isinstance(t, datetime) else t for t in timestamps] + + ds = sap_group.create_dataset('timestamps', data=timestamps) + ds.attrs['units'] = 'modified julian date, (fractional) seconds since epoch 1858-11-17 00:00:00' + + ds = sap_group.create_dataset('central_frequencies', data=central_frequencies) + ds.attrs['units'] = 'Hz' + + ds = sap_group.create_dataset('subbands', data=subbands) + ds.attrs['description'] = 'subband number' + + if antenna_locations: + location_group = sap_group.create_group('antenna_locations') + location_group.attrs['description'] = 'the antenna locations in XYZ, PQR, WGS84 coordinates (units: meters and/or radians)' + + for ref_frame in ['XYZ', 'PQR', 'WGS84']: + location_sub_group = location_group.create_group(ref_frame) + location_sub_group.attrs['description'] = 'the antenna locations in %s coordinates (units: meters and/or radians)' % (ref_frame,) + + for antenna, location in antenna_locations[ref_frame].items(): + location_sub_group.create_dataset(antenna, data=location) + + logger.debug('''flagging NaN's and zero's in visibilities for file %s''', path) + zero_or_nan = np.absolute(visibilities) == 0.0 + zero_or_nan[np.isnan(visibilities)] = True + flagging[zero_or_nan] = True + + #we'll scale the 10log10(visibilities) so the complex-float can be mapped onto 2*int8 + logger.debug('normalizing visibilities for file %s', path) + #remove any NaN and/or 0 values in the visibilities? 
log(0) or log(nan) crashes, + # so determine smallest non-zero abs value, and fill that in for the flagged visibilities + try: + abs_non_zero_or_nan_visibilities = np.abs(visibilities)[zero_or_nan == False] + min_non_zero_or_nan_abs_value = max(1e-9, np.min(abs_non_zero_or_nan_visibilities)) + del abs_non_zero_or_nan_visibilities + except ValueError: + min_non_zero_or_nan_abs_value = 1e-12 + + # overwrite all visibilities values where flagging (or 0's or NaN's) occur with the min_non_flagged_value + # that enables us to take the log, and have good dynamic range when scaling to -128...127 + visibilities[zero_or_nan] = min_non_zero_or_nan_abs_value + del zero_or_nan + + # reduce dynamic range (so we fit more data in the available bits) + visibility_amplitudes = np.abs(visibilities) + visibility_amplitudes_dB = 10.0*np.log10(visibility_amplitudes) + visibility_phases = np.exp(1j*np.angle(visibilities)) + visibilities_dB = visibility_amplitudes_dB * visibility_phases + + #compute scale factors per subband, per polarization + scale_factors = np.empty(shape=(len(baselines),len(subbands),len(polarizations)), dtype=np.float32) + + # compute scale factor per baseline/subband/pol to map the visibilities_dB from complex64 to 2xint8 + for bl_idx in range(len(baselines)): + for pol_idx in range(len(polarizations)): + for sb_idx in range(len(subbands)): + #use 99.5 percentile instead if max to get rid of spikes + max_abs_vis_sb = max(1.0, np.percentile(visibility_amplitudes_dB[bl_idx,:,sb_idx,pol_idx], 99.5)) + scale_factor = 127.0 / max_abs_vis_sb + scale_factors[bl_idx, sb_idx, pol_idx] = 1.0/scale_factor + + # store the scale_factors in the file + scale_factor_ds = sap_group.create_dataset('visibility_scale_factors', data=scale_factors) + scale_factor_ds.attrs['description'] = 'scale factors per baseline per subband per polatization to un-normalize the stored visibilities' + scale_factor_ds.attrs['description'] = 'multiply real and imag parts of the visibilities with this 
factor per baseline per subband per polatization to un-normalize them and get the 10log10 values of the real and imag parts of the visibilities' + scale_factor_ds.attrs['units'] = '-' + + # create a array with one extra dimension, so we can split the complex value into two scaled int8's for real and imag part + # looping in python is not the most cpu efficient way + # but is saves us extra copies of the large visibilities array, which might not fit in memory? + logger.debug('converting visibilities from complexfloat to 2xint8 for file %s', path) + extended_shape = visibilities_dB.shape[:] + (2,) + scaled_visibilities = np.empty(extended_shape, dtype=np.int8) + + for bl_idx in range(len(baselines)): + for pol_idx in range(len(polarizations)): + for sb_idx in range(len(subbands)): + scale_factor = 1.0 / scale_factors[bl_idx, sb_idx, pol_idx] + scaled_visibilities[bl_idx,:,sb_idx,pol_idx,0] = scale_factor*visibilities_dB[bl_idx,:,sb_idx,pol_idx].real + scaled_visibilities[bl_idx,:,sb_idx,pol_idx,1] = scale_factor*visibilities_dB[bl_idx,:,sb_idx,pol_idx].imag + + logger.debug('reduced visibilities size from %s to %s bytes (factor %s)', + visibilities.nbytes, scaled_visibilities.nbytes, visibilities.nbytes/scaled_visibilities.nbytes) + + ds = sap_group.create_dataset('visibilities', data=scaled_visibilities, + compression="lzf" if do_compress else None) + ds.attrs['units'] = 'normalized dB within [-128..127]' + ds.attrs['dim[0]'] = 'baselines' + ds.attrs['dim[1]'] = 'timestamps' + ds.attrs['dim[2]'] = 'central_frequencies & subbands' + ds.attrs['dim[3]'] = 'polarizations' + ds.attrs['dim[4]'] = 'real part of normalized within [-128..127] 10log10(visibilities)' + ds.attrs['dim[5]'] = 'imag part of normalized within [-128..127] 10log10(visibilities)' + + ds = sap_group.create_dataset('flagging', data=flagging, + compression="lzf" if do_compress else None) + ds.attrs['units'] = 'bool (true=flagged)' + ds.attrs['dim[0]'] = 'baselines' + ds.attrs['dim[1]'] = 'timestamps' + 
ds.attrs['dim[2]'] = 'central_frequencies & subbands' + ds.attrs['dim[3]'] = 'polarizations' + ds.attrs['dim[4]'] = 'flagging values' + + if parset is not None: + fill_info_folder_from_parset(path) + + try: + # try to import the lofar.common.util.humanreadablesize here and not at the top of the file + # to make this hdf5_io module as loosly coupled to other lofar code as possible + from lofar.common.util import humanreadablesize + logger.info('finished writing %s hypercube to file: %s', humanreadablesize(os.path.getsize(path)), path) + except ImportError: + logger.info('finished writing hypercube to file: %s', path) + + +def read_sap_numbers(path): + """ + read the sap numbers (keys) from the hypercube data from the hdf5 hypercube file given by path. + :param str path: path to the hdf5 file you want to read + :return list: list of sap numbers + """ + logger.info('reading sap numbers from from file: %s', path) + + with SharedH5File(path, "r") as file: + version_str = file['version'][0] + + if version_str not in ['1.2', '1.3', '1.4']: + raise ValueError('Cannot read version %s' % (version_str,)) + + return sorted([int(sap_nr) for sap_nr in file['measurement/saps'].keys()]) + +def read_version(h5_path): + with SharedH5File(h5_path, "r") as file: + return file['version'][0] + +def read_hypercube(path, visibilities_in_dB=True, python_datetimes=False, read_visibilities=True, read_flagging=True, saps_to_read=None): + """ + read the hypercube data from the hdf5 hypercube file given by path. + + :param str path: path to the hdf5 file you want to read + :param bool visibilities_in_dB: return the in dB scale, or linear scale. 
+ :param bool python_datetimes: return the timestamps as python datetime's when True (otherwise modified_julian_date/double) + :param bool read_visibilities: do/don't read visibilities (can save read-time and memory usage) + :param bool read_flagging: do/don't read flagging (can save read-time and memory usage) + :param list saps_to_read: only read these given SAPs (can save read-time and memory usage) + :return dict: same dict structure as in write_hypercube, parameter saps. + seealso:: write_hypercube + """ + logger.info('reading hypercube from file: %s', path) + + if read_version(path) == '1.2': + convert_12_to_13(path) + + if read_version(path) == '1.3': + convert_13_to_14(path) + + # reopen file read-only for safety reasons. + with SharedH5File(path, "r") as file: + if file['version'][0] != '1.4': + raise ValueError('Cannot read version %s' % (file['version'][0],)) + + result = {} + if 'measurement/parset' in file: + parset = read_hypercube_parset(path) + if parset: + result['parset'] = parset + + if 'measurement/sas_id' in file: + result['sas_id'] = file['measurement/sas_id'][0] + + if 'measurement/wsrta_id' in file: + result['wsrta_id'] = file['measurement/wsrta_id'][0] + + result['saps'] = {} + + for sap_nr, sap_dict in file['measurement/saps'].items(): + sap_nr = int(sap_nr) + if saps_to_read and sap_nr not in saps_to_read: + continue + + sap_result = {} + result['saps'][sap_nr] = sap_result + + polarizations = list(sap_dict['polarizations']) + sap_result['polarizations'] = polarizations + + baselines = sap_dict['baselines'][:] + baselines = [(bl[0], bl[1]) for bl in baselines] + sap_result['baselines'] = baselines + + timestamps = sap_dict['timestamps'][:] + if python_datetimes: + try: + # try to import lofar.common.datetimeutils here and not at the top of the file + # to make this hdf5_io module as loosly coupled to other lofar code as possible + from lofar.common.datetimeutils import from_modified_julian_date_in_seconds + timestamps = 
[from_modified_julian_date_in_seconds(t) for t in timestamps] + except ImportError as e: + logger.warning("Could not convert timestamps from modified julian date to python datetimes.") + + sap_result['timestamps'] = timestamps + + central_frequencies = sap_dict['central_frequencies'][:] + sap_result['central_frequencies'] = central_frequencies + + subbands = sap_dict['subbands'][:] + sap_result['subbands'] = subbands + + sap_result['antenna_locations'] = {} + if 'antenna_locations' in sap_dict: + location_group = sap_dict['antenna_locations'] + for ref_frame, location_sub_group in location_group.items(): + sap_result['antenna_locations'][ref_frame] = {} + for antenna, location in location_sub_group.items(): + sap_result['antenna_locations'][ref_frame][antenna] = tuple(location) + + if read_flagging: + flagging = sap_dict['flagging'][:] + sap_result['flagging'] = flagging + + if read_visibilities: + # read the visibility_scale_factors and (scaled_)visibilities + # denormalize them and convert back to complex + scale_factors = sap_dict['visibility_scale_factors'][:] + normalized_visibilities = sap_dict['visibilities'][:] + + logger.debug('denormalizing and converting real/imag to complex visibilities for file sap %s in %s', sap_nr, path) + reduced_shape = normalized_visibilities.shape[:-1] + visibilities = np.empty(reduced_shape, dtype=np.complex64) + + for bl_idx in range(len(baselines)): + for sb_idx in range(len(subbands)): + for pol_idx in range(len(polarizations)): + scale_factor = scale_factors[bl_idx, sb_idx, pol_idx] + visibilities[bl_idx,:,sb_idx,pol_idx].real = scale_factor*normalized_visibilities[bl_idx,:,sb_idx,pol_idx,0] + visibilities[bl_idx,:,sb_idx,pol_idx].imag = scale_factor*normalized_visibilities[bl_idx,:,sb_idx,pol_idx,1] + + if not visibilities_in_dB: + logger.debug('converting visibilities from dB to raw linear for file sap %s in %s', sap_nr, path) + visibilities = np.power(10, 0.1*np.abs(visibilities)) * np.exp(1j * np.angle(visibilities)) + + 
def convert_12_to_13(h5_path):
    """Convert an MS-extract h5 file from version 1.2 to 1.3, in place.

    Reads the visibilities back the v1.2 way (including the v1.2 quirk of
    applying the inverse-log10 to the full complex number), then re-stores
    them the v1.3 way: 10*log10 amplitudes with per-subband/per-polarization
    scale factors, packed as 2x int8 (real, imag).

    :param str h5_path: path to the hdf5 file
    :raises ValueError: if the file is not version 1.2
    """
    with SharedH5File(h5_path, "r+") as file:
        version_str = file['version'][0]

        if version_str != '1.2':
            raise ValueError('Cannot convert version %s to 1.3' % (version_str,))

        logger.info("converting %s from version %s to 1.3", h5_path, version_str)

        for sap_nr, sap_group in file['measurement/saps'].items():
            # read the scale_factors and visibilities in a v1.2 way,
            # including incorrect reverse log10 to undo the incorrect storage of phases
            scale_factors = sap_group['visibility_scale_factors'][:]
            normalized_visibilities = sap_group['visibilities'][:]
            subbands = sap_group['subbands']
            polarizations = sap_group['polarizations']

            # apply v1.2 reconstruction of visibilities
            visibilities = np.empty(normalized_visibilities.shape[:-1], dtype=np.complex64)
            for sb_nr, scale_factor in enumerate(scale_factors):
                visibilities[:, :, sb_nr, :].real = scale_factor * normalized_visibilities[:, :, sb_nr, :, 0]
                visibilities[:, :, sb_nr, :].imag = scale_factor * normalized_visibilities[:, :, sb_nr, :, 1]
            visibilities = np.power(10, 0.1 * visibilities)

            # now we have the original raw visibilities again (including some
            # minor errors in amplitude and phase due to rounding/truncation).
            # let's store them in the correct v1.3 way.

            # reduce dynamic range (so we fit more data in the available bits)
            visibility_amplitudes = np.abs(visibilities)
            visibility_amplitudes_dB = 10.0 * np.log10(visibility_amplitudes)
            visibility_phases = np.exp(1j * np.angle(visibilities))
            visibilities_dB = visibility_amplitudes_dB * visibility_phases

            # compute scale factors per subband, per polarization,
            # mapping the visibilities_dB per subband from complex64 to 2x int8
            scale_factors = np.empty(shape=(len(subbands), len(polarizations)), dtype=np.float32)
            for pol_idx in range(len(polarizations)):
                for sb_nr in range(len(subbands)):
                    # use 99.9 percentile instead of max to get rid of spikes
                    max_abs_vis_sb = max(1.0, np.percentile(visibility_amplitudes_dB[:, :, sb_nr, pol_idx], 99.9))
                    scale_factor = 127.0 / max_abs_vis_sb
                    scale_factors[sb_nr, pol_idx] = 1.0 / scale_factor

            # overwrite the scale_factors in the file
            # (the original code assigned 'description' twice; the first, shorter
            # description was a dead store and is dropped here)
            del sap_group['visibility_scale_factors']
            scale_factor_ds = sap_group.create_dataset('visibility_scale_factors', data=scale_factors)
            scale_factor_ds.attrs['description'] = 'multiply real and imag parts of the visibilities with this factor per subband per polatization to un-normalize them and get the 10log10 values of the real and imag parts of the visibilities'
            scale_factor_ds.attrs['units'] = '-'

            # scale the visibilities in the v1.3 way
            extended_shape = visibilities_dB.shape[:] + (2,)
            scaled_visibilities = np.empty(extended_shape, dtype=np.int8)
            for sb_nr in range(len(subbands)):
                scale_factor = 1.0 / scale_factors[sb_nr]
                scaled_visibilities[:, :, sb_nr, :, 0] = scale_factor * visibilities_dB[:, :, sb_nr, :].real
                scaled_visibilities[:, :, sb_nr, :, 1] = scale_factor * visibilities_dB[:, :, sb_nr, :].imag

            # overwrite the visibilities in the file
            sap_group['visibilities'][:] = scaled_visibilities

        # and finally update the version number
        file['version'][0] = '1.3'

        logger.info("converted %s from version %s to 1.3", h5_path, version_str)


def convert_13_to_14(h5_path):
    """Convert an MS-extract h5 file from version 1.3 to 1.4, in place.

    In v1.3 scale factors were stored per subband per polarization;
    in v1.4 they are stored per baseline per subband per polarization.
    The per-subband factors are broadcast over the new baseline axis.

    :param str h5_path: path to the hdf5 file
    :raises ValueError: if the file is not version 1.3
    """
    with SharedH5File(h5_path, "r+") as file:
        version_str = file['version'][0]

        if version_str != '1.3':
            raise ValueError('Cannot convert version %s to 1.4' % (version_str,))

        logger.info("converting %s from version %s to 1.4", h5_path, version_str)

        for sap_nr, sap_group in file['measurement/saps'].items():
            scale_factors = sap_group['visibility_scale_factors'][:]
            baselines = sap_group['baselines']
            subbands = sap_group['subbands']
            polarizations = sap_group['polarizations']

            # broadcast the (sb, pol) factors over the new leading baseline axis
            scale_factors_new = np.empty(shape=(len(baselines), len(subbands), len(polarizations)), dtype=np.float32)
            scale_factors_new[:] = scale_factors

            # overwrite the scale_factors in the file
            # (the original code assigned 'description' twice; the first, shorter
            # description was a dead store and is dropped here)
            del sap_group['visibility_scale_factors']
            scale_factor_ds = sap_group.create_dataset('visibility_scale_factors', data=scale_factors_new)
            scale_factor_ds.attrs['description'] = 'multiply real and imag parts of the visibilities with this factor per baseline per subband per polatization to un-normalize them and get the 10log10 values of the real and imag parts of the visibilities'
            scale_factor_ds.attrs['units'] = '-'

        # and finally update the version number
        file['version'][0] = '1.4'

        logger.info("converted %s from version %s to 1.4", h5_path, version_str)


def add_parset_to_hypercube(h5_path, otdbrpc):
    """
    helper method which tries to get the parset for the sas_id in the h5 file from otdb via the otdbrpc, and add it to the h5 file.

    Best-effort: any failure is logged (with traceback) and swallowed.

    :param str h5_path: path to the hdf5 file
    :param lofar.sas.otdb.otdbrpc.OTDBRPC otdbrpc: an instance of a OTDBPC client
    """
    try:
        with SharedH5File(h5_path, "r+") as file:
            if 'measurement/sas_id' in file:
                sas_id = file['measurement/sas_id'][0]

                logger.info('trying to get the parset for sas_id %s', sas_id)
                parset = otdbrpc.taskGetSpecification(otdb_id=sas_id)["specification"]

                if parset:
                    if 'measurement/parset' in file:
                        logger.info('removing previous parset from file')
                        del file['measurement/parset']

                    logger.info('adding parset for sas_id %s to %s hdf5 file', sas_id, os.path.basename(h5_path))
                    parset_str = '\n'.join(['%s=%s' % (k, parset[k]) for k in sorted(parset.keys())])
                    ds = file.create_dataset('measurement/parset', (1,), h5py.special_dtype(vlen=str), parset_str,
                                             compression="lzf")
                    ds.attrs['description'] = 'the parset of this observation/pipeline with all settings how this data was created'
                    logger.info('added parset for sas_id %s to %s hdf5 file', sas_id, os.path.basename(h5_path))

                    fill_info_folder_from_parset(h5_path)
    except Exception as e:
        # best-effort boundary: log with traceback instead of bare message
        logger.exception(e)


def read_hypercube_parset(h5_path, as_string=False):
    """
    read the measurement parset from the given hdf5 hypercube file

    :param str h5_path: path to the hdf5 file
    :param bool as_string: return the parset as string instead of as parameterset object if true
    :return parameterset/string: the parset (as string or as parameterset) if any, else None
    """
    logger.info('reading parset from %s hdf5 file', os.path.basename(h5_path))
    with SharedH5File(h5_path, "r") as file:
        if 'measurement/parset' in file:
            parset_str = file['measurement/parset'][0]
            if as_string:
                return '\n'.join(sorted(line.strip() for line in parset_str.split('\n')))

            # try to import the lofar.parameterset here and not at the top of the file
            # to make this hdf5_io module as loosly coupled to other lofar code as possible
            try:
                from lofar.parameterset import parameterset
                parset = parameterset.fromString(parset_str)
                return parset
            except ImportError as e:
                logger.warning("could not import parset: %s", e)


def get_observation_id_str(data):
    """Return a human readable observation id ('L<sas_id>', 'WSRTA<wsrta_id>',
    or 'unknown_id') for the given data dict."""
    if 'sas_id' in data:
        return 'L%d' % data['sas_id']
    if 'wsrta_id' in data:
        return 'WSRTA%d' % data['wsrta_id']
    return 'unknown_id'


def get_default_h5_filename(data, timestamped_if_unknown=True):
    """Return the default h5 filename '<obs_id>.MS_extract.h5' for the given
    data dict; for an unknown id, optionally use a UTC timestamp instead."""
    obs_id = get_observation_id_str(data)
    if 'unknown' in obs_id and timestamped_if_unknown:
        # BUGFIX: '%s' (glibc epoch-seconds extension) was used where '%S'
        # (seconds, 00-59) was clearly intended.
        return datetime.utcnow().strftime('%Y%m%d%H%M%S') + '.MS_extract.h5'
    return obs_id + '.MS_extract.h5'
def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compress=True):
    """
    combine list of hypercubes into one file, for example when you created many h5 file in parallel with one subband per file.

    :param [str] input_paths: paths of the hdf5 files you want to read and combine
    :param str output_dir: directory where to save the resulting combined h5 file
    :param str output_filename: optional output filename. if None, then <get_observation_id_str>.MS_extract.h5 is used
    :param bool do_compress: compress the visibilities and flagging data (with lzf compression, slower but smaller output size)
    :return str: path of the combined output file (None if an error occurred before it was determined)
    """
    input_files = []
    output_path = None
    try:
        input_paths = sorted(input_paths)
        existing_paths = [p for p in input_paths if os.path.exists(p)]
        if not existing_paths:
            raise ValueError('No input h5 files with valid paths given: %s' % (', '.join(input_paths),))

        # convert any 1.2 file to 1.3 if needed.
        # BUGFIX: the 'version' dataset holds strings; the previous comparison
        # against the float 1.2 never matched, so conversion never ran.
        for path in existing_paths:
            with SharedH5File(path, "r") as file:
                if file['version'][0] == '1.2':
                    convert_12_to_13(path)

        # convert any 1.3 file to 1.4 if needed (same string-vs-float bugfix)
        for path in existing_paths:
            with SharedH5File(path, "r") as file:
                if file['version'][0] == '1.3':
                    convert_13_to_14(path)

        input_files = [SharedH5File(p, "r").open() for p in existing_paths]

        versions = set([file['version'][0] for file in input_files])
        if len(versions) != 1:
            raise ValueError('Cannot combine h5 files of multiple versions: %s' % (', '.join(versions),))

        version_str = list(versions)[0]
        if version_str != '1.4':
            raise ValueError('Cannot read version %s' % (version_str,))

        sas_ids = set([file['measurement/sas_id'][0] for file in input_files if 'measurement/sas_id' in file])
        if len(sas_ids) > 1:
            raise ValueError('Cannot combine h5 files of multiple observations with multiple sas_ids: %s' % (', '.join(sas_ids),))
        sas_id = list(sas_ids)[0] if sas_ids else None

        wsrta_ids = set([file['measurement/wsrta_id'][0] for file in input_files if 'measurement/wsrta_id' in file])
        if len(wsrta_ids) > 1:
            raise ValueError('Cannot combine h5 files of multiple observations with multiple wsrta_ids: %s' % (', '.join(wsrta_ids),))
        wsrta_id = list(wsrta_ids)[0] if wsrta_ids else None

        if output_filename is None:
            output_filename = get_default_h5_filename({'sas_id': sas_id} if sas_id else
                                                      {'wsrta_id': wsrta_id} if wsrta_id else None)

        output_path = os.path.join(output_dir, output_filename)
        logger.info('combine_hypercubes: combining %s h5 files into %s', len(input_paths), output_path)

        with SharedH5File(output_path, "w") as output_file:
            version = '1.4'
            ds = output_file.create_dataset('version', (1,), h5py.special_dtype(vlen=str), version)
            ds.attrs['description'] = 'version of this hdf5 MS extract file'

            measurement_group = output_file.create_group('measurement')
            measurement_group.attrs['description'] = 'all data (visibilities, flagging, parset, ...) for this measurement (observation/pipeline)'

            if sas_id is not None:
                ds = output_file.create_dataset('measurement/sas_id', data=[sas_id])
                ds.attrs['description'] = 'observation/pipeline sas id'

            # copy parset from the first input file containing one. assume parset is equal in all input files.
            try:
                input_file = next(f for f in input_files if 'measurement/parset' in f)
                h5py.h5o.copy(input_file.id, 'measurement/parset', output_file.id, 'measurement/parset')
            except StopIteration:
                pass  # no input file with parset, so nothing to copy.

            # make saps group and description
            saps_group = output_file.create_group('measurement/saps')
            saps_group.attrs['description'] = 'the data (visibilities, flagging, ...) is stored per sub-array-pointing (sap)'

            # rest of the items are multi dimensional, and may have different dimensions
            # across the input files (only along the subband axis).
            # gather item values of all files, per sap, then combine, then write in output_file
            value_dicts_per_sap = {}
            for input_file in input_files:
                logger.info('combine_hypercubes: parsing file %s', input_file.filename)

                for sap_nr, sap_dict in input_file['measurement/saps'].items():
                    sap_nr = int(sap_nr)
                    logger.info('combine_hypercubes: parsing sap %d in file %s', sap_nr, input_file.filename)

                    # gather all items of one sap of one file in one dict
                    file_sap_value_dict = {}
                    for item in sap_dict.keys():
                        key = 'measurement/saps/%s/%s' % (sap_nr, item)
                        if item == 'antenna_locations':
                            # antenna locations are a nested group, copy as nested dict
                            file_sap_value_dict[key] = {}
                            location_group = sap_dict['antenna_locations']
                            for ref_frame, location_sub_group in location_group.items():
                                file_sap_value_dict[key][ref_frame] = {}
                                for antenna, location in location_sub_group.items():
                                    file_sap_value_dict[key][ref_frame][antenna] = location
                        else:
                            file_sap_value_dict[key] = input_file[key][:]

                    # this sap of this input file may contain multiple subbands;
                    # split out file_sap_value_dict into one value_dict per subband
                    if sap_nr not in value_dicts_per_sap:
                        # per sap we make lists of value_dicts (one value_dict per subband per file);
                        # we'll sort and combine them later
                        value_dicts_per_sap[sap_nr] = []

                    num_subbands_in_sap_in_input_file = len(file_sap_value_dict['measurement/saps/%s/subbands' % (sap_nr,)])
                    logger.info('combine_hypercubes: num_subbands=%d in sap %d in file %s', num_subbands_in_sap_in_input_file, sap_nr, input_file.filename)

                    for sb_cntr in range(num_subbands_in_sap_in_input_file):
                        value_dict = {}
                        for key, data in file_sap_value_dict.items():
                            if 'visibilities' in key:
                                value_dict[key] = data[:, :, sb_cntr, :, :]
                            elif 'flagging' in key:
                                value_dict[key] = data[:, :, sb_cntr, :]
                            elif any(item in key for item in ['baselines', 'polarizations', 'timestamps', 'antenna_locations']):
                                # subband-independent items are copied as-is
                                value_dict[key] = data
                            elif 'visibility_scale_factors' in key:
                                value_dict[key] = data[:, sb_cntr, :]
                            else:
                                value_dict[key] = data[sb_cntr]

                        # append the value_dict holding the items of a single subband
                        value_dicts_per_sap[sap_nr].append(value_dict)

            logger.info('combine_hypercubes: sorting and combining all subbands and saps into one output file: %s', output_path)

            # all saps and all subbands have been parsed into value_dicts_per_sap; sort and combine them
            for sap_nr, sap_value_dicts in value_dicts_per_sap.items():
                num_subbands = len(sap_value_dicts)
                logger.info('combine_hypercubes: sorting and combining %d subbands for sap %d', num_subbands, sap_nr)
                # sort the sap_value_dicts by subband
                sap_value_dicts = sorted(sap_value_dicts, key=lambda x: x['measurement/saps/%s/subbands' % (sap_nr,)])

                if sap_value_dicts:
                    combined_value_dict = {}
                    # setup numpy arrays based on shape and type of first value_dict,
                    # extending the subband dimension to num_subbands
                    for key, data in sap_value_dicts[0].items():
                        if 'visibilities' in key or 'flagging' in key:
                            shape = list(data.shape)
                            shape.insert(2, num_subbands)
                            shape = tuple(shape)
                        elif 'visibility_scale_factors' in key:
                            # from (#bl,#pol) to (#bl,#sb,#pol)
                            shape = (data.shape[0], num_subbands, data.shape[1])
                        else:
                            shape = (num_subbands,)

                        if 'antenna_locations' not in key:
                            combined_value_dict[key] = np.empty(shape=shape, dtype=data.dtype)

                    # copy each value_dict's data into its subband slice
                    for sb_cntr, value_dict in enumerate(sap_value_dicts):
                        for key, data in value_dict.items():
                            if 'visibilities' in key:
                                combined_value_dict[key][:, :, sb_cntr, :, :] = data
                            elif 'flagging' in key:
                                combined_value_dict[key][:, :, sb_cntr, :] = data
                            elif any(item in key for item in ['baselines', 'polarizations', 'timestamps', 'antenna_locations']):
                                combined_value_dict[key] = data
                            elif 'visibility_scale_factors' in key:
                                combined_value_dict[key][:, sb_cntr, :] = data
                            else:
                                combined_value_dict[key][sb_cntr] = data

                    for key, data in combined_value_dict.items():
                        logger.info('combine_hypercubes: storing %s in %s', key, output_filename)
                        ds_out = None
                        if 'visibilities' in key or 'flagging' in key:
                            ds_out = output_file.create_dataset(key, data=data,
                                                                compression="lzf" if do_compress else None)
                        elif 'antenna_locations' in key:
                            location_group = output_file.create_group(key)
                            location_group.attrs['description'] = 'the antenna locations in XYZ, PQR, WGS84 coordinates (units: meters and/or radians)'
                            for ref_frame, antenna_locations in data.items():
                                location_sub_group = location_group.create_group(ref_frame)
                                location_sub_group.attrs['description'] = 'the antenna locations in %s coordinates (units: meters and/or radians)' % (ref_frame,)

                                for antenna, location in antenna_locations.items():
                                    location_sub_group.create_dataset(antenna, data=location)
                        else:
                            ds_out = output_file.create_dataset(key, data=data)

                        # search first input_file containing this key
                        # and copy all dataset attributes from the input_file to the output_file
                        try:
                            if ds_out:
                                input_file = next(f for f in input_files if key in f)
                                ds_in = input_file[key]

                                for attr_key, attr_value in ds_in.attrs.items():
                                    ds_out.attrs[attr_key] = attr_value
                        except StopIteration:
                            pass  # no input file with key, so nothing to copy.

        fill_info_folder_from_parset(output_path)
    except Exception as e:
        logger.exception('combine_hypercubes: %s', e)
    finally:
        for h5file in input_files:
            h5file.close()

    logger.info('combine_hypercubes: finished combining %s h5 files into %s', len(input_paths), output_path)
    return output_path
+ """ + with SharedH5File(h5_path, "r+") as file: + if 'clustering' in file: + clustering_group = file['clustering'] + else: + clustering_group = file.create_group('clustering') + clustering_group.attrs['description'] = 'clustering results' + + if label == 'latest': + raise ValueError('\'latest\' is a reserved label for a symlink to the actual latest clustering result.') + + if label in clustering_group: + algo_group = clustering_group[label] + else: + algo_group = clustering_group.create_group(label) + algo_group.attrs['description'] = 'clustering results for cluster method: %s' % label + + # always set/update the timestamp of this result + algo_group.attrs['timestamp'] = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + + # update the 'latest' symlink to this label + try: + symlink = h5py.SoftLink('/clustering/' + label) + if 'latest' in clustering_group: + del clustering_group['latest'] + clustering_group['latest'] = symlink + except RuntimeError: + #softlink was already present, just continue. + pass + + if 'saps' in algo_group: + saps_group = algo_group['saps'] + else: + saps_group = algo_group.create_group('saps') + saps_group.attrs['description'] = 'clustering results are stored per sub array pointing' + + for sap_nr, sap_item in saps_dict.items(): + if str(sap_nr) not in saps_group: + sap_group = saps_group.create_group(str(sap_nr)) + sap_group.attrs['description'] = 'clustering results for sub array pointing %d' % sap_nr + + return saps_group.name + + +def _delete_clustering_group_if_empty(h5_path, label): + """ + helper method to delete an empty clustering group + + :param str h5_path: path to the hdf5 file + :param str label: The name/label of the clustering group, for example 'my_clusterer_run_3'. + The always present symlink 'latest' is updated to the next latest clustering group result. 
+ """ + with SharedH5File(h5_path, "r+") as file: + if 'clustering' in file: + clustering_group = file['clustering'] + + if label in clustering_group: + algo_group = clustering_group[label] + + if not algo_group.keys(): #the algo groups is empty..., so delete it + del clustering_group[label] + + timestamped_algo_groups = [algo_group for algo_group in clustering_group.values() if 'timestamp' in algo_group.attrs] + + # update the 'latest' symlink to the latest result + latest = datetime(0, 0, 0) + for algo_group in timestamped_algo_groups: + if algo_group.attrs['timestamp'] >= latest: + clustering_group["latest"] = h5py.SoftLink('/clustering/' + algo_group.name) + +def write_clusters(h5_path, clusters, label=DEFAULT_ALGO_NAME): + """ + write the clusters into an h5 file. + :param str h5_path: path to the h5 file + :param dict clusters: the clusters results dict. + { <sapnr>: { 'clusters': { <nr>: <list_of_baselines>, # for example: [('CS001', 'CS002), ('CS001', 'CS003')] + ... }, + ... }, + ... } + :param str label: A name/label for this clustering result, for example 'my_clusterer_run_3'. + Multiple clustering results can be stored in the same h5 file, as long as the label is unique. + If the label was already present in the file, then it is overwritten. + The always present symlink 'latest' is updated to this clustering result. 
+ """ + logger.info('writing clusters to %s under label \'%s\'', h5_path, label) + saps_group_name = _write_common_clustering_groups(h5_path, clusters, label=label) + + #add indirection level: cluster method (including run-timestamp) + #include parameters and description + with SharedH5File(h5_path, "r+") as file: + saps_group = file[saps_group_name] + for sap_nr, sap_clusters_dict in clusters.items(): + sap_group = saps_group[str(sap_nr)] + + clusters_group = sap_group.create_group('clusters') + clusters_group.attrs['description'] = 'the clusters' + + sap_clusters = sap_clusters_dict['clusters'] + for cluster_nr in sorted(sap_clusters.keys()): + cluster_baselines = sorted(sap_clusters[cluster_nr]) + logger.debug('writing %d baselines in cluster %s for sap %d to %s', len(cluster_baselines), cluster_nr, sap_nr, h5_path) + + ds = clusters_group.create_dataset(str(cluster_nr), data=cluster_baselines) + ds.attrs['description'] = '%d baselines in cluster %d in sap %d' % (len(cluster_baselines), cluster_nr, sap_nr) + logger.info('finished writing clusters to %s', h5_path) + + +def read_clusters(h5_path, label='latest'): + """ + read the clusters from an h5 file. + :param str h5_path: path to the h5 file + :param str label: A name/label for this clustering result, for example 'my_clusterer_run_3', or the always present 'latest'. + :return (dict, list): the clustering_results dict, and the clustering_results annotations list. + + clustering_results = { <sapnr>: { 'clusters': { <nr>: <list_of_baselines>, # for example: [('CS001', 'CS002), ('CS001', 'CS003')] + ... }, + 'annotations': { <cluster_nr> : { 'annotation': <text>, + 'user': <user>, + 'timestamp: <datetime> }, + ... } + ... }, + ... } + + annotations list = [ { 'annotation': <text>, 'user': <user>, 'timestamp: <datetime> }, + { 'annotation': <text>, 'user': <user>, 'timestamp: <datetime> }, + .... 
] + + + """ + result_clusters = {} + result_annotations = [] + + with SharedH5File(h5_path, "r") as file: + if 'clustering' not in file: + logger.debug('could not find any clustering results in %s', h5_path) + return result_clusters, result_annotations + + clustering_group = file['clustering'] + + if label not in clustering_group: + logger.debug('could not find clusters for algorithm \'%s\' for in %s', label, h5_path) + return result_clusters, result_annotations + + algo_group = clustering_group[label] + + logger.info('reading annotations for algorithm \'%s\', timestamp=\'%s\' from %s', label, algo_group.attrs.get('timestamp', '<unknown>'), h5_path) + + if 'annotations' in algo_group: + for anno_nr, anno_ds in algo_group['annotations'].items(): + annotation = anno_ds[0] + cluster_nr = anno_ds.attrs.get('cluster_nr') + user = anno_ds.attrs.get('user') + timestamp = anno_ds.attrs.get('timestamp') + + result_annotations.append({'annotation': annotation, + 'index': anno_nr, + 'user': user, + 'timestamp': datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')}) + + saps_group = algo_group['saps'] + + logger.info('reading clusters for algorithm \'%s\', timestamp=\'%s\' from %s', label, algo_group.attrs.get('timestamp', '<unknown>'), h5_path) + + for sap_nr, sap_dict in saps_group.items(): + sap_nr = int(sap_nr) + sap_clusters_result = {} + sap_clusters_annotations = {} + sap_result = {'clusters': sap_clusters_result, + 'annotations': sap_clusters_annotations } + + if 'clusters' in sap_dict: + logger.debug('reading clusters for sap %d in %s', sap_nr, h5_path) + + result_clusters[sap_nr] = sap_result + + for cluster_nr in sorted(sap_dict['clusters'].keys()): + baselines = sap_dict['clusters'][cluster_nr][:] + cluster_nr = int(cluster_nr) + baselines = [(bl[0], bl[1]) for bl in baselines] + sap_clusters_result[cluster_nr] = baselines + logger.debug('read %d baselines in cluster %d in sap %d', len(baselines), cluster_nr, sap_nr) + else: + logger.debug('could not find clusters for 
sap %d in %s', sap_nr, h5_path) + + if 'annotations' in sap_dict: + logger.debug('reading cluster annotations for sap %d in %s', sap_nr, h5_path) + + for anno_nr, anno_ds in sap_dict['annotations'].items(): + try: + annotation = anno_ds[0] + cluster_nr = int(anno_ds.attrs.get('cluster_nr')) + logger.debug("%s %s", cluster_nr, type(cluster_nr)) + user = anno_ds.attrs.get('user') + timestamp = anno_ds.attrs.get('timestamp') + + if cluster_nr not in sap_clusters_annotations: + sap_clusters_annotations[cluster_nr] = [] + + sap_clusters_annotations[cluster_nr].append({'annotation': annotation, + 'index': anno_nr, + 'user': user, + 'timestamp': datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')}) + except: + pass + + for cluster_nr, sap_clusters_annotation_list in sap_clusters_annotations.items(): + logger.debug('read %d cluster annotations for cluster %d in sap %d', len(sap_clusters_annotation_list), cluster_nr, sap_nr) + else: + logger.debug('could not find cluster annotations for sap %d in %s', sap_nr, h5_path) + + logger.info('read %d clusters for sap %d from %s', len(sap_result), sap_nr, h5_path) + logger.info('finised reading clusters from %s', h5_path) + + return result_clusters, result_annotations + + +def delete_clusters(h5_path, label=DEFAULT_ALGO_NAME): + """ + delete the clustering results with the given label from the h5 file. + :param str h5_path: h5_path to the h5 file + :param str label: the name/label for of the clustering result, for example 'my_clusterer_run_3'. + The always present symlink 'latest' is updated to the next latest clustering result. 
+ """ + with SharedH5File(h5_path, "r+") as file: + if 'clustering' in file: + for name, group in file['clustering'].items(): + if label is None or name==label: + for sap_nr, sap_dict in group['saps'].items(): + if 'clusters' in sap_dict: + logger.info('deleting clusters for sap %s in %s', sap_nr, h5_path) + del sap_dict['clusters'] + + _delete_clustering_group_if_empty(h5_path, label) + + +def _add_annotation_to_group(annotations__parent_group, annotation, user=None, **kwargs): + """ + add an annotation to the cluster in the file at h5_path, given by the clustering label, sap_nr, cluster_nr. + :param str h5_path: h5_path to the h5 file + :param str label: the label of the clustering results group + :param int sap_nr: the sap number withing the clustering results group + :param int cluster_nr: the cluster number withing the sap within the clustering results group + :param str annotation: the annotation for this cluster (can be any free form text) + :param str user: an optional user name + """ + if 'annotations' in annotations__parent_group: + annotations_group = annotations__parent_group['annotations'] + else: + annotations_group = annotations__parent_group.create_group('annotations') + annotations_group.attrs['description'] = 'annotations on this cluster' + + for seq_nr, ds in annotations_group.items(): + if ds[0] == annotation: + if not 'cluster_nr' in kwargs or ('cluster_nr' in kwargs and ds.attrs['cluster_nr'] == kwargs['cluster_nr']): + raise ValueError('annotation "%s" already exists' % (annotation,)) + + seq_nr = max([int(x) for x in annotations_group.keys()])+1 if annotations_group.keys() else 0 + ds = annotations_group.create_dataset(str(seq_nr), (1,), h5py.special_dtype(vlen=str), annotation) + ds.attrs['user'] = user if user else 'anonymous' + ds.attrs['timestamp'] = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + + for key, value in kwargs.items(): + ds.attrs[key] = value + + +def annotate_cluster(h5_path, label, sap_nr, cluster_nr, annotation, 
user=None): + """ + add an annotation to the cluster in the file at h5_path, given by the clustering label, sap_nr, cluster_nr. + :param str h5_path: h5_path to the h5 file + :param str label: the label of the clustering results group + :param int sap_nr: the sap number withing the clustering results group + :param int cluster_nr: the cluster number withing the sap within the clustering results group + :param str annotation: the annotation for this cluster (can be any free form text) + :param str user: an optional user name + """ + with SharedH5File(h5_path, "r+") as file: + if 'clustering' in file: + clustering_group = file['clustering'] + + if label in clustering_group: + algo_group = clustering_group[label] + saps_group = algo_group['saps'] + + if str(sap_nr) in saps_group: + sap_group = saps_group[str(sap_nr)] + _add_annotation_to_group(sap_group, annotation, user, cluster_nr=cluster_nr) + + +def delete_cluster_annotation(h5_path, sap_nr, cluster_nr, annotation_nr, label='latest'): + """ + remove the annotation_nr'th annotation for the cluster in the file at h5_path, given by the clustering label, sap_nr, cluster_nr. 
def annotate_clustering_results(h5_path, label, annotation, user=None):
    """
    add an annotation to the clustering results group with the given label in the file at h5_path.

    :param str h5_path: h5_path to the h5 file
    :param str label: the label of the clustering results group
    :param str annotation: the annotation text (can be any free form text)
    :param str user: an optional user name
    """
    with SharedH5File(h5_path, "r+") as file:
        if 'clustering' in file:
            clustering_group = file['clustering']

            if label in clustering_group:
                algo_group = clustering_group[label]
                _add_annotation_to_group(algo_group, annotation, user)


def annotate_file(h5_path, annotation, user=None):
    """
    add an annotation at top level for the entire file at h5_path.

    :param str h5_path: h5_path to the h5 file
    :param str annotation: the annotation text (can be any free form text)
    :param str user: an optional user name
    """
    with SharedH5File(h5_path, "r+") as file:
        _add_annotation_to_group(file, annotation, user)


def read_file_annotations(h5_path):
    """
    read the top level annotations on this file as a whole.

    :param str h5_path: path to the h5 file
    :return list: an annotations list with the top level annotations on this file as a whole.

    annotations list = [ { 'annotation': <text>, 'user': <user>, 'timestamp: <datetime> },
                         { 'annotation': <text>, 'user': <user>, 'timestamp: <datetime> },
                         .... ]
    """
    result_annotations = []

    with SharedH5File(h5_path, "r") as file:
        if 'annotations' in file:
            for anno_nr, anno_ds in file['annotations'].items():
                # (removed unused cluster_nr read; file-level annotations have no cluster)
                annotation = anno_ds[0]
                user = anno_ds.attrs.get('user')
                timestamp = anno_ds.attrs.get('timestamp')

                result_annotations.append({'annotation': annotation,
                                           'user': user,
                                           'timestamp': datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')})
    return result_annotations


def get_stations(h5_path):
    """
    Return the sorted list of station names occurring as the first element of
    any baseline in any sap of the file.

    NOTE(review): only bl[0] is collected, as in the original — a station
    appearing only as the second baseline element would be missed; verify intent.

    :param str h5_path: path to the h5 file
    :return list: sorted station names
    """
    # BUGFIX: open read-only; this function never writes.
    with SharedH5File(h5_path, "r") as file:
        stations = set()
        for sap_dict in file['measurement/saps'].values():
            baselines = sap_dict['baselines'][:]
            for bl in baselines:
                stations.add(bl[0])
        return sorted(stations)


def read_info_from_hdf5(h5_path, read_data_info=True, read_parset_info=True):
    """
    Read basic info like Project, start/stoptime, stations, etc from h5 file.

    :param str h5_path: h5_path to the h5 file
    :param bool read_data_info: do/don't read data info (how many sap's, baselines, timestamps, subbands).
    :param bool read_parset_info: do/don't read info from the parset (Project, PI, name, start/stop time, etc).
    :return str: A human readable string with the requested info.
    """
    result = {}

    with SharedH5File(h5_path, "r") as file:
        need_to_fill_info_folder_from_parset = 'measurement/info' not in file

    if need_to_fill_info_folder_from_parset:
        # try to convert old style file with parsets only into new files with info.
        fill_info_folder_from_parset(h5_path)

    if read_data_info:
        result = read_hypercube(h5_path, read_visibilities=False, read_flagging=False)

    if read_parset_info:
        parset = read_hypercube_parset(h5_path)
        if parset:
            result['parset'] = parset

    file_annotations = read_file_annotations(h5_path)
    clusters, clustering_algorithm_annotations = read_clusters(h5_path)

    return create_info_string(result, h5_path, file_annotations, clusters, clustering_algorithm_annotations)
+ """ + result = {} + + with SharedH5File(h5_path, "r") as file: + need_to_fill_info_folder_from_parset = 'measurement/info' not in file + + if need_to_fill_info_folder_from_parset: + # try to convert old style file with parsets only into new files with info. + fill_info_folder_from_parset(h5_path) + + if read_data_info: + result = read_hypercube(h5_path, read_visibilities=False, read_flagging=False) + + if read_parset_info: + parset = read_hypercube_parset(h5_path) + if parset: + result['parset'] = parset + + file_annotations = read_file_annotations(h5_path) + clusters, clustering_algorithm_annotations = read_clusters(h5_path) + + return create_info_string(result, h5_path, file_annotations, clusters, clustering_algorithm_annotations) + + +def create_info_string(data, h5_path=None, file_annotations=None, clusters=None, cluster_annotations=None): + info = '' + + try: + parset = data['parset'] + if h5_path: + info += 'File : ' + os.path.basename(h5_path) + '\n' + try: + with SharedH5File(h5_path, "r") as file: + info += 'File version : ' + file['version'][0] + '\n' + except IOError: + pass + + info += 'Project : ' + parset.getString('ObsSW.Observation.Campaign.name') + '\n' + info += 'Project description : ' + parset.getString('ObsSW.Observation.Campaign.title') + '\n' + info += 'Project PI : ' + parset.getString('ObsSW.Observation.Campaign.PI') + '\n' + info += 'Type : ' + parset.getString('ObsSW.Observation.processSubtype') + '\n' + info += 'SAS id : ' + parset.getString('ObsSW.Observation.otdbID') + '\n' + info += 'name : ' + parset.getString('ObsSW.Observation.Scheduler.taskName') + '\n' + info += 'start time (UTC) : ' + parset.getString('ObsSW.Observation.startTime') + '\n' + info += 'stop time (UTC) : ' + parset.getString('ObsSW.Observation.stopTime') + '\n' + + try: + # try to import lofar.common.datetimeutils here and not at the top of the file + # to make this hdf5_io module as loosly coupled to other lofar code as possible + from lofar.common.datetimeutils 
import format_timedelta, parseDatetime + info += 'duration : ' + format_timedelta(parseDatetime(parset.getString('ObsSW.Observation.stopTime')) - + parseDatetime(parset.getString('ObsSW.Observation.startTime'))) + '\n' + except ImportError: + pass #just continue + + if 'observation' in parset.getString('ObsSW.Observation.processSubtype','').lower(): + info += '#Stations : ' + str(len(parset.getStringVector('ObsSW.Observation.VirtualInstrument.stationList'))) + '\n' + info += 'Stations : ' + ','.join(sorted(parset.getStringVector('ObsSW.Observation.VirtualInstrument.stationList'))) + '\n' + info += 'antenna array : ' + parset.getString('ObsSW.Observation.antennaArray') + '\n' + except: + #parset info not available + pass + + if file_annotations: + for i, anno in enumerate(file_annotations): + info += 'annotation[%02d] : \'%s\', by \'%s\' at \'%s\'\n' % (i, anno['annotation'], anno['user'], anno['timestamp'].strftime('%Y-%m-%d %H:%M:%S')) + + if 'saps' in data: + for sap_nr, sap_dict in data['saps'].items(): + info += 'data : sap: %s, #baselines: %s, #timestamps: %s, #subbands: %s, #polarizations: %s' % ( + sap_nr, len(sap_dict['baselines']), len(sap_dict['timestamps']), len(sap_dict['subbands']), len(sap_dict['polarizations'])) + '\n' + + if clusters: + for sap_nr in sorted(clusters.keys()): + sap_dict = clusters[sap_nr] + sap_cluster_dict = sap_dict['clusters'] + info += 'clusters : sap: %s, #clusters: %s, cluster sizes: %s' % ( + sap_nr, len(sap_cluster_dict), ', '.join([str(len(sap_cluster_dict[c_nr])) for c_nr in sorted(sap_cluster_dict.keys())])) + '\n' + + sap_cluster_annotation_dict = sap_dict.get('annotations', {}) + for sap_cluster_nr in sorted(sap_cluster_annotation_dict.keys()): + sap_cluster_annotations = sap_cluster_annotation_dict[sap_cluster_nr] + for sap_cluster_annotation in sap_cluster_annotations: + info += 'annotations : sap: %d cluster: %d : %s %s "%s"\n' % (sap_nr, sap_cluster_nr, + sap_cluster_annotation.get('user', '<unknown>'), + 
sap_cluster_annotation.get('timestamp', '<unknown>'), + sap_cluster_annotation.get('annotation', '<unknown>')) + + return info + + +def fill_info_folder_from_parset(h5_path): + try: + logger.info('fill_info_folder_from_parset for %s', h5_path) + parset = read_hypercube_parset(h5_path) + + if parset is not None: + with SharedH5File(h5_path, "r+") as file: + # remove previous info if present + if 'measurement/info' in file: + del file['measurement/info'] + + info_group = file.create_group('measurement/info') + info_group.attrs['description'] = 'Meta information about the measurement' + + for name, key in [('project', 'Campaign.name'), + ('project_description', 'Campaign.title'), + ('PI', 'Campaign.PI'), + ('type', 'processType'), + ('subtype', 'processSubtype'), + ('SAS_id', 'Campaign.otdbID'), + ('antenna_array', 'antennaArray'), + ('name', 'Scheduler.taskName')]: + ps_key = 'ObsSW.Observation.' + key + ps_value = parset.getString(ps_key, '<unknown>') + info_group.create_dataset(name, (1,), h5py.special_dtype(vlen=str), [ps_value]) + + try: + # try to import lofar.common.datetimeutils here and not at the top of the file + # to make this hdf5_io module as loosly coupled to other lofar code as possible + from lofar.common.datetimeutils import format_timedelta, parseDatetime, totalSeconds + start_time = parset.getString('ObsSW.Observation.startTime') + stop_time = parset.getString('ObsSW.Observation.stopTime') + duration = parseDatetime(stop_time) - parseDatetime(start_time) + info_group.create_dataset('start_time', (1,), h5py.special_dtype(vlen=str), [start_time]) + info_group.create_dataset('stop_time', (1,), h5py.special_dtype(vlen=str), [stop_time]) + ds = info_group.create_dataset('duration', data=[totalSeconds(duration)]) + ds.attrs['description'] = 'duration in seconds' + except (ImportError, RuntimeError, ValueError) as e: + logger.warning('Could not convert start/end time and/or duration in fill_info_folder_from_parset for %s. 
error: %s', h5_path, e) + except Exception as e: + logger.error('Error while running fill_info_folder_from_parset: %s', e) + +def read_info_dict(h5_path): + ''' read the info about the observation/pipeline from the h5 file given by h5_path. + :param str h5_path: h5_path to the h5 file + :return: a dict with the info about the observation/pipeline in native python types, like: + {'PI': 'my_PI', + 'SAS_id': 'my_id', + 'duration': datetime.timedelta(0, 3600), + 'name': 'my_observation_name', + 'project': 'my_project_name', + 'project_description': 'my_project_description', + 'antenna_array': 'LBA', + 'start_time': datetime.datetime(2018, 6, 11, 11, 0), + 'stop_time': datetime.datetime(2018, 6, 11, 12, 0), + 'type': 'my_process_subtype'} ''' + + with SharedH5File(h5_path, "r", timeout=10) as file: + need_to_fill_info_folder_from_parset = 'measurement/info' not in file + + if need_to_fill_info_folder_from_parset: + fill_info_folder_from_parset(h5_path) + + with SharedH5File(h5_path, "r", timeout=10) as file: + info_dict = {} + if 'measurement/info' in file: + for k, v in file['measurement/info'].items(): + k = str(k) + v = v[0] + info_dict[k] = v + + if k == 'start_time' or k == 'stop_time': + # try to import lofar.common.datetimeutils here and not at the top of the file + # to make this hdf5_io module as loosly coupled to other lofar code as possible + try: + from lofar.common.datetimeutils import parseDatetime + info_dict[k] = parseDatetime(v) + except ImportError: + pass + elif k == 'duration': + info_dict[k] = timedelta(seconds=v) + + return info_dict + +def read_SAP_targets(h5_path): + """reads the SAP targets from the parset + :param str h5_path: h5_path to the h5 file + :return: dict of SAP_nr to target name. 
+ """ + + beam_dict = {} + + #TODO: use normal parset lib instead of error prone string parsing + try: + parset_str = read_hypercube_parset(h5_path, as_string=True) + if parset_str: + lines = parset_str.splitlines() + beam_lines = [l for l in lines if 'Observation.Beam[' in l and '.target' in l] + for line in beam_lines: + parts = line.partition('=') + beam_nr = int(parts[0][parts[0].index('[')+1: parts[0].index(']')]) + beam_dict[beam_nr] = parts[2] + + except Exception as e: + logger.error(e) + + return beam_dict + + diff --git a/QA/QA_Common/lib/utils.py b/QA/QA_Common/lib/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9778ea0125b97cf9acd8c1a1e970ffbe8766f1b6 --- /dev/null +++ b/QA/QA_Common/lib/utils.py @@ -0,0 +1,141 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +import numpy as np +from datetime import datetime, timedelta + +from lofar.qa.geoconversions import * + +import logging +import math +logger = logging.getLogger(__name__) + +def create_hypercube(num_saps=3, num_stations=5, num_timestamps=11, num_subbands_per_sap=None, snr=0.9, + max_signal_amplitude=1e5, parallel_to_cross_polarization_ratio=20.0, + num_phase_wraps=1.0, + num_time_sawtooth_periods=1, num_subband_sawtooth_periods=0, + num_time_cos_periods=0, num_subband_cos_periods=0): + data = {} + assert max_signal_amplitude > 1.0 + logger.info('create_hypercube: num_saps=%s num_stations=%s num_timestamps=%s num_subbands_per_sap=%s snr=%s max_amplitude=%s pol_ratio=%s' \ + 'num_phase_wraps=%s num_time_sawtooth_periods=%s num_subband_sawtooth_periods=%s num_time_cos_periods=%s num_subband_cos_periods=%s', + num_saps, num_stations, num_timestamps, num_subbands_per_sap, snr, max_signal_amplitude, parallel_to_cross_polarization_ratio, + num_phase_wraps, num_time_sawtooth_periods, num_subband_sawtooth_periods, num_time_cos_periods, num_subband_cos_periods) + + if num_subbands_per_sap is None: + num_subbands_per_sap = {} + for sap_nr in range(num_saps): + num_subbands_per_sap[sap_nr] = 13*(sap_nr+1) + + stations = ['CS%03d' % (i + 1) for i in range(num_stations)] + baselines = [] + for idx, station1 in enumerate(stations): + for station2 in stations[idx:]: + baselines.append((station1, station2)) + + num_baselines = len(baselines) + + for sap_nr in range(num_saps): + #generate nice test visibilities + num_subbands = num_subbands_per_sap[sap_nr] + + #generate 'ticks' along the polarization-axes + polarizations = ['xx', 'xy', 'yx', 'yy'] + parallel_pol_idxs = [0,3] + cross_pol_idxs = [1,2] + + # create synthetic visibilities signal + baseline_visibilities_signal = np.zeros((num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64) + + for subband_idx in range(num_subbands): + # subband_ratio ranges from 0 to-but-not-including 1.0 + # this ensures the phases 
start at 0rad, and sweep up to but not including 2PIrad + subband_ratio = ((subband_idx+1) / float(num_subbands)) if num_subbands > 1 else 1.0 + sawtooth_subband_amplitude = math.fmod(subband_ratio * num_subband_sawtooth_periods, 1) + if sawtooth_subband_amplitude == 0.0: + sawtooth_subband_amplitude = 1.0 + cos_subband_amplitude = 0.5 * (1.0 + np.cos(num_subband_cos_periods * subband_ratio * 2 * np.pi)) + + for timestamp_idx in range(num_timestamps): + # timestamp_ratio ranges from-and-including 1.0 to 'small'-but-not-zero + # this prevents the visibility_value from becoming 0 (from which we cannot take the log) + timestamp_ratio = ((timestamp_idx+1) / float(num_timestamps)) if num_timestamps > 1 else 1.0 + sawtooth_time_amplitude = math.fmod(timestamp_ratio * num_time_sawtooth_periods, 1) + if sawtooth_time_amplitude == 0.0: + sawtooth_time_amplitude = 1.0 + cos_time_amplitude = 0.5*(1.0+np.cos(num_time_cos_periods*timestamp_ratio * 2 * np.pi)) + + # create synthetic visibility_value + # amplitude varies in time. 
make sure the smallest amplitude is >= 1.0, + # because otherwise we cannot store them with enough bits in dB's + #amplitude = max(1.0, max_signal_amplitude * (sawtooth_time + sawtooth_subband + cos_subband + cos_time)/4.0) + amplitude = max(1.0, max_signal_amplitude * (sawtooth_time_amplitude * sawtooth_subband_amplitude * + cos_subband_amplitude * cos_time_amplitude)) + # phase varies in subband direction + phase = np.exp(1j * subband_ratio * 2.0 * np.pi * num_phase_wraps) + visibility_value_parallel = amplitude * phase + visibility_value_cross = max(1.0, amplitude/parallel_to_cross_polarization_ratio) * phase + baseline_visibilities_signal[timestamp_idx, subband_idx,parallel_pol_idxs] = visibility_value_parallel + baseline_visibilities_signal[timestamp_idx, subband_idx, cross_pol_idxs] = visibility_value_cross + + # use/apply the same visibilities for each baseline + visibilities_signal = np.zeros((num_baselines, num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64) + for i in range(num_baselines): + visibilities_signal[i,:,:,:] = baseline_visibilities_signal + + # create some noise + visibilities_noise = np.zeros((num_baselines, num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64) + visibilities_noise.real = np.random.normal(size=visibilities_noise.shape) + visibilities_noise.imag = np.random.normal(size=visibilities_noise.shape) + visibilities_noise *= max_signal_amplitude/np.max(np.abs(visibilities_noise)) + + # add signal and noise according to given ratio + visibilities = snr*visibilities_signal + (1.0-snr)*visibilities_noise + + # and some empty flagging + flagging = np.zeros(visibilities.shape, dtype=np.bool) + + # generate 'ticks' along the timestamp-axis + now = datetime.utcnow() + timestamps = [now+timedelta(minutes=i) for i in range(num_timestamps)] + + # generate 'ticks' along the central_frequencies-axes + # fill the HBA frequency range of 120-240MHz + central_frequencies = [120e6+i*120e6/max(1,num_subbands-1) for 
i in range(num_subbands)] + sb_offset = sum([len(sap['subbands']) for sap in data.values()]) + subbands = [i for i in range(sb_offset, sb_offset+num_subbands)] + + # create some synthetic antenna locations + antenna_locations = {'XYZ': {}, 'PQR': {}, 'WGS84' : {}} + for i, station in enumerate(stations): + ratio = float(i)/len(stations) + xyz_pos = (np.cos(ratio*2*np.pi),np.sin(ratio*2*np.pi),0) + antenna_locations['XYZ'][station] = xyz_pos + antenna_locations['PQR'][station] = pqr_cs002_from_xyz(xyz_pos) + antenna_locations['WGS84'][station] = geographic_from_xyz(xyz_pos) + + # combine all data in the dict + data[sap_nr] = { 'baselines':baselines, + 'timestamps':timestamps, + 'central_frequencies':central_frequencies, + 'subbands':subbands, + 'polarizations':polarizations, + 'visibilities':visibilities, + 'flagging':flagging, + 'antenna_locations': antenna_locations} + return data + diff --git a/QA/QA_Common/test/CMakeLists.txt b/QA/QA_Common/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..805b871beb4b8d2d05ee2172e6e3029bb915b496 --- /dev/null +++ b/QA/QA_Common/test/CMakeLists.txt @@ -0,0 +1,23 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. 
If not, see <http://www.gnu.org/licenses/>. + +# $Id$ +include(LofarCTest) + +lofar_add_test(t_hdf5_io) + + diff --git a/QA/QA_Common/test/create_test_hypercube b/QA/QA_Common/test/create_test_hypercube new file mode 100755 index 0000000000000000000000000000000000000000..1d368470961a24869562a2443f60ec92a57127df --- /dev/null +++ b/QA/QA_Common/test/create_test_hypercube @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +import os +from optparse import OptionParser +from lofar.qa.test.test_utils import * +from lofar.qa.hdf5_io import write_hypercube + +import logging +logger = logging.getLogger(__name__) + +def main(): + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', + level=logging.INFO) + + ## Check the invocation arguments + parser = OptionParser(usage='create_test_hypercube [options] <path_to_new_h5_file>', + description='creates a test h5 hypercube with random data for the given number of stations, saps, subbands, timestamps.') + parser.add_option('-s', '--stations', dest='stations', type='int', default=3, help='number of stations to create, default: %default') + parser.add_option('-S', '--subbands', dest='subbands', type='int', default=244, help='number of subbands (per sap) to create, default: %default') + parser.add_option('-t', '--timestamps', dest='timestamps', type='int', default=128, help='number of timestamps to create, default: %default') + parser.add_option('--saps', dest='saps', type='int', default=1, help='number of saps to create, default: %default') + parser.add_option('-o', '--otdb_id', dest='otdb_id', type='int', default=None, help='optional (fake/test) otdb id, default: %default') + + (options, args) = parser.parse_args() + + if len(args) != 1: + print 'Please provide a file name for the h5 file which you want to create...' 
+ print + parser.print_help() + exit(1) + + cube = create_hypercube(num_stations=options.stations, + num_saps=options.saps, + num_subbands_per_sap={sap:options.subbands for sap in range(options.saps)}, + num_timestamps=options.timestamps) + write_hypercube(args[0], cube, sas_id=options.otdb_id) + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/QA/QA_Common/test/t_hdf5_io.py b/QA/QA_Common/test/t_hdf5_io.py new file mode 100755 index 0000000000000000000000000000000000000000..fcd97e8c1011eb7028a766412d81591e99570b3e --- /dev/null +++ b/QA/QA_Common/test/t_hdf5_io.py @@ -0,0 +1,499 @@ +#!/usr/bin/env python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +import unittest +import logging +import tempfile +import os +import random +import numpy as np +from datetime import datetime, timedelta + +from lofar.qa.hdf5_io import * +from lofar.parameterset import * +from lofar.common.datetimeutils import to_modified_julian_date_in_seconds + +from lofar.qa.utils import * + +np.set_printoptions(precision=2) + +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(process)s %(message)s', level=logging.INFO) + +class TestHdf5_IO(unittest.TestCase): + def test_sharedh5file(self): + import h5py + from multiprocessing import Process, Event + path = tempfile.mkstemp()[1] + try: + # make sure there is no test file in the way + os.remove(path) + except OSError: + pass + + try: + # for testing synchronization/concurrency + event_created = Event() + event_closed = Event() + + # define helper method to open h5 file in other process + def create_file_and_keep_open_for_a_while(): + logger.info("creating h5 file %s", path) + with h5py.File(path, 'a') as file1: + event_created.set() + logger.info("created/openend h5 file %s, writing contents", path) + file1['foo'] = 'bar' + logger.info("keeping h5 file %s open for 2 seconds", path) + sleep(2) + logger.info("closed h5 file %s", path) + event_closed.set() + + # test if the events aren't set yet (as we did not start the other process yet...) + self.assertFalse(event_created.is_set()) + self.assertFalse(event_closed.is_set()) + + # start process, which opens and keeps open the h5 file... + proc = Process(target=create_file_and_keep_open_for_a_while) + proc.start() + + # wait until file was created + event_created.wait(5) + + # open SharedH5File (it should wait until create_file_and_keep_open_for_a_while is finished) + with SharedH5File(path, 'r+') as file2: + # Now that the SharedH5File is open, this tells us that the other process should be finished and the event is set. + self.assertTrue(event_closed.is_set()) + # was foo=bar really written? 
+ self.assertEqual('bar', file2['foo'].value) + + # proc should already be finished, but let's wait anyway + proc.join() + finally: + try: + logger.info('removing test file: %s', path) + os.remove(path) + except OSError: + pass + + def test_write_and_read_again(self): + '''tests writing and reading an hdf5 file, and checks all parameters except for the visibility data. + See test_write_and_read_and_verify_data for elaborate data verification.''' + logger.info('test_write_and_read_again') + + path = tempfile.mkstemp()[1] + try: + logger.info('generating test data') + num_saps=3 + num_stations=7 + num_timestamps=11 + saps_in = create_hypercube(num_saps=num_saps, num_stations=num_stations, num_timestamps=num_timestamps) + + parset=parameterset() + parset.adoptArgv(['foo=bar']) + + write_hypercube(path, saps_in, parset) + + parset2 = read_hypercube_parset(path) + self.assertEqual(str(parset), str(parset2)) + + result = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True) + + self.assertTrue(result['saps']) + self.assertEqual(num_saps, len(result['saps'])) + + for sap_nr, sap_out in result['saps'].items(): + sap_in = saps_in[sap_nr] + + self.assertTrue('timestamps' in sap_out) + self.assertEqual(len(sap_in['timestamps']), len(sap_out['timestamps'])) + for t_in, t_out in zip(sap_in['timestamps'], sap_out['timestamps']): + self.assertEqual(t_in, t_out) + + self.assertFalse(sap_out['visibilities_in_dB']) + self.assertEqual(sap_in['visibilities'].shape, sap_out['visibilities'].shape) + + self.assertTrue('antenna_locations' in sap_out) + for coords_type in ['XYZ', 'PQR', 'WGS84']: + self.assertTrue(coords_type in sap_out['antenna_locations']) + self.assertEqual(7, len(sap_out['antenna_locations'][coords_type])) + + #test the file annotations + annotate_file(path, 'This file was recorded in front of a live audience ;-)', 'test_user') + file_annotations = read_file_annotations(path) + + self.assertEqual(1, len(file_annotations)) + self.assertEqual('This file 
was recorded in front of a live audience ;-)', file_annotations[0]['annotation']) + self.assertEqual('test_user', file_annotations[0]['user']) + + finally: + logger.info('removing test file: %s', path) + os.remove(path) + + def test_write_and_read_and_verify_data(self): + '''extensive test to verify to correctness of all visibility amplitudes and phases + after it has been written and read back again, bot in raw and dB.''' + logger.info('test_write_and_read_and_verify_data') + + path = tempfile.mkstemp()[1] + + try: + # test over a wide range of possible number of saps, stations, timestamps, subbands, max_amplitude + # because these parameters can influence the applied data reduction in writing, and reconstruction in reading + for num_saps in [1, 2]: + for num_stations in [1, 3, 10]: + for num_timestamps in [1, 2, 10]: + for num_subbands_per_sap in [1, 2, 10]: + for max_amplitude in [100, 1000, 10000]: + for pol_ratio in [1, 10, 50]: + # create a synthetic hypercube with known data which we can verify + # amplitude varies with time + # phase varies with subband + saps_in = create_hypercube(num_saps=num_saps, + num_stations=num_stations, num_timestamps=num_timestamps, + num_subbands_per_sap={sap_nr:num_subbands_per_sap for sap_nr in range(num_saps)}, + snr=1.0, max_signal_amplitude=max_amplitude, + parallel_to_cross_polarization_ratio=pol_ratio) + + for sap_nr, sap_in_raw in saps_in.items(): + # test for correct input test data + max_amplitude_in = np.max(np.abs(sap_in_raw['visibilities'])) + self.assertTrue(np.abs(max_amplitude - max_amplitude_in) < 1e-3*max_amplitude) + + # write the hypercube and parset into an h5 file.... 
+ write_hypercube(path, saps_in) + + # ...and read the data back from file and compare it + # input visibilities are not in dB, so request the output visibilities not to be in dB either + # (side note, visibilities are stored in the h5 file in dB's for better compression) + result_raw = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True) + # but because we usually plot the visibilities in dB, read and check those as well + result_dB = read_hypercube(path, visibilities_in_dB=True, python_datetimes=True) + + self.assertTrue('saps' in result_raw) + self.assertTrue('saps' in result_dB) + saps_out_raw = result_raw['saps'] + saps_out_dB = result_dB['saps'] + self.assertEqual(num_saps, len(saps_out_raw)) + self.assertEqual(num_saps, len(saps_out_dB)) + + for sap_nr, sap_out_raw in saps_out_raw.items(): + sap_in_raw = saps_in[sap_nr] + sap_out_dB = saps_out_dB[sap_nr] + + # compare all in/out timestamps + self.assertTrue('timestamps' in sap_out_raw) + for t_in, t_out in zip(sap_in_raw['timestamps'], sap_out_raw['timestamps']): + self.assertEqual(t_in, t_out) + + # compare all in/out subbands + self.assertTrue('subbands' in sap_out_raw) + for sb_in, sb_out in zip(sap_in_raw['subbands'], sap_out_raw['subbands']): + self.assertEqual(sb_in, sb_out) + + # compare all in/out central_frequencies + self.assertTrue('central_frequencies' in sap_out_raw) + for freq_in, freq_out in zip(sap_in_raw['central_frequencies'], sap_out_raw['central_frequencies']): + self.assertEqual(freq_in, freq_out) + + self.assertFalse(sap_out_raw['visibilities_in_dB']) + self.assertEqual(sap_in_raw['visibilities'].shape, sap_out_raw['visibilities'].shape) + + # compare all in/out visibilities + vis_in_raw = sap_in_raw['visibilities'] + vis_out_raw = sap_out_raw['visibilities'] + vis_out_dB = sap_out_dB['visibilities'] + + # for the raw visibilities, comparison is easy... 
+ # just check the differences in amplitude and in phase + abs_diff_raw = np.abs(vis_in_raw) - np.abs(vis_out_raw) + abs_phase_diff_raw = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_raw), axis=2)) + # phase has no 'meaning' for small (insignificant) amplitudes, + # so just set the phase difference to zero there + abs_phase_diff_raw[np.abs(vis_in_raw) <= max(1.0, 1e-3*max_amplitude)] = 0 + abs_phase_diff_raw[np.abs(vis_out_raw) <= max(1.0, 1e-3*max_amplitude)] = 0 + + # for the visibilities in dB, the phases should be equal to the input phases, + # no matter whether the visibilities are in dB or raw. + # but the amplitudes need conversion from dB back to raw first. + abs_vis_out_raw_from_dB = np.power(10, 0.1*np.abs(vis_out_dB)) + abs_diff_raw_dB = np.abs(vis_in_raw) - abs_vis_out_raw_from_dB + abs_phase_diff_raw_dB = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_dB), axis=2)) + # phase has no 'meaning' for small (insignificant) amplitudes, so just set it to zero there + abs_phase_diff_raw_dB[np.abs(vis_in_raw) <= max(1.0, 1e-3*max_amplitude)] = 0 + abs_phase_diff_raw_dB[abs_vis_out_raw_from_dB <= max(1.0, 1e-3*max_amplitude)] = 0 + + amplitude_threshold = 0.10 * max_amplitude + phase_threshold = 0.025 * 2 * np.pi + + for i in range(vis_in_raw.shape[0]): + for j in range(vis_in_raw.shape[1]): + for k in range(vis_in_raw.shape[2]): + for l in range(vis_in_raw.shape[3]): + self.assertLess(abs_diff_raw[i,j,k,l], amplitude_threshold) + self.assertLess(abs_diff_raw_dB[i,j,k,l], amplitude_threshold) + try: + self.assertLess(abs_phase_diff_raw[i,j,k,l], phase_threshold) + except AssertionError: + # phase is just below 2pi (close to 0) + self.assertLess(2*np.pi-abs_phase_diff_raw[i,j,k,l], phase_threshold) + + try: + self.assertLess(abs_phase_diff_raw_dB[i, j, k, l], phase_threshold) + except AssertionError: + # phase is just below 2pi (close to 0) + self.assertLess(2*np.pi-abs_phase_diff_raw_dB[i, j, k, l], phase_threshold) + + finally: + 
logger.info('removing test file: %s', path) + os.remove(path) + + def test_12_to_13_to_14_conversion(self): + path = tempfile.mkstemp()[1] + + try: + max_amplitude = 1000 + saps_in = create_hypercube(num_saps=1, + num_stations=2, num_timestamps=32, + num_subbands_per_sap={0: 32}, + snr=1.0, max_signal_amplitude=max_amplitude, + parallel_to_cross_polarization_ratio=1.0) + + # write the hypercube and parset into an h5 file.... + # this currently results in a v1.4 file + write_hypercube(path, saps_in, sas_id=123456) + + # check if version is 1.4 + with h5py.File(path, "r") as file: + version_str = file['version'][0] + self.assertEqual('1.4', version_str) + + # change version back to 1.2 + # and modify visibility data to have the 1.2 incorrect phases + with h5py.File(path, "r+") as file: + # revert version... + file['version'][0] = '1.2' + + # revert visibilities. + # Use saps_in's visibilities and the old hdf5_io code to compute and store the incorrect phases. + for sap_nr in sorted(saps_in.keys()): + visibilities_in = saps_in[sap_nr]['visibilities'] + subbands = saps_in[sap_nr]['subbands'] + + # this is v1.2's incorrect dB conversion messing up the phases + visibilities_dB = 10.0 * np.log10(visibilities_in) + abs_vis_dB = np.absolute(visibilities_dB) + + # this is v1.2's way of computing the scale factors per subband only + # compute scale factor per subband to map the visibilities_dB per subband from complex64 to 2xint8 + scale_factors = np.empty((len(subbands),), dtype=np.float32) + for sb_nr in range(len(subbands)): + # use 99.9 percentile instead if max to get rid of spikes + max_abs_vis_sb = np.percentile(abs_vis_dB[:, :, sb_nr, :], 99.9) + scale_factor = 127.0 / max_abs_vis_sb + scale_factors[sb_nr] = 1.0 / scale_factor + + # overwrite the visibility_scale_factors in the file with the v1.2 values + sap_group = file['measurement/saps/%s'%(sap_nr,)] + del sap_group['visibility_scale_factors'] + sap_group.create_dataset('visibility_scale_factors', 
data=scale_factors) + + extended_shape = visibilities_dB.shape[:] + (2,) + scaled_visibilities = np.empty(extended_shape, dtype=np.int8) + for sb_nr in range(len(subbands)): + scale_factor = 1.0 / scale_factors[sb_nr] + scaled_visibilities[:, :, sb_nr, :, 0] = scale_factor * visibilities_dB[:, :, sb_nr, :].real + scaled_visibilities[:, :, sb_nr, :, 1] = scale_factor * visibilities_dB[:, :, sb_nr, :].imag + + # overwrite the visibilities in the file with the v1.2 values + del sap_group['visibilities'] + sap_group.create_dataset('visibilities', data=scaled_visibilities) + + # check if version is 1.2 + with h5py.File(path, "r") as file: + version_str = file['version'][0] + self.assertEqual('1.2', version_str) + + # reading the 1.2 file should result in automatic conversion via 1.3 to 1.4 and correction of phases + result_raw = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True) + + # check if version is now 1.4 (the file was auto-converted via 1.3 to 1.4 on read) + with h5py.File(path, "r") as file: + version_str = file['version'][0] + self.assertEqual('1.4', version_str) + + # read in dB as well because we usually plot the visibilities in dB + result_dB = read_hypercube(path, visibilities_in_dB=True, python_datetimes=True) + + saps_out_raw = result_raw['saps'] + saps_out_dB = result_dB['saps'] + + for sap_nr, sap_in_raw in saps_in.items(): + sap_out_raw = saps_out_raw[sap_nr] + sap_out_dB = saps_out_dB[sap_nr] + + # compare all in/out visibilities + vis_in_raw = sap_in_raw['visibilities'] + vis_out_raw = sap_out_raw['visibilities'] + vis_out_dB = sap_out_dB['visibilities'] + + # for the raw visibilities, comparison is easy... 
+ # just check the differences in amplitude and in phase + abs_diff_raw = np.abs(vis_in_raw) - np.abs(vis_out_raw) + abs_phase_diff_raw = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_raw), axis=2)) + # phase has no 'meaning' for small (insignificant) amplitudes, + # so just set the phase difference to zero there + abs_phase_diff_raw[np.abs(vis_in_raw) <= max(1.0, 1e-3 * max_amplitude)] = 0 + abs_phase_diff_raw[np.abs(vis_out_raw) <= max(1.0, 1e-3 * max_amplitude)] = 0 + + # for the visibilities in dB, the phases should be equal to the input phases, + # no matter whether the visibilities are in dB or raw. + # but the amplitudes need conversion from dB back to raw first. + abs_vis_out_raw_from_dB = np.power(10, 0.1 * np.abs(vis_out_dB)) + abs_diff_raw_dB = np.abs(vis_in_raw) - abs_vis_out_raw_from_dB + abs_phase_diff_raw_dB = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_dB), axis=2)) + # phase has no 'meaning' for small (insignificant) amplitudes, so just set it to zero there + abs_phase_diff_raw_dB[np.abs(vis_in_raw) <= max(1.0, 1e-3 * max_amplitude)] = 0 + abs_phase_diff_raw_dB[abs_vis_out_raw_from_dB <= max(1.0, 1e-3 * max_amplitude)] = 0 + + amplitude_threshold = 0.10 * max_amplitude + phase_threshold = 0.025 * 2 * np.pi + + for i in range(vis_in_raw.shape[0]): + for j in range(vis_in_raw.shape[1]): + for k in range(vis_in_raw.shape[2]): + for l in range(vis_in_raw.shape[3]): + self.assertLess(abs_diff_raw[i, j, k, l], amplitude_threshold) + self.assertLess(abs_diff_raw_dB[i, j, k, l], amplitude_threshold) + self.assertLess(abs_phase_diff_raw[i, j, k, l], phase_threshold) + self.assertLess(abs_phase_diff_raw_dB[i, j, k, l], phase_threshold) + finally: + logger.info('removing test file: %s', path) + os.remove(path) + + + def test_combine_hypercubes(self): + logger.info('test_combine_hypercubes') + + paths = [] + try: + logger.info('generating test data') + num_saps=2 + num_stations=2 + num_timestamps=2 + MAX_AMPLITUDE = 100 + saps_in = 
create_hypercube(num_saps=num_saps, num_stations=num_stations, + num_timestamps=num_timestamps, num_subbands_per_sap={0:1,1:1}, + snr=1.0, max_signal_amplitude=MAX_AMPLITUDE) + + #write each sap to a separate file + for sap_nr, sap_in in saps_in.items(): + path = tempfile.mkstemp()[1] + paths.append(path) + logger.info('writing sap %d to %s', sap_nr, path) + write_hypercube(path, {sap_nr:sap_in}, sas_id=999999) + + combined_filepath = combine_hypercubes(paths, output_dir='/tmp', output_filename=os.path.basename(tempfile.mkstemp()[1])) + self.assertIsNotNone(combined_filepath) + + paths.append(combined_filepath) + + result = read_hypercube(combined_filepath, visibilities_in_dB=False, python_datetimes=True) + + self.assertTrue(result['saps']) + self.assertEqual(num_saps, len(result['saps'])) + + for sap_nr, sap_out in result['saps'].items(): + sap_in = saps_in[sap_nr] + + self.assertTrue(sap_out['timestamps']) + for t_in, t_out in zip(sap_in['timestamps'], sap_out['timestamps']): + self.assertEqual(t_in, t_out) + + self.assertFalse(sap_out['visibilities_in_dB']) + self.assertEqual(sap_in['visibilities'].shape, sap_out['visibilities'].shape) + + diff = sap_in['visibilities'] - sap_out['visibilities'] + error = np.absolute(diff/sap_in['visibilities']) + + median_error = np.median(error) + logger.info('median error %s < threshold %s', median_error, 0.05) + self.assertTrue(median_error < 0.05) + + finally: + for path in paths: + logger.info('removing test file: %s', path) + os.remove(path) + + def test_common_info_from_parset(self): + logger.info('test_common_info_from_parset') + + logger.info('generating test data') + num_saps=1 + num_stations=2 + num_timestamps=3 + saps_in = create_hypercube(num_saps=num_saps, num_stations=num_stations, num_timestamps=num_timestamps) + + parset = parameterset.fromString("""ObsSW.Observation.Campaign.PI="my_PI" + ObsSW.Observation.Campaign.name="my_project_name" + ObsSW.Observation.Campaign.title="my_project_description" + 
ObsSW.Observation.processType="my_process_type" + ObsSW.Observation.processSubtype="my_process_subtype" + ObsSW.Observation.Campaign.otdbID="my_id" + ObsSW.Observation.antennaArray="LBA" + ObsSW.Observation.Scheduler.taskName="my_task_name" + ObsSW.Observation.startTime="2018-06-11 11:00:00" + ObsSW.Observation.stopTime="2018-06-11 12:00:00" + foo="bar" """) + + path = tempfile.mkstemp()[1] + try: + write_hypercube(path, saps_in, parset) + + # make sure the info folder is in the file, + # and delete it so we can test fill_info_folder_from_parset later on + with h5py.File(path, "r+") as file: + self.assertTrue('measurement/info' in file) + del file['measurement/info'] + + with h5py.File(path, "r") as file: + self.assertFalse('measurement/info' in file) + + # call the actual method under test, fill_info_folder_from_parset + fill_info_folder_from_parset(path) + + with h5py.File(path, "r") as file: + self.assertTrue('measurement/info' in file) + + info = read_info_dict(path) + self.assertEqual('my_PI', info['PI']) + self.assertEqual('my_id', info['SAS_id']) + self.assertEqual('my_task_name', info['name']) + self.assertEqual('my_project_name', info['project']) + self.assertEqual('my_project_description', info['project_description']) + self.assertEqual('my_process_type', info['type']) + self.assertEqual('my_process_subtype', info['subtype']) + self.assertEqual('LBA', info['antenna_array']) + self.assertEqual(datetime(2018, 6, 11, 11, 0), info['start_time']) + self.assertEqual(datetime(2018, 6, 11, 12, 0), info['stop_time']) + self.assertEqual(timedelta(0, 3600), info['duration']) + finally: + os.remove(path) + +if __name__ == '__main__': + unittest.main() diff --git a/QA/QA_Common/test/t_hdf5_io.run b/QA/QA_Common/test/t_hdf5_io.run new file mode 100755 index 0000000000000000000000000000000000000000..d0c3f18b8b6fcdb1dea8e85f97ca91d49a8f926b --- /dev/null +++ b/QA/QA_Common/test/t_hdf5_io.run @@ -0,0 +1,23 @@ +#!/bin/bash + +# Copyright (C) 2012-2015 ASTRON (Netherlands 
Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# Run the unit test +source python-coverage.sh +python_coverage_test "*hdf5_io*" t_hdf5_io.py + diff --git a/QA/QA_Common/test/t_hdf5_io.sh b/QA/QA_Common/test/t_hdf5_io.sh new file mode 100755 index 0000000000000000000000000000000000000000..d2a83354c226bab334dd7a0203e89ec69a7ad33e --- /dev/null +++ b/QA/QA_Common/test/t_hdf5_io.sh @@ -0,0 +1,20 @@ +#!/bin/sh + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. 
If not, see <http://www.gnu.org/licenses/>. + +./runctest.sh t_hdf5_io diff --git a/QA/QA_Common/test/test_utils.py b/QA/QA_Common/test/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..f91b6f456742dfd25fbe13b941d1bd1b72824ff4 --- /dev/null +++ b/QA/QA_Common/test/test_utils.py @@ -0,0 +1,79 @@ +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +import numpy as np +from datetime import datetime, timedelta + +from lofar.common.datetimeutils import to_modified_julian_date_in_seconds +from lofar.qa.geoconversions import * + +def create_hypercube(num_saps=3, num_stations=5, num_timestamps=11, num_subbands_per_sap=None): + data = {} + + if num_subbands_per_sap is None: + num_subbands_per_sap = {} + for sap_nr in range(num_saps): + num_subbands_per_sap[sap_nr] = 13*(sap_nr+1) + + stations = ['CS%03d' % (i + 1) for i in range(num_stations)] + baselines = [] + for idx, station1 in enumerate(stations): + for station2 in stations[idx:]: + baselines.append((station1, station2)) + + num_baselines = len(baselines) + + for sap_nr in range(num_saps): + #generate nice test visibilities + num_subbands = num_subbands_per_sap[sap_nr] + + #generate 'ticks' along the polarization-axes + polarizations = ['xx', 'xy', 'yx', 'yy'] + + visibilities = np.empty((num_baselines, num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64) + visibilities.real = np.random.random(visibilities.shape) + visibilities.imag = np.random.random(visibilities.shape) + + #and some flagging + flagging = np.zeros(visibilities.shape, dtype=np.bool) + + now = datetime.utcnow() + timestamps = [now+timedelta(seconds=i) for i in range(num_timestamps)] + timestamps_mjds = np.array([to_modified_julian_date_in_seconds(t) for t in timestamps]) + + #generate 'ticks' along the central_frequencies-axes + central_frequencies = [1e11+i*1e10 for i in range(num_subbands)] + sb_offset = sum([len(sap['subbands']) for sap in data.values()]) + subbands = ['SB%03d'% i for i in range(sb_offset, sb_offset+num_subbands)] + + antenna_locations = {'XYZ': {}, 'PQR': {}, 'WGS84' : {}} + for station in stations: + xyz_pos = (0,0,0) + antenna_locations['XYZ'][station] = xyz_pos + antenna_locations['PQR'][station] = pqr_cs002_from_xyz(xyz_pos) + antenna_locations['WGS84'][station] = geographic_from_xyz(xyz_pos) + + data[sap_nr] = { 'baselines':baselines, + 
'timestamps':timestamps, + 'central_frequencies':central_frequencies, + 'subbands':subbands, + 'polarizations':polarizations, + 'visibilities':visibilities, + 'flagging':flagging, + 'antenna_locations': antenna_locations} + return data + diff --git a/QA/QA_Service/CMakeLists.txt b/QA/QA_Service/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..3da6a2d95811cc7bc01fe5147727e1a6edf4d9c0 --- /dev/null +++ b/QA/QA_Service/CMakeLists.txt @@ -0,0 +1,24 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset) + +add_subdirectory(lib) +add_subdirectory(bin) +add_subdirectory(test) diff --git a/QA/QA_Service/bin/CMakeLists.txt b/QA/QA_Service/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..4247c52141d70ed67b0392115ac4123a8fa649e0 --- /dev/null +++ b/QA/QA_Service/bin/CMakeLists.txt @@ -0,0 +1,27 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +lofar_add_bin_scripts(qa_service qa_webservice) + +# supervisord config files +install(FILES + qa_service.ini + qa_webservice.ini + DESTINATION etc/supervisord.d) + diff --git a/QA/QA_Service/bin/qa_service b/QA/QA_Service/bin/qa_service new file mode 100755 index 0000000000000000000000000000000000000000..33e40bc973feb0d831b4bdddd6159728830e313a --- /dev/null +++ b/QA/QA_Service/bin/qa_service @@ -0,0 +1,23 @@ +#!/usr/bin/env python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +from lofar.qa.service.qa_service import main + +if __name__ == '__main__': + main() diff --git a/QA/QA_Service/bin/qa_service.ini b/QA/QA_Service/bin/qa_service.ini new file mode 100644 index 0000000000000000000000000000000000000000..1ba42b547425ffff84f7544b51e5392525dd6b67 --- /dev/null +++ b/QA/QA_Service/bin/qa_service.ini @@ -0,0 +1,8 @@ +[program:qa_service] +command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec qa_service' +user=lofarsys +stopsignal=INT ; KeyboardInterrupt +stopasgroup=true ; bash does not propagate signals +stdout_logfile=%(program_name)s.log +redirect_stderr=true +stderr_logfile=NONE diff --git a/QA/QA_Service/bin/qa_webservice.ini b/QA/QA_Service/bin/qa_webservice.ini new file mode 100644 index 0000000000000000000000000000000000000000..37bd9b037a2c4558aea2e4b442d000c46a7a3d5f --- /dev/null +++ b/QA/QA_Service/bin/qa_webservice.ini @@ -0,0 +1,10 @@ +; supervisor ini file to start and run the adder_clustering docker image on head.cep4 with the webservice for the adder inspection plots + +[program:qa_webservice] +command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec qa_webservice' +user=lofarsys +stopsignal=INT ; KeyboardInterrupt +stopasgroup=true ; bash does not propagate signals +stdout_logfile=%(program_name)s.log +redirect_stderr=true +stderr_logfile=NONE diff --git a/QA/QA_Service/lib/CMakeLists.txt b/QA/QA_Service/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..a3e87e5a7cbc3ecbc3d8d0bf51eba64dd5f17dca --- /dev/null +++ b/QA/QA_Service/lib/CMakeLists.txt @@ -0,0 +1,26 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ + +python_install( + __init__.py + config.py + qa_service.py + QABusListener.py + DESTINATION lofar/qa/service) + diff --git a/QA/QA_Service/lib/QABusListener.py b/QA/QA_Service/lib/QABusListener.py new file mode 100644 index 0000000000000000000000000000000000000000..e6b8710a3cc2aa08da7a93c9a0aafb4812a96e1c --- /dev/null +++ b/QA/QA_Service/lib/QABusListener.py @@ -0,0 +1,85 @@ +# Copyright (C) 2015 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+# +# $Id$ + +""" +QABusListener listens on the lofar qa message bus and calls (empty) on<SomeMessage> methods when such a message is received. +Typical usage is to derive your own subclass from QABusListener and implement the specific on<SomeMessage> methods that you are interested in. +""" + +from lofar.messaging.messagebus import AbstractBusListener +from lofar.qa.service.config import DEFAULT_QA_NOTIFICATION_BUSNAME, DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX + +import qpid.messaging +import logging +from datetime import datetime + +logger = logging.getLogger(__name__) + + +class QABusListener(AbstractBusListener): + def __init__(self, busname=DEFAULT_QA_NOTIFICATION_BUSNAME, subject=DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX, broker=None, **kwargs): + """ + QABusListener listens on the lofar qa message bus and calls (empty) on<SomeMessage> methods when such a message is received. + Typical usage is to derive your own subclass from QABusListener and implement the specific on<SomeMessage> methods that you are interested in. 
+ :param address: valid Qpid address (default: lofar.otdb.status) + :param broker: valid Qpid broker host (default: None, which means localhost) + additional parameters in kwargs: + options= <dict> Dictionary of options passed to QPID + exclusive= <bool> Create an exclusive binding so no other services can consume duplicate messages (default: False) + numthreads= <int> Number of parallel threads processing messages (default: 1) + verbose= <bool> Output extra logging over stdout (default: False) + """ + address = "%s/%s.#" % (busname, subject) + super(QABusListener, self).__init__(address, broker, **kwargs) + + def _handleMessage(self, msg): + logger.debug("QABusListener.handleMessage: %s" %str(msg)) + + logger.info("received message subject=%s content=%s", msg.subject, msg.content) + subject_suffix = msg.subject.split('.')[-1] + + if subject_suffix == 'ConvertedMS2Hdf5': + self.onConvertedMS2Hdf5(msg.content) + elif subject_suffix == 'CreatedInspectionPlots': + self.onCreatedInspectionPlots(msg.content) + elif subject_suffix == 'Clustered': + self.onClustered(msg.content) + elif subject_suffix == 'Finished': + self.onFinished(msg.content) + elif subject_suffix == 'Error': + self.onError(msg.content) + + def onConvertedMS2Hdf5(self, msg_content): + pass + + def onClustered(self, msg_content): + pass + + def onCreatedInspectionPlots(self, msg_content): + pass + + def onFinished(self, msg_content): + pass + + def onError(self, msg_content): + pass + + +__all__ = ["QABusListener"] diff --git a/QA/QA_Service/lib/__init__.py b/QA/QA_Service/lib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4f54da1af6a2548fa7ac163d34990380f2139bf9 --- /dev/null +++ b/QA/QA_Service/lib/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + diff --git a/QA/QA_Service/lib/config.py b/QA/QA_Service/lib/config.py new file mode 100644 index 0000000000000000000000000000000000000000..ccd9e891949e7746da451e170e1f8488fd8d1928 --- /dev/null +++ b/QA/QA_Service/lib/config.py @@ -0,0 +1,27 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id$ + +from lofar.messaging import adaptNameToEnvironment + +DEFAULT_QA_NOTIFICATION_BUSNAME= adaptNameToEnvironment('lofar.qa.notification') +DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX='QA' + +from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_BUSNAME +DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME = "%s.for.qa_service" % (DEFAULT_OTDB_NOTIFICATION_BUSNAME,) + diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py new file mode 100644 index 0000000000000000000000000000000000000000..2f1d064f52336996936617c5fb5039070e6ddeb4 --- /dev/null +++ b/QA/QA_Service/lib/qa_service.py @@ -0,0 +1,313 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id$ + +import os.path +import logging +from subprocess import call, Popen, PIPE, STDOUT +from optparse import OptionParser, OptionGroup +from threading import Thread +from lofar.common.util import waitForInterrupt +from lofar.sas.otdb.OTDBBusListener import OTDBBusListener +from lofar.qa.service.config import DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME +from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT +from lofar.messaging.messagebus import ToBus +from lofar.messaging.messages import EventMessage +from lofar.qa.service.config import DEFAULT_QA_NOTIFICATION_BUSNAME, DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX +from lofar.common.cep4_utils import * + +logger = logging.getLogger(__name__) + +#TODO: idea: convert periodically while observing? + +class QAService(OTDBBusListener): + ''' + QAService listens on the lofar otdb message bus for NotificationMessages and starts qa processes + upon observation/pipeline completion. The qa processes convert MS (measurement sets) to hdf5 qa files, + and then starts generating plots from the hdf5 file. + ''' + def __init__(self, + qa_notification_busname=DEFAULT_QA_NOTIFICATION_BUSNAME, + qa_notification_subject_prefix=DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX, + otdb_notification_busname=DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME, + otdb_notification_subject=DEFAULT_OTDB_NOTIFICATION_SUBJECT, + broker=None, + qa_base_dir = '/data/qa', + **kwargs): + """ + Instantiate a QAService which listens on the given messagebus for Completion messages. + See also the superclass, OTDBBusListener. + :param string qa_notification_busname: valid Qpid address (default: DEFAULT_QA_NOTIFICATION_BUSNAME) + :param string qa_notification_subject: the subject to listen for. (default: DEFAULT_QA_NOTIFICATION_SUBJECT) + :param string otdb_notification_busname: valid Qpid address (default: DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME) + :param string otdb_notification_subject: the subject to listen for. 
(default: DEFAULT_OTDB_NOTIFICATION_SUBJECT) + :param broker: valid Qpid broker host (default: None, which means localhost) + """ + super(QAService, self).__init__(busname=otdb_notification_busname, + subject=otdb_notification_subject, + broker=broker, + **kwargs) + + self._qa_notification_subject_prefix = qa_notification_subject_prefix + self._send_bus = ToBus(qa_notification_busname, broker=broker) + self.qa_base_dir = qa_base_dir + self._unfinished_otdb_id_map = {} + + def start_listening(self, numthreads=None): + ''' + start listening and open event _send_bus. This method is called in __enter__ when using 'with' context. + ''' + super(QAService, self).start_listening(numthreads=numthreads) + self._send_bus.open() + + def stop_listening(self): + ''' + stop listening and close event _send_bus. This method is called in __exit__ when using 'with' context. + ''' + super(QAService, self).stop_listening() + self._send_bus.close() + + def onObservationCompleting(self, otdb_id, modificationTime): + ''' + this method is called automatically upon receiving a Completion NotificationMessage + :param int otdb_id: the task's otdb database id + :param datetime modificationTime: timestamp when the task's status changed to completing + :return: None + ''' + logger.info("task with otdb_id %s completed.", otdb_id) + + # immediately do qa when the obs is completing, because the data is already on disk... 
+ # and do the handling of the feedback in onObservationFinished + self.do_qa(otdb_id=otdb_id) + + def onObservationFinished(self, otdb_id, modificationTime): + ''' + this method is called automatically upon receiving a Finished NotificationMessage + :param int otdb_id: the task's otdb database id + :param datetime modificationTime: timestamp when the task's status changed to finished + :return: None + ''' + logger.info("task with otdb_id %s finished.", otdb_id) + + # lookup the hdf5_file_path for the given otdb_id + # and (re)add the parset to the file (which now includes feedback) + hdf5_file_path = self._unfinished_otdb_id_map.get(otdb_id) + if hdf5_file_path: + del self._unfinished_otdb_id_map[otdb_id] + + try: + # import here and not at top of file + # because on some systems h5py is not available + # and then only this method fails, which is less bad than the whole service failing. + from lofar.qa.hdf5_io import add_parset_to_hypercube + from lofar.sas.otdb.otdbrpc import OTDBRPC + + with OTDBRPC(broker=self._send_bus.broker) as otdbrpc: + add_parset_to_hypercube(hdf5_file_path, otdbrpc) + except Exception as e: + logger.warning("Cannot add parset with feedback for otdb=%s. error: %s", otdb_id, e) + + def do_qa(self, otdb_id): + ''' + try to do all qa (quality assurance) steps for the given otdb_id + resulting in an h5 MS-extract file and inspection plots + :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. 
+ :return: None + ''' + hdf5_file_path = self._convert_ms2hdf5(otdb_id) + if hdf5_file_path: + # keep a note of where the h5 file was stored for this unfinished otdb_id + self._unfinished_otdb_id_map[otdb_id] = hdf5_file_path + + # cluster it + self._cluster_h5_file(hdf5_file_path, otdb_id) + + plot_dir_path = self._create_plots_for_h5_file(hdf5_file_path, otdb_id) + + # and notify that we're finished + self._send_event_message('Finished', {'otdb_id': otdb_id, + 'hdf5_file_path': hdf5_file_path, + 'plot_dir_path': plot_dir_path or ''}) + + def _send_event_message(self, subject_suffix, content): + try: + subject = '%s.%s' % (self._qa_notification_subject_prefix, subject_suffix) + msg = EventMessage(context=subject, content=content) + logger.info('sending event message %s: %s', subject, content) + self._send_bus.send(msg) + except Exception as e: + logger.error('Could not send event message: %s', e) + + def _convert_ms2hdf5(self, otdb_id): + ''' + convert the MS for the given otdb_id to an h5 MS-extract file. + The conversion will run via ssh on cep4 with massive parellelization. + When running on cep4, it is assumed that a docker image called adder exists on head.cep4 + When running locally, it is assumed that ms2hdf5 is installed locally. + :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. + :return string: path to the generated h5 file. 
+ ''' + try: + logger.info('trying to convert MS uv dataset with otdb_id %s if any', otdb_id) + + cmd = ['ms2hdf5', '-o', str(otdb_id), '--cep4', '-p', '-20'] + + # define default h5 filename use default cep4 qa output dir + h5_filename = 'L%s.MS_extract.h5' % otdb_id + h5_dir_path = os.path.join(self.qa_base_dir, 'ms_extract') + cmd += ['--output_dir', h5_dir_path] + cmd += ['--output_filename', h5_filename] + + # wrap the command in a cep4 docker ssh call + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(cmd) + + logger.info('starting ms2hdf5, executing: %s', ' '.join(cmd)) + + if call(cmd) == 0: + hdf5_path = os.path.join(h5_dir_path, h5_filename) + logger.info('converted uv dataset with otdb_id %s to hdf5 file %s', otdb_id, hdf5_path) + self._send_event_message('ConvertedMS2Hdf5', {'otdb_id': otdb_id, 'hdf5_file_path': hdf5_path}) + return hdf5_path + else: + msg = 'could not convert dataset with otdb_id %s' % otdb_id + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) + + except Exception as e: + logging.exception('error in _convert_ms2hdf5: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'message': e.message}) + return None + + def _create_plots_for_h5_file(self, hdf5_path, otdb_id=None): + ''' + create plots for the given h5 file. The plots are created via an ssh call to cep4 + where the plots are created in parallel in the docker image. + :param hdf5_path: the full path to the hdf5 file for which we want the plots. + :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) + :return: the full directory path to the directory containing the created plots. + ''' + try: + #use default cep4 qa output dir. 
+ plot_dir_path = os.path.join(self.qa_base_dir, 'inspectionplots') + task_plot_dir_path = '' + all_plots_succeeded = True + + for plot_options in [['-1', '-m'], # hot autocor, complex crosscor, in dB + ['-1', '-mn', '--raw'], # normalized hot autocor, normalized complex crosscor, in raw + ['-4']]: # delay-rate + cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (plot_dir_path,), '--optimize', '--force', '--cep4'] + plot_options + [hdf5_path] + + # wrap the command in a cep4 ssh call to docker container + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('generating plots for otdb_id %s, executing: %s', otdb_id, ' '.join(cmd)) + + if call(cmd) == 0: + task_plot_dir_path = os.path.join(plot_dir_path, 'L%s' % otdb_id) + logger.info('generated plots for otdb_id %s in %s with command=%s', otdb_id, + task_plot_dir_path, + ' '.join(cmd)) + else: + all_plots_succeeded &= False + msg = 'could not generate plots for otdb_id %s cmd=%s' % (otdb_id, ' '.join(cmd)) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, + 'message': msg}) + + self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id, + 'hdf5_file_path': hdf5_path, + 'plot_dir_path': task_plot_dir_path}) + return task_plot_dir_path + except Exception as e: + logging.exception('error in _create_plots_for_h5_file: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'message': e.message}) + return None + + + def _cluster_h5_file(self, hdf5_path, otdb_id=None): + ''' + Try to cluster the baselines based on visibilities in the h5 file + using the clustering docker image developed by e-science. + This method assumes the adder_clustering docker image is available on cep4. If not, or if anything else + goes wrong, then the qa steps can just continue on the un-clustered h5 file. 
+ The docker image can be build from the source on github: + https://github.com/NLeSC/lofar-predictive-maintenance + This is a private repo until the project has been published. At astron, jorrit has access. + In the future, we might incorporate the clustering code from the github repo in to the LOFAR source tree. + :param hdf5_path: the full path to the hdf5 file for which we want the plots. + :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) + :return: None + ''' + try: + # the command to cluster the given h5 file (executed in the e-science adder docker image) + cmd = ['cluster_this.py', hdf5_path] + cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('clustering hdf5 file %s otdb_id %s, executing: %s', hdf5_path, otdb_id, ' '.join(cmd)) + + if call(cmd) == 0: + logger.info('clustered hdf5 file %s otdb_id %s', hdf5_path, otdb_id) + + self._send_event_message('Clustered', {'otdb_id': otdb_id, + 'hdf5_file_path': hdf5_path}) + else: + msg = 'could not cluster hdf5 file %s otdb_id %s' % (hdf5_path, otdb_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) + except Exception as e: + logging.exception('error in _cluster_h5_file: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'message': e.message}) + + + +def main(): + ''' + Run the qa service program with commandline arguments. 
+ ''' + + # Check the invocation arguments + parser = OptionParser("%prog [options]", + description='run the qa_service which listens for observations/pipelines finished events on ' + 'the bus and then starts the QA (Quality Assurance) processes to convert MS to ' + 'hdf5 files and generate inspection plots.') + group = OptionGroup(parser, 'QPid Messaging options') + group.add_option('-q', '--broker', dest='broker', type='string', default='localhost', help='Address of the qpid broker, default: %default') + group.add_option("--otdb_notification_busname", dest="otdb_notification_busname", type="string", + default=DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME, + help="Bus or queue where the OTDB notifications are published. [default: %default]") + group.add_option("--otdb_notification_subject", dest="otdb_notification_subject", type="string", + default=DEFAULT_OTDB_NOTIFICATION_SUBJECT, + help="Subject of OTDB notifications on otdb_notification_busname. [default: %default]") + parser.add_option_group(group) + (options, args) = parser.parse_args() + + #config logging + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + #start the qa service + with QAService(otdb_notification_busname=options.otdb_notification_busname, + otdb_notification_subject=options.otdb_notification_subject, + broker=options.broker) as service: + #loop and wait for messages or interrupt. + waitForInterrupt() + +if __name__ == '__main__': + main() diff --git a/QA/QA_Service/test/CMakeLists.txt b/QA/QA_Service/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c2f2e66afa83079482e3c08dd54d6a3981cbaa9 --- /dev/null +++ b/QA/QA_Service/test/CMakeLists.txt @@ -0,0 +1,23 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ +include(LofarCTest) + +lofar_add_test(t_qa_service) + + diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py new file mode 100755 index 0000000000000000000000000000000000000000..c9e9a095885cb2520c24d6fba32e8c775c5ac187 --- /dev/null +++ b/QA/QA_Service/test/t_qa_service.py @@ -0,0 +1,473 @@ +#!/usr/bin/env python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +try: + from qpid.messaging import Connection + from qpid.messaging.exceptions import * + from qpidtoollibs import BrokerAgent +except ImportError: + print 'Cannot run test without qpid tools' + print 'Please source qpid profile' + exit(3) + +import unittest +import uuid +from threading import Event +import shutil +import mock + +import logging +logger = logging.getLogger(__name__) + +from lofar.qa.service.qa_service import QAService +from lofar.qa.service.QABusListener import * +from lofar.qa.hdf5_io import * +from lofar.messaging.messagebus import ToBus +from lofar.messaging.messages import EventMessage +from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT + + +# the tests below test is multi threaded (even multi process) +# define a QABusListener-derivative to handle synchronization (set the *_events) +class SynchronizingQABusListener(QABusListener): + ''' + the tests below test is multi threaded (even multi process) + this QABusListener-derivative handles synchronization (set the *_events) + and stores the msg_content results for expected result checking + ''' + def __init__(self, busname): + super(SynchronizingQABusListener, self).__init__(busname=busname) + self.converted_event = Event() + self.clustered_event = Event() + self.plotted_event = Event() + self.finished_event = Event() + self.error_event = Event() + + def onConvertedMS2Hdf5(self, msg_content): + self.converted_msg_content = msg_content + self.converted_event.set() + + def onCreatedInspectionPlots(self, msg_content): + self.plotted_msg_content = msg_content + self.plotted_event.set() + + def onFinished(self, msg_content): + self.finished_msg_content = msg_content + self.finished_event.set() + + def onClustered(self, msg_content): + self.clustered_msg_content = msg_content + self.clustered_event.set() + + def onError(self, msg_content): + self.error_msg_content = msg_content + self.error_event.set() + + +class TestQAService(unittest.TestCase): + ''' + Tests for the QAService class 
+    '''
+    def setUp(self):
+        '''
+        quite complicated setup to setup test qpid exchanges
+        and mock away ssh calls to cep4
+        and mock away dockerized commands
+        '''
+        # setup broker connection
+        self.connection = Connection.establish('127.0.0.1')
+        self.broker = BrokerAgent(self.connection)
+
+        # add test service exchange
+        self.TEST_UUID = uuid.uuid1()
+        self.busname = 'test-lofarbus-%s' % (self.TEST_UUID)
+        self.broker.addExchange('topic', self.busname)
+
+        self.TEST_OTDB_ID = 999999
+
+        # where to store the test results
+        self.TEST_DIR = '/tmp/qa_service_%s' % self.TEST_UUID
+        self.TEST_H5_FILE = 'L%s.MS_extract.h5' % (self.TEST_OTDB_ID,)
+        self.TEST_H5_PATH = os.path.join(self.TEST_DIR, 'ms_extract', self.TEST_H5_FILE)
+
+        # mock the calls to ssh cep4 and docker
+        def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None):
+            logger.info('mocked_wrap_command_for_docker returning original command: %s', ' '.join(cmd))
+            return cmd
+
+        def mocked_wrap_command_in_cep4_head_node_ssh_call(cmd):
+            logger.info('mocked_wrap_command_in_cep4_head_node_ssh_call returning original command: %s', ' '.join(cmd))
+            return cmd
+
+        def mocked_wrap_command_in_cep4_cpu_node_ssh_call(cmd, cpu_node_nr, via_head):
+            logger.info('mocked_wrap_command_in_cep4_cpu_node_ssh_call for cpu node nr %s via head=%s ' \
+                        'returning original command: %s', cpu_node_nr, via_head, ' '.join(cmd))
+            return cmd
+
+        def mocked_get_cep4_available_cpu_nodes():
+            logger.info('mocked_get_cep4_available_cpu_nodes for returning empty node list')
+            return []
+
+        # we need to patch the wrap_command_in_cep4_head_node_ssh_call function from module lofar.qa.service.qa_service,
+        # because that's where it's imported and used.
+ # (and not the original lofar.common.cep4_utils.wrap_command_for_docker) + wrap_command_for_docker_patcher = mock.patch('lofar.qa.service.qa_service.wrap_command_for_docker') + self.addCleanup(wrap_command_for_docker_patcher.stop) + self.wrap_command_for_docker_mock = wrap_command_for_docker_patcher.start() + self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker + + wrap_command_in_cep4_head_node_ssh_call_patcher = mock.patch('lofar.qa.service.qa_service.wrap_command_in_cep4_head_node_ssh_call') + self.addCleanup(wrap_command_in_cep4_head_node_ssh_call_patcher.stop) + self.wrap_command_in_cep4_head_node_ssh_call_mock = wrap_command_in_cep4_head_node_ssh_call_patcher.start() + self.wrap_command_in_cep4_head_node_ssh_call_mock.side_effect = mocked_wrap_command_in_cep4_head_node_ssh_call + + wrap_command_in_cep4_cpu_node_ssh_call_patcher = mock.patch('lofar.common.cep4_utils.wrap_command_in_cep4_cpu_node_ssh_call') + self.addCleanup(wrap_command_in_cep4_cpu_node_ssh_call_patcher.stop) + self.wrap_command_in_cep4_cpu_node_ssh_call_mock = wrap_command_in_cep4_cpu_node_ssh_call_patcher.start() + self.wrap_command_in_cep4_cpu_node_ssh_call_mock.side_effect = mocked_wrap_command_in_cep4_cpu_node_ssh_call + + get_cep4_available_cpu_nodes_patcher = mock.patch('lofar.common.cep4_utils.get_cep4_available_cpu_nodes') + self.addCleanup(get_cep4_available_cpu_nodes_patcher.stop) + self.get_cep4_available_cpu_nodes_mock = get_cep4_available_cpu_nodes_patcher.start() + self.get_cep4_available_cpu_nodes_mock.side_effect = mocked_get_cep4_available_cpu_nodes + + # mock the ssh_cmd_list function, and check in each test if it was NOT called, + # because that is what we are trying to prevent by mocking the other methods. 
+ # So, in principle it should not be needed to mock it, + # but when there is some error in the code/test/mock we would like to prevent + # an accidental ssh call to cep4 + ssh_cmd_list_patcher = mock.patch('lofar.common.cep4_utils.ssh_cmd_list') + self.addCleanup(ssh_cmd_list_patcher.stop) + self.ssh_cmd_list_mock = ssh_cmd_list_patcher.start() + + def tearDown(self): + logger.info('removing test dir: %s', self.TEST_DIR) + shutil.rmtree(self.TEST_DIR, ignore_errors=True) + + # cleanup test bus and exit + if self.broker: + logger.info('removing test bus: %s', self.busname) + self.broker.delExchange(self.busname) + if self.connection: + self.connection.close() + + def send_otdb_task_completing_event(self): + '''helper method: create a ToBus and send a completing EventMessage''' + with ToBus(self.busname) as sender: + msg = EventMessage(context=DEFAULT_OTDB_NOTIFICATION_SUBJECT, + content={"treeID": self.TEST_OTDB_ID, + "state": 'completing', + "time_of_change": datetime.utcnow()}) + sender.send(msg) + + def test_01_qa_service_for_expected_behaviour(self): + ''' + This test starts a QAService, triggers a test observation completing event, + and tests if the generated h5 file and plots are as expected. + It is an end-to-end test which does not check the intermediate results. It is assumed that + the intermediate steps are tested in other tests/modules. + ''' + + logger.info(' -- test_01_qa_service_for_expected_behaviour -- ') + + # override the mock behaviour from setUp for this specific test + def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): + # replace the ms2hdf5 command which runs normally in the docker container + # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. 
+ if 'ms2hdf5' in cmd: + # the create_test_hypercube executable should be available in the PATH environment + create_test_hypercube_path = 'create_test_hypercube' + + mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16', + '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH] + logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + if 'cluster_this.py' in cmd: + # replace the cluster command which runs normally in the docker container + # by a call to bash true, so the 'cluster_this' call returns 0 exit code + mocked_cmd = ['true'] + logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + logger.info('''mocked_wrap_command_for_docker returning original command: '%s' ''', ' '.join(cmd)) + return cmd + + self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker + + # start the QAService (the object under test) + with QAService(qa_notification_busname=self.busname, + otdb_notification_busname=self.busname, + qa_base_dir=self.TEST_DIR): + + # start listening for QA event messages from the QAService + with SynchronizingQABusListener(self.busname) as qa_listener: + # trigger a qa process by sending otdb task completing event + # this will result in the QAService actually doing its magic + self.send_otdb_task_completing_event() + + # start waiting until ConvertedMS2Hdf5 event message received (or timeout) + qa_listener.converted_event.wait(30) + + # ConvertedMS2Hdf5 event message should have been sent, so converted_event should have been set + self.assertTrue(qa_listener.converted_event.is_set()) + + # check the converted_msg_content + self.assertTrue('otdb_id' in qa_listener.converted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.converted_msg_content) + + + 
# start waiting until Clustered event message received (or timeout) + qa_listener.clustered_event.wait(30) + + + # Clustered event message should have been sent, so clustered_event should have been set + self.assertTrue(qa_listener.clustered_event.is_set()) + + # check the clustered_msg_content + self.assertTrue('otdb_id' in qa_listener.clustered_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.clustered_msg_content) + + + # start waiting until CreatedInspectionPlots event message received (or timeout) + qa_listener.plotted_event.wait(30) + + # CreatedInspectionPlots event message should have been sent, so plotted_event should have been set + self.assertTrue(qa_listener.plotted_event.is_set()) + + # check the plotted_msg_content + self.assertTrue('otdb_id' in qa_listener.plotted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.plotted_msg_content) + self.assertTrue('plot_dir_path' in qa_listener.plotted_msg_content) + + # check if the output dirs/files exist + self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['hdf5_file_path'])) + logger.info(qa_listener.plotted_msg_content['plot_dir_path']) + self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['plot_dir_path'])) + plot_file_names = [f for f in os.listdir(qa_listener.plotted_msg_content['plot_dir_path']) + if f.endswith('png')] + self.assertEqual(10, len(plot_file_names)) + + auto_correlation_plot_file_names = [f for f in plot_file_names + if 'auto' in f] + self.assertEqual(4, len(auto_correlation_plot_file_names)) + + complex_plot_file_names = [f for f in plot_file_names + if 'complex' in f] + self.assertEqual(6, len(complex_plot_file_names)) + + # start waiting until QAFinished event message received (or timeout) + qa_listener.finished_event.wait(30) + + # QAFinished event message should have been sent, so finished_event should have been set + self.assertTrue(qa_listener.finished_event.is_set()) + + # check the result_msg_content + self.assertTrue('otdb_id' in 
qa_listener.finished_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.finished_msg_content) + self.assertTrue('plot_dir_path' in qa_listener.finished_msg_content) + + self.wrap_command_for_docker_mock.assert_called() + self.wrap_command_in_cep4_cpu_node_ssh_call_mock.assert_called() + self.wrap_command_in_cep4_head_node_ssh_call_mock.assert_called() + self.get_cep4_available_cpu_nodes_mock.assert_called() + self.ssh_cmd_list_mock.assert_not_called() + + def test_02_qa_service_for_error_in_ms2hdf5(self): + ''' + This test starts a QAService, triggers a test observation completing event, + and tests if the conversion from MS to hdf5 fails (by intention). + It is an end-to-end test which does not check the intermediate results. It is assumed that + the intermediate steps are tested in other tests/modules. + ''' + + logger.info(' -- test_02_qa_service_for_error_in_ms2hdf5 -- ') + + def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): + if 'ms2hdf5' in cmd: + # replace the ms2hdf5 command which runs normally in the docker container + # by a call to bash false, so the 'ms2hdf5' call returns non-0 exit code + mocked_cmd = ['false'] + logger.info('mocked_wrap_command_for_docker returning mocked erroneous command: %s', mocked_cmd) + return mocked_cmd + + logger.info('mocked_wrap_command_for_docker returning original command: %s', cmd) + return cmd + + self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker + + # start the QAService (the object under test) + with QAService(qa_notification_busname=self.busname, + otdb_notification_busname=self.busname, + qa_base_dir=self.TEST_DIR): + # start listening for QA event messages from the QAService + with SynchronizingQABusListener(self.busname) as qa_listener: + # trigger a qa process by sending otdb task completing event + # this will result in the QAService actually doing its magic + self.send_otdb_task_completing_event() + + # start waiting until QAFinished event 
message received (or timeout) + qa_listener.error_event.wait(30) + + # ------------ + # Error event message should have been sent, so error_event should have been set + self.assertTrue(qa_listener.error_event.is_set()) + + self.assertTrue('otdb_id' in qa_listener.error_msg_content) + self.assertTrue('message' in qa_listener.error_msg_content) + + self.wrap_command_for_docker_mock.assert_called() + self.wrap_command_in_cep4_cpu_node_ssh_call_mock.assert_called() + self.get_cep4_available_cpu_nodes_mock.assert_called() + self.ssh_cmd_list_mock.assert_not_called() + + def test_03_qa_service_for_error_in_creating_plots(self): + ''' + This test starts a QAService, triggers a test observation completing event, + and tests if the conversion from MS to hdf5 works, + but the plot generation fails (by intention). + It is an end-to-end test which does not check the intermediate results. It is assumed that + the intermediate steps are tested in other tests/modules. + ''' + + logger.info(' -- test_03_qa_service_for_error_in_creating_plots -- ') + + # mock the calls to ssh cep4 and docker + def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): + if 'ms2hdf5' in cmd: + # replace the ms2hdf5 command which runs normally in the docker container + # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. 
+ create_test_hypercube_path = os.path.normpath(os.path.join(os.getcwd(), '../../../bin/create_test_hypercube')) + mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16', + '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH] + logger.info('mocked_wrap_command_for_docker returning mocked command to create test h5 file: %s', + ' '.join(mocked_cmd)) + return mocked_cmd + + if 'cluster_this.py' in cmd: + # replace the cluster command which runs normally in the docker container + # by a call to bash true, so the 'cluster_this' call returns 0 exit code + mocked_cmd = ['true'] + logger.info('mocked_wrap_command_for_docker returning mocked command: %s', mocked_cmd) + return mocked_cmd + + + if 'plot_hdf5_dynamic_spectra' in cmd: + # replace the ms2hdf5 command which runs normally in the docker container + # by a call to bash false, so the 'ms2hdf5' call returns non-0 exit code + mocked_cmd = ['false'] + logger.info('mocked_wrap_command_for_docker returning mocked erroneous command: %s', mocked_cmd) + return mocked_cmd + + logger.info('mocked_wrap_command_for_docker returning original command: %s', ' '.join(cmd)) + return cmd + + self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker + + # start the QAService (the object under test) + with QAService(qa_notification_busname=self.busname, + otdb_notification_busname=self.busname, + qa_base_dir=self.TEST_DIR): + # start listening for QA event messages from the QAService + with SynchronizingQABusListener(self.busname) as qa_listener: + # trigger a qa process by sending otdb task completing event + # this will result in the QAService actually doing its magic + self.send_otdb_task_completing_event() + + # start waiting until ConvertedMS2Hdf5 event message received (or timeout) + qa_listener.converted_event.wait(30) + + # ConvertedMS2Hdf5 event message should have been sent, so converted_event should have been set + self.assertTrue(qa_listener.converted_event.is_set()) + + # check the 
result_msg_content + self.assertTrue('otdb_id' in qa_listener.converted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.converted_msg_content) + + # start waiting until Error event message received (or timeout) + qa_listener.error_event.wait(30) + + # Error event message should have been sent, so error_event should have been set + self.assertTrue(qa_listener.error_event.is_set()) + + # check the result_msg_content + self.assertTrue('otdb_id' in qa_listener.error_msg_content) + self.assertTrue('message' in qa_listener.error_msg_content) + + self.wrap_command_for_docker_mock.assert_called() + self.wrap_command_in_cep4_cpu_node_ssh_call_mock.assert_called() + self.get_cep4_available_cpu_nodes_mock.assert_called() + self.ssh_cmd_list_mock.assert_not_called() + + def test_04_qa_service_for_error_ssh(self): + ''' + This test starts a QAService, triggers a test observation completing event, + and tests if conversion fails due to an intentionally failing (mocked) ssh call. + It is an end-to-end test which does not check the intermediate results. It is assumed that + the intermediate steps are tested in other tests/modules. 
+ ''' + + logger.info(' -- test_04_qa_service_for_error_ssh -- ') + + def mocked_wrap_command_in_cep4_cpu_node_ssh_call(cmd, cpu_node_nr, via_head): + logger.info('mocked_wrap_command_in_cep4_cpu_node_ssh_call for cpu node nr %s via head=%s ' \ + 'returning call to bash false', cpu_node_nr, via_head) + return ['false', ';'] + + self.wrap_command_in_cep4_cpu_node_ssh_call_mock.side_effect = mocked_wrap_command_in_cep4_cpu_node_ssh_call + + # start the QAService (the object under test) + with QAService(qa_notification_busname=self.busname, + otdb_notification_busname=self.busname, + qa_base_dir=self.TEST_DIR): + # start listening for QA event messages from the QAService + with SynchronizingQABusListener(self.busname) as qa_listener: + # trigger a qa process by sending otdb task completing event + # this will result in the QAService actually doing its magic + self.send_otdb_task_completing_event() + + # start waiting until Error event message received (or timeout) + qa_listener.error_event.wait(30) + + # Error event message should have been sent, so error_event should have been set + self.assertTrue(qa_listener.error_event.is_set()) + + # check the result_msg_content + self.assertTrue('otdb_id' in qa_listener.error_msg_content) + self.assertTrue('message' in qa_listener.error_msg_content) + + self.wrap_command_for_docker_mock.assert_called() + self.wrap_command_in_cep4_cpu_node_ssh_call_mock.assert_called() + self.get_cep4_available_cpu_nodes_mock.assert_called() + self.ssh_cmd_list_mock.assert_not_called() + + +if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + try: + Connection.establish('127.0.0.1') + except ConnectError: + logger.warning("cannot connect to qpid broker. 
skipping test...") + exit(3) + + #run the unit tests + unittest.main(defaultTest='TestQAService.test_01_qa_service_for_expected_behaviour') diff --git a/QA/QA_Service/test/t_qa_service.run b/QA/QA_Service/test/t_qa_service.run new file mode 100755 index 0000000000000000000000000000000000000000..d7f90bc01c6ee65646e2fe5bded22f08ae6e235d --- /dev/null +++ b/QA/QA_Service/test/t_qa_service.run @@ -0,0 +1,23 @@ +#!/bin/bash + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# Run the unit test +source python-coverage.sh +python_coverage_test "*QA*" t_qa_service.py + diff --git a/QA/QA_Service/test/t_qa_service.sh b/QA/QA_Service/test/t_qa_service.sh new file mode 100755 index 0000000000000000000000000000000000000000..be3cafb42ba4503ee5852669cb32c238c87f46af --- /dev/null +++ b/QA/QA_Service/test/t_qa_service.sh @@ -0,0 +1,20 @@ +#!/bin/sh + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +./runctest.sh t_qa_service diff --git a/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc b/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc index 490033af24371859e9312c1df2fce31d1dfc5600..b0e3845b4132fb994d9a9c9f4692a62ccf3b50f8 100644 --- a/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc +++ b/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc @@ -34,6 +34,14 @@ namespace LOFAR { namespace Cobalt { + // LOFAR-RELEASE 3.1.0 + // See also CEP/pipeline/framework/lofarpipe/support/feedback.py + int LTAFeedback::major_version(3); + int LTAFeedback::minor_version(1); + int LTAFeedback::patch_number(0); + std::string LTAFeedback::feedback_version(boost::str(boost::format("%02d.%02d.%02d") % + major_version % minor_version % patch_number)); + LTAFeedback::LTAFeedback(const ObservationSettings &settings): settings(settings) { @@ -62,6 +70,8 @@ namespace LOFAR ps.add(prefix + "filename", f.location.filename); ps.add(prefix + "size", "0"); ps.add(prefix + "location", locationHost + ":" + f.location.directory); + ps.add(prefix + "storageWriter", "LOFAR"); + ps.add(prefix + "storageWriterVersion", str(format("%d") % LofarStManVersion)); ps.add(prefix + "percentageWritten", "0"); ps.add(prefix + "startTime", TimeDouble::toString(settings.startTime, false)); @@ -128,6 +138,8 @@ namespace LOFAR ps.add(prefix + "location", locationHost 
+ ":" + f.location.directory); ps.add(prefix + "percentageWritten", "0"); ps.add(prefix + "beamTypes", "[]"); + ps.add(prefix + "storageWriter", "HDF5DEFAULT"); + ps.add(prefix + "storageWriterVersion", "UNKNOWN"); const string type = settings.beamFormer.doFlysEye ? "FlysEyeBeam" : @@ -195,6 +207,9 @@ namespace LOFAR { ParameterSet ps; + // for MoM, to discriminate between older and newer feedback + ps.add("feedback_version", feedback_version); // e.g. 03.01.00 + // for MoM, to discriminate between Cobalt and BG/P observations ps.add("_isCobalt", "T"); diff --git a/RTCP/Cobalt/CoInterface/src/LTAFeedback.h b/RTCP/Cobalt/CoInterface/src/LTAFeedback.h index e8be4dd8d389581c12d45bc4eb991ddd247f6ab6..8005e83cc86d1b6385738ec0910c77fc2cd95787 100644 --- a/RTCP/Cobalt/CoInterface/src/LTAFeedback.h +++ b/RTCP/Cobalt/CoInterface/src/LTAFeedback.h @@ -59,6 +59,10 @@ namespace LOFAR // Package (sip) for the LTA. // \see http://proposal.astron.nl/schemas/LTA-SIP.xsd ParameterSet allFeedback() const; + static int major_version; + static int minor_version; + static int patch_number; + static std::string feedback_version; private: const ObservationSettings settings; diff --git a/RTCP/Cobalt/CoInterface/src/OutputTypes.h b/RTCP/Cobalt/CoInterface/src/OutputTypes.h index 032701e3eacc079f679169d9ab3d7e6a76f89fc1..dee5ca0e70fc2587643394e68a9391a773e1b6aa 100644 --- a/RTCP/Cobalt/CoInterface/src/OutputTypes.h +++ b/RTCP/Cobalt/CoInterface/src/OutputTypes.h @@ -22,6 +22,8 @@ #ifndef LOFAR_RTCP_INTERFACE_OUTPUT_TYPES_H #define LOFAR_RTCP_INTERFACE_OUTPUT_TYPES_H +static const unsigned LofarStManVersion = 3; + namespace LOFAR { namespace Cobalt diff --git a/RTCP/Cobalt/CoInterface/src/Parset.cc b/RTCP/Cobalt/CoInterface/src/Parset.cc index fcc317a4089037c99197330866741d64de50399f..35ae49eb9d63fb0c4e9dcf56cc5e7a6e72f1b9e5 100644 --- a/RTCP/Cobalt/CoInterface/src/Parset.cc +++ b/RTCP/Cobalt/CoInterface/src/Parset.cc @@ -618,18 +618,6 @@ namespace LOFAR if (settings.beamFormer.enabled) { // 
Parse global settings - - // 4096 channels is enough, but allow parset override. - if (!isDefined("Cobalt.BeamFormer.nrHighResolutionChannels")) { - settings.beamFormer.nrHighResolutionChannels = 4096; - } else { - settings.beamFormer.nrHighResolutionChannels = - getUint32("Cobalt.BeamFormer.nrHighResolutionChannels"); - ASSERTSTR(powerOfTwo(settings.beamFormer.nrHighResolutionChannels) && - settings.beamFormer.nrHighResolutionChannels < 65536, - "Parset: Cobalt.BeamFormer.nrHighResolutionChannels must be a power of 2 and < 64k"); - } - settings.beamFormer.doFlysEye = getBool("Cobalt.BeamFormer.flysEye", false); unsigned nrDelayCompCh; @@ -638,9 +626,6 @@ namespace LOFAR } else { nrDelayCompCh = getUint32("Cobalt.BeamFormer.nrDelayCompensationChannels"); } - if (nrDelayCompCh > settings.beamFormer.nrHighResolutionChannels) { - nrDelayCompCh = settings.beamFormer.nrHighResolutionChannels; - } settings.beamFormer.nrDelayCompensationChannels = nrDelayCompCh; // Derive antennaFields to use for beam forming @@ -719,6 +704,12 @@ namespace LOFAR size_t incoherent_idx = 0; // Parse all TABs + // + // Strategy: + // * If Fly's Eye is enabled, we have 1 TAB/station. Do NOT process any other TABs. + // * If Fly's Eye is disabled, process TABs in this order: + // 1. Manually specified TABs (Observation.Beam[x].TiedArrayBeam[y] + // 2. TAB rings (Observation.Beam[x].nrTabRings) settings.beamFormer.SAPs.resize(nrSAPs); for (unsigned i = 0; i < nrSAPs; ++i) @@ -735,12 +726,22 @@ namespace LOFAR size_t nrRings = getUint32(str(format("Observation.Beam[%u].nrTabRings") % i), 0); double ringWidth = getDouble(str(format("Observation.Beam[%u].tabRingSize") % i), 0.0); + // Throw if we encounter an unsupported configuration + if (settings.beamFormer.doFlysEye) { + if (nrRings > 0 || nrTABSParset > 0) { + THROW(CoInterfaceException, "Cannot produce (in)coherent TABs in Fly's Eye mode. 
Error detected in SAP " << i << "."); + } + } + // Create a ptr to RingCoordinates object // If there are tab rings the object will be actuall constructed // The actual tabs will be extracted after we added all manual tabs // But we need the number of tabs from rings at this location std::auto_ptr<RingCoordinates> ptrRingCoords; - if (nrRings > 0) { + if (settings.beamFormer.doFlysEye) { + // For Fly's Eye mode we have exactly one TAB per antenna field. + nrTABs = settings.antennaFields.size(); + } else if (nrRings > 0) { const string prefix = str(format("Observation.Beam[%u]") % i); string directionType = getString(prefix + ".directionType", "J2000"); @@ -761,9 +762,6 @@ namespace LOFAR // Increase the amount of tabs with the number from the coords object // this might be zero nrTABs = nrTABSParset + ptrRingCoords->nCoordinates(); - } else if (settings.beamFormer.doFlysEye) { - // For Fly's Eye mode we have exactly one TAB per antenna field. - nrTABs = settings.antennaFields.size(); } sap.TABs.resize(nrTABs); @@ -773,6 +771,7 @@ namespace LOFAR // Add flys eye tabs if (settings.beamFormer.doFlysEye) { + // Copy direction from SAP const string prefix = str(format("Observation.Beam[%u]") % i); tab.direction.type = getString(prefix + ".directionType", "J2000"); diff --git a/RTCP/Cobalt/CoInterface/src/Parset.h b/RTCP/Cobalt/CoInterface/src/Parset.h index fea46954dc81e08c60beadd923330358b4778e82..4f9f67e11334f80c9bc8903db26d01aa287782be 100644 --- a/RTCP/Cobalt/CoInterface/src/Parset.h +++ b/RTCP/Cobalt/CoInterface/src/Parset.h @@ -589,11 +589,6 @@ namespace LOFAR // Equal to the size of the first FFT. Power of two. unsigned nrDelayCompensationChannels; - // Number of channels per subband for bandpass correction, narrow band - // flagging, beamforming, and coherent dedispersion. - // Power of two and at least nrDelayCompensationChannels. - unsigned nrHighResolutionChannels; - // Are we in fly's eye mode? 
bool doFlysEye; diff --git a/RTCP/Cobalt/CoInterface/src/SparseSet.h b/RTCP/Cobalt/CoInterface/src/SparseSet.h index a4b32468a9cdac9ec1cf5875343f24fb6c88971e..b57435bf271ba54622163cd65f02c5f21ff5fb1b 100644 --- a/RTCP/Cobalt/CoInterface/src/SparseSet.h +++ b/RTCP/Cobalt/CoInterface/src/SparseSet.h @@ -128,6 +128,9 @@ namespace LOFAR // Read the SparseSet from *ptr. void unmarshall(const void *ptr); + // Write the flags to a set of bytes (set each byte to 1 if included, 0 otherwise) + void toByteset(char *ptr, size_t maxSize) const; + private: Ranges ranges; @@ -494,6 +497,17 @@ namespace LOFAR } + template <typename T> + void SparseSet<T>::toByteset(char *ptr, size_t maxSize) const + { + memset(ptr, 0, maxSize); + + for (const_iterator it = ranges.begin(); it != ranges.end(); it++) { + memset(ptr + it->begin, 1, it->end - it->begin); + } + } + + template <typename T> std::ostream &operator << (std::ostream &str, const SparseSet<T> &set) { diff --git a/RTCP/Cobalt/CoInterface/src/SubbandMetaData.h b/RTCP/Cobalt/CoInterface/src/SubbandMetaData.h index c50996d90d510660c8dfdda23979d7663484676b..a22286dbff27e49d590b3edaf2fbe11f736bae57 100644 --- a/RTCP/Cobalt/CoInterface/src/SubbandMetaData.h +++ b/RTCP/Cobalt/CoInterface/src/SubbandMetaData.h @@ -54,8 +54,11 @@ namespace LOFAR void read(Stream *str); void write(Stream *str) const; + // Maximum number of flags ranges to marshall + static const size_t MAXNRFLAGRANGES = 512; + // Maximum size of the buffer to marshall flags - static const size_t MAXFLAGSIZE = 8192 + 4; + static const size_t MAXFLAGSIZE = MAXNRFLAGRANGES * 2 * sizeof(unsigned) + 4; // Maximum number of TABs we'll support when marshalling static const size_t MAXNRTABS = 512; diff --git a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220133_feedback b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220133_feedback index 6cd100e18986c869b85cecfbf7261432eae81007..c5fea8f366b22302f433e9e9184cc4cd7b69643f 100644 --- 
a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220133_feedback +++ b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220133_feedback @@ -14,6 +14,8 @@ Observation.DataProducts.Output_Correlated_[0].percentageWritten=0 Observation.DataProducts.Output_Correlated_[0].size=0 Observation.DataProducts.Output_Correlated_[0].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[0].stationSubband=156 +Observation.DataProducts.Output_Correlated_[0].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[0].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[0].subband=0 Observation.DataProducts.Output_Correlated_[100].SAP=0 Observation.DataProducts.Output_Correlated_[100].centralFrequency=50000000.000000 @@ -28,6 +30,8 @@ Observation.DataProducts.Output_Correlated_[100].percentageWritten=0 Observation.DataProducts.Output_Correlated_[100].size=0 Observation.DataProducts.Output_Correlated_[100].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[100].stationSubband=256 +Observation.DataProducts.Output_Correlated_[100].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[100].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[100].subband=100 Observation.DataProducts.Output_Correlated_[101].SAP=0 Observation.DataProducts.Output_Correlated_[101].centralFrequency=50195312.500000 @@ -42,6 +46,8 @@ Observation.DataProducts.Output_Correlated_[101].percentageWritten=0 Observation.DataProducts.Output_Correlated_[101].size=0 Observation.DataProducts.Output_Correlated_[101].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[101].stationSubband=257 +Observation.DataProducts.Output_Correlated_[101].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[101].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[101].subband=101 Observation.DataProducts.Output_Correlated_[102].SAP=0 
Observation.DataProducts.Output_Correlated_[102].centralFrequency=50390625.000000 @@ -56,6 +62,8 @@ Observation.DataProducts.Output_Correlated_[102].percentageWritten=0 Observation.DataProducts.Output_Correlated_[102].size=0 Observation.DataProducts.Output_Correlated_[102].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[102].stationSubband=258 +Observation.DataProducts.Output_Correlated_[102].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[102].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[102].subband=102 Observation.DataProducts.Output_Correlated_[103].SAP=0 Observation.DataProducts.Output_Correlated_[103].centralFrequency=50585937.500000 @@ -70,6 +78,8 @@ Observation.DataProducts.Output_Correlated_[103].percentageWritten=0 Observation.DataProducts.Output_Correlated_[103].size=0 Observation.DataProducts.Output_Correlated_[103].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[103].stationSubband=259 +Observation.DataProducts.Output_Correlated_[103].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[103].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[103].subband=103 Observation.DataProducts.Output_Correlated_[104].SAP=0 Observation.DataProducts.Output_Correlated_[104].centralFrequency=50781250.000000 @@ -84,6 +94,8 @@ Observation.DataProducts.Output_Correlated_[104].percentageWritten=0 Observation.DataProducts.Output_Correlated_[104].size=0 Observation.DataProducts.Output_Correlated_[104].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[104].stationSubband=260 +Observation.DataProducts.Output_Correlated_[104].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[104].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[104].subband=104 Observation.DataProducts.Output_Correlated_[105].SAP=0 Observation.DataProducts.Output_Correlated_[105].centralFrequency=50976562.500000 @@ -98,6 +110,8 @@ 
Observation.DataProducts.Output_Correlated_[105].percentageWritten=0 Observation.DataProducts.Output_Correlated_[105].size=0 Observation.DataProducts.Output_Correlated_[105].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[105].stationSubband=261 +Observation.DataProducts.Output_Correlated_[105].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[105].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[105].subband=105 Observation.DataProducts.Output_Correlated_[106].SAP=0 Observation.DataProducts.Output_Correlated_[106].centralFrequency=51171875.000000 @@ -112,6 +126,8 @@ Observation.DataProducts.Output_Correlated_[106].percentageWritten=0 Observation.DataProducts.Output_Correlated_[106].size=0 Observation.DataProducts.Output_Correlated_[106].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[106].stationSubband=262 +Observation.DataProducts.Output_Correlated_[106].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[106].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[106].subband=106 Observation.DataProducts.Output_Correlated_[107].SAP=0 Observation.DataProducts.Output_Correlated_[107].centralFrequency=51367187.500000 @@ -126,6 +142,8 @@ Observation.DataProducts.Output_Correlated_[107].percentageWritten=0 Observation.DataProducts.Output_Correlated_[107].size=0 Observation.DataProducts.Output_Correlated_[107].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[107].stationSubband=263 +Observation.DataProducts.Output_Correlated_[107].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[107].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[107].subband=107 Observation.DataProducts.Output_Correlated_[108].SAP=0 Observation.DataProducts.Output_Correlated_[108].centralFrequency=51562500.000000 @@ -140,6 +158,8 @@ Observation.DataProducts.Output_Correlated_[108].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[108].size=0 Observation.DataProducts.Output_Correlated_[108].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[108].stationSubband=264 +Observation.DataProducts.Output_Correlated_[108].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[108].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[108].subband=108 Observation.DataProducts.Output_Correlated_[109].SAP=0 Observation.DataProducts.Output_Correlated_[109].centralFrequency=51757812.500000 @@ -154,6 +174,8 @@ Observation.DataProducts.Output_Correlated_[109].percentageWritten=0 Observation.DataProducts.Output_Correlated_[109].size=0 Observation.DataProducts.Output_Correlated_[109].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[109].stationSubband=265 +Observation.DataProducts.Output_Correlated_[109].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[109].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[109].subband=109 Observation.DataProducts.Output_Correlated_[10].SAP=0 Observation.DataProducts.Output_Correlated_[10].centralFrequency=32421875.000000 @@ -168,6 +190,8 @@ Observation.DataProducts.Output_Correlated_[10].percentageWritten=0 Observation.DataProducts.Output_Correlated_[10].size=0 Observation.DataProducts.Output_Correlated_[10].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[10].stationSubband=166 +Observation.DataProducts.Output_Correlated_[10].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[10].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[10].subband=10 Observation.DataProducts.Output_Correlated_[110].SAP=0 Observation.DataProducts.Output_Correlated_[110].centralFrequency=51953125.000000 @@ -182,6 +206,8 @@ Observation.DataProducts.Output_Correlated_[110].percentageWritten=0 Observation.DataProducts.Output_Correlated_[110].size=0 
Observation.DataProducts.Output_Correlated_[110].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[110].stationSubband=266 +Observation.DataProducts.Output_Correlated_[110].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[110].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[110].subband=110 Observation.DataProducts.Output_Correlated_[111].SAP=0 Observation.DataProducts.Output_Correlated_[111].centralFrequency=52148437.500000 @@ -196,6 +222,8 @@ Observation.DataProducts.Output_Correlated_[111].percentageWritten=0 Observation.DataProducts.Output_Correlated_[111].size=0 Observation.DataProducts.Output_Correlated_[111].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[111].stationSubband=267 +Observation.DataProducts.Output_Correlated_[111].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[111].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[111].subband=111 Observation.DataProducts.Output_Correlated_[112].SAP=0 Observation.DataProducts.Output_Correlated_[112].centralFrequency=52343750.000000 @@ -210,6 +238,8 @@ Observation.DataProducts.Output_Correlated_[112].percentageWritten=0 Observation.DataProducts.Output_Correlated_[112].size=0 Observation.DataProducts.Output_Correlated_[112].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[112].stationSubband=268 +Observation.DataProducts.Output_Correlated_[112].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[112].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[112].subband=112 Observation.DataProducts.Output_Correlated_[113].SAP=0 Observation.DataProducts.Output_Correlated_[113].centralFrequency=52539062.500000 @@ -224,6 +254,8 @@ Observation.DataProducts.Output_Correlated_[113].percentageWritten=0 Observation.DataProducts.Output_Correlated_[113].size=0 Observation.DataProducts.Output_Correlated_[113].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[113].stationSubband=269 +Observation.DataProducts.Output_Correlated_[113].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[113].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[113].subband=113 Observation.DataProducts.Output_Correlated_[114].SAP=0 Observation.DataProducts.Output_Correlated_[114].centralFrequency=52734375.000000 @@ -238,6 +270,8 @@ Observation.DataProducts.Output_Correlated_[114].percentageWritten=0 Observation.DataProducts.Output_Correlated_[114].size=0 Observation.DataProducts.Output_Correlated_[114].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[114].stationSubband=270 +Observation.DataProducts.Output_Correlated_[114].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[114].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[114].subband=114 Observation.DataProducts.Output_Correlated_[115].SAP=0 Observation.DataProducts.Output_Correlated_[115].centralFrequency=52929687.500000 @@ -252,6 +286,8 @@ Observation.DataProducts.Output_Correlated_[115].percentageWritten=0 Observation.DataProducts.Output_Correlated_[115].size=0 Observation.DataProducts.Output_Correlated_[115].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[115].stationSubband=271 +Observation.DataProducts.Output_Correlated_[115].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[115].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[115].subband=115 Observation.DataProducts.Output_Correlated_[116].SAP=0 Observation.DataProducts.Output_Correlated_[116].centralFrequency=53125000.000000 @@ -266,6 +302,8 @@ Observation.DataProducts.Output_Correlated_[116].percentageWritten=0 Observation.DataProducts.Output_Correlated_[116].size=0 Observation.DataProducts.Output_Correlated_[116].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[116].stationSubband=272 
+Observation.DataProducts.Output_Correlated_[116].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[116].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[116].subband=116 Observation.DataProducts.Output_Correlated_[117].SAP=0 Observation.DataProducts.Output_Correlated_[117].centralFrequency=53320312.500000 @@ -280,6 +318,8 @@ Observation.DataProducts.Output_Correlated_[117].percentageWritten=0 Observation.DataProducts.Output_Correlated_[117].size=0 Observation.DataProducts.Output_Correlated_[117].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[117].stationSubband=273 +Observation.DataProducts.Output_Correlated_[117].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[117].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[117].subband=117 Observation.DataProducts.Output_Correlated_[118].SAP=0 Observation.DataProducts.Output_Correlated_[118].centralFrequency=53515625.000000 @@ -294,6 +334,8 @@ Observation.DataProducts.Output_Correlated_[118].percentageWritten=0 Observation.DataProducts.Output_Correlated_[118].size=0 Observation.DataProducts.Output_Correlated_[118].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[118].stationSubband=274 +Observation.DataProducts.Output_Correlated_[118].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[118].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[118].subband=118 Observation.DataProducts.Output_Correlated_[119].SAP=0 Observation.DataProducts.Output_Correlated_[119].centralFrequency=53710937.500000 @@ -308,6 +350,8 @@ Observation.DataProducts.Output_Correlated_[119].percentageWritten=0 Observation.DataProducts.Output_Correlated_[119].size=0 Observation.DataProducts.Output_Correlated_[119].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[119].stationSubband=275 +Observation.DataProducts.Output_Correlated_[119].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[119].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[119].subband=119 Observation.DataProducts.Output_Correlated_[11].SAP=0 Observation.DataProducts.Output_Correlated_[11].centralFrequency=32617187.500000 @@ -322,6 +366,8 @@ Observation.DataProducts.Output_Correlated_[11].percentageWritten=0 Observation.DataProducts.Output_Correlated_[11].size=0 Observation.DataProducts.Output_Correlated_[11].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[11].stationSubband=167 +Observation.DataProducts.Output_Correlated_[11].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[11].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[11].subband=11 Observation.DataProducts.Output_Correlated_[120].SAP=0 Observation.DataProducts.Output_Correlated_[120].centralFrequency=53906250.000000 @@ -336,6 +382,8 @@ Observation.DataProducts.Output_Correlated_[120].percentageWritten=0 Observation.DataProducts.Output_Correlated_[120].size=0 Observation.DataProducts.Output_Correlated_[120].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[120].stationSubband=276 +Observation.DataProducts.Output_Correlated_[120].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[120].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[120].subband=120 Observation.DataProducts.Output_Correlated_[121].SAP=0 Observation.DataProducts.Output_Correlated_[121].centralFrequency=54101562.500000 @@ -350,6 +398,8 @@ Observation.DataProducts.Output_Correlated_[121].percentageWritten=0 Observation.DataProducts.Output_Correlated_[121].size=0 Observation.DataProducts.Output_Correlated_[121].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[121].stationSubband=277 +Observation.DataProducts.Output_Correlated_[121].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[121].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[121].subband=121 Observation.DataProducts.Output_Correlated_[122].SAP=0 Observation.DataProducts.Output_Correlated_[122].centralFrequency=54296875.000000 @@ -364,6 +414,8 @@ Observation.DataProducts.Output_Correlated_[122].percentageWritten=0 Observation.DataProducts.Output_Correlated_[122].size=0 Observation.DataProducts.Output_Correlated_[122].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[122].stationSubband=278 +Observation.DataProducts.Output_Correlated_[122].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[122].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[122].subband=122 Observation.DataProducts.Output_Correlated_[123].SAP=0 Observation.DataProducts.Output_Correlated_[123].centralFrequency=54492187.500000 @@ -378,6 +430,8 @@ Observation.DataProducts.Output_Correlated_[123].percentageWritten=0 Observation.DataProducts.Output_Correlated_[123].size=0 Observation.DataProducts.Output_Correlated_[123].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[123].stationSubband=279 +Observation.DataProducts.Output_Correlated_[123].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[123].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[123].subband=123 Observation.DataProducts.Output_Correlated_[124].SAP=0 Observation.DataProducts.Output_Correlated_[124].centralFrequency=54687500.000000 @@ -392,6 +446,8 @@ Observation.DataProducts.Output_Correlated_[124].percentageWritten=0 Observation.DataProducts.Output_Correlated_[124].size=0 Observation.DataProducts.Output_Correlated_[124].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[124].stationSubband=280 +Observation.DataProducts.Output_Correlated_[124].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[124].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[124].subband=124 
Observation.DataProducts.Output_Correlated_[125].SAP=0 Observation.DataProducts.Output_Correlated_[125].centralFrequency=54882812.500000 @@ -406,6 +462,8 @@ Observation.DataProducts.Output_Correlated_[125].percentageWritten=0 Observation.DataProducts.Output_Correlated_[125].size=0 Observation.DataProducts.Output_Correlated_[125].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[125].stationSubband=281 +Observation.DataProducts.Output_Correlated_[125].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[125].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[125].subband=125 Observation.DataProducts.Output_Correlated_[126].SAP=0 Observation.DataProducts.Output_Correlated_[126].centralFrequency=55078125.000000 @@ -420,6 +478,8 @@ Observation.DataProducts.Output_Correlated_[126].percentageWritten=0 Observation.DataProducts.Output_Correlated_[126].size=0 Observation.DataProducts.Output_Correlated_[126].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[126].stationSubband=282 +Observation.DataProducts.Output_Correlated_[126].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[126].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[126].subband=126 Observation.DataProducts.Output_Correlated_[127].SAP=0 Observation.DataProducts.Output_Correlated_[127].centralFrequency=55273437.500000 @@ -434,6 +494,8 @@ Observation.DataProducts.Output_Correlated_[127].percentageWritten=0 Observation.DataProducts.Output_Correlated_[127].size=0 Observation.DataProducts.Output_Correlated_[127].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[127].stationSubband=283 +Observation.DataProducts.Output_Correlated_[127].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[127].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[127].subband=127 Observation.DataProducts.Output_Correlated_[128].SAP=0 
Observation.DataProducts.Output_Correlated_[128].centralFrequency=55468750.000000 @@ -448,6 +510,8 @@ Observation.DataProducts.Output_Correlated_[128].percentageWritten=0 Observation.DataProducts.Output_Correlated_[128].size=0 Observation.DataProducts.Output_Correlated_[128].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[128].stationSubband=284 +Observation.DataProducts.Output_Correlated_[128].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[128].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[128].subband=128 Observation.DataProducts.Output_Correlated_[129].SAP=0 Observation.DataProducts.Output_Correlated_[129].centralFrequency=55664062.500000 @@ -462,6 +526,8 @@ Observation.DataProducts.Output_Correlated_[129].percentageWritten=0 Observation.DataProducts.Output_Correlated_[129].size=0 Observation.DataProducts.Output_Correlated_[129].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[129].stationSubband=285 +Observation.DataProducts.Output_Correlated_[129].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[129].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[129].subband=129 Observation.DataProducts.Output_Correlated_[12].SAP=0 Observation.DataProducts.Output_Correlated_[12].centralFrequency=32812500.000000 @@ -476,6 +542,8 @@ Observation.DataProducts.Output_Correlated_[12].percentageWritten=0 Observation.DataProducts.Output_Correlated_[12].size=0 Observation.DataProducts.Output_Correlated_[12].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[12].stationSubband=168 +Observation.DataProducts.Output_Correlated_[12].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[12].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[12].subband=12 Observation.DataProducts.Output_Correlated_[130].SAP=0 Observation.DataProducts.Output_Correlated_[130].centralFrequency=55859375.000000 @@ -490,6 +558,8 @@ 
Observation.DataProducts.Output_Correlated_[130].percentageWritten=0 Observation.DataProducts.Output_Correlated_[130].size=0 Observation.DataProducts.Output_Correlated_[130].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[130].stationSubband=286 +Observation.DataProducts.Output_Correlated_[130].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[130].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[130].subband=130 Observation.DataProducts.Output_Correlated_[131].SAP=0 Observation.DataProducts.Output_Correlated_[131].centralFrequency=56054687.500000 @@ -504,6 +574,8 @@ Observation.DataProducts.Output_Correlated_[131].percentageWritten=0 Observation.DataProducts.Output_Correlated_[131].size=0 Observation.DataProducts.Output_Correlated_[131].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[131].stationSubband=287 +Observation.DataProducts.Output_Correlated_[131].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[131].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[131].subband=131 Observation.DataProducts.Output_Correlated_[132].SAP=0 Observation.DataProducts.Output_Correlated_[132].centralFrequency=56250000.000000 @@ -518,6 +590,8 @@ Observation.DataProducts.Output_Correlated_[132].percentageWritten=0 Observation.DataProducts.Output_Correlated_[132].size=0 Observation.DataProducts.Output_Correlated_[132].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[132].stationSubband=288 +Observation.DataProducts.Output_Correlated_[132].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[132].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[132].subband=132 Observation.DataProducts.Output_Correlated_[133].SAP=0 Observation.DataProducts.Output_Correlated_[133].centralFrequency=56445312.500000 @@ -532,6 +606,8 @@ Observation.DataProducts.Output_Correlated_[133].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[133].size=0 Observation.DataProducts.Output_Correlated_[133].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[133].stationSubband=289 +Observation.DataProducts.Output_Correlated_[133].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[133].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[133].subband=133 Observation.DataProducts.Output_Correlated_[134].SAP=0 Observation.DataProducts.Output_Correlated_[134].centralFrequency=56640625.000000 @@ -546,6 +622,8 @@ Observation.DataProducts.Output_Correlated_[134].percentageWritten=0 Observation.DataProducts.Output_Correlated_[134].size=0 Observation.DataProducts.Output_Correlated_[134].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[134].stationSubband=290 +Observation.DataProducts.Output_Correlated_[134].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[134].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[134].subband=134 Observation.DataProducts.Output_Correlated_[135].SAP=0 Observation.DataProducts.Output_Correlated_[135].centralFrequency=56835937.500000 @@ -560,6 +638,8 @@ Observation.DataProducts.Output_Correlated_[135].percentageWritten=0 Observation.DataProducts.Output_Correlated_[135].size=0 Observation.DataProducts.Output_Correlated_[135].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[135].stationSubband=291 +Observation.DataProducts.Output_Correlated_[135].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[135].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[135].subband=135 Observation.DataProducts.Output_Correlated_[136].SAP=0 Observation.DataProducts.Output_Correlated_[136].centralFrequency=57031250.000000 @@ -574,6 +654,8 @@ Observation.DataProducts.Output_Correlated_[136].percentageWritten=0 Observation.DataProducts.Output_Correlated_[136].size=0 
Observation.DataProducts.Output_Correlated_[136].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[136].stationSubband=292 +Observation.DataProducts.Output_Correlated_[136].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[136].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[136].subband=136 Observation.DataProducts.Output_Correlated_[137].SAP=0 Observation.DataProducts.Output_Correlated_[137].centralFrequency=57226562.500000 @@ -588,6 +670,8 @@ Observation.DataProducts.Output_Correlated_[137].percentageWritten=0 Observation.DataProducts.Output_Correlated_[137].size=0 Observation.DataProducts.Output_Correlated_[137].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[137].stationSubband=293 +Observation.DataProducts.Output_Correlated_[137].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[137].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[137].subband=137 Observation.DataProducts.Output_Correlated_[138].SAP=0 Observation.DataProducts.Output_Correlated_[138].centralFrequency=57421875.000000 @@ -602,6 +686,8 @@ Observation.DataProducts.Output_Correlated_[138].percentageWritten=0 Observation.DataProducts.Output_Correlated_[138].size=0 Observation.DataProducts.Output_Correlated_[138].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[138].stationSubband=294 +Observation.DataProducts.Output_Correlated_[138].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[138].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[138].subband=138 Observation.DataProducts.Output_Correlated_[139].SAP=0 Observation.DataProducts.Output_Correlated_[139].centralFrequency=57617187.500000 @@ -616,6 +702,8 @@ Observation.DataProducts.Output_Correlated_[139].percentageWritten=0 Observation.DataProducts.Output_Correlated_[139].size=0 Observation.DataProducts.Output_Correlated_[139].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[139].stationSubband=295 +Observation.DataProducts.Output_Correlated_[139].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[139].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[139].subband=139 Observation.DataProducts.Output_Correlated_[13].SAP=0 Observation.DataProducts.Output_Correlated_[13].centralFrequency=33007812.500000 @@ -630,6 +718,8 @@ Observation.DataProducts.Output_Correlated_[13].percentageWritten=0 Observation.DataProducts.Output_Correlated_[13].size=0 Observation.DataProducts.Output_Correlated_[13].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[13].stationSubband=169 +Observation.DataProducts.Output_Correlated_[13].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[13].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[13].subband=13 Observation.DataProducts.Output_Correlated_[140].SAP=0 Observation.DataProducts.Output_Correlated_[140].centralFrequency=57812500.000000 @@ -644,6 +734,8 @@ Observation.DataProducts.Output_Correlated_[140].percentageWritten=0 Observation.DataProducts.Output_Correlated_[140].size=0 Observation.DataProducts.Output_Correlated_[140].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[140].stationSubband=296 +Observation.DataProducts.Output_Correlated_[140].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[140].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[140].subband=140 Observation.DataProducts.Output_Correlated_[141].SAP=0 Observation.DataProducts.Output_Correlated_[141].centralFrequency=58007812.500000 @@ -658,6 +750,8 @@ Observation.DataProducts.Output_Correlated_[141].percentageWritten=0 Observation.DataProducts.Output_Correlated_[141].size=0 Observation.DataProducts.Output_Correlated_[141].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[141].stationSubband=297 
+Observation.DataProducts.Output_Correlated_[141].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[141].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[141].subband=141 Observation.DataProducts.Output_Correlated_[142].SAP=0 Observation.DataProducts.Output_Correlated_[142].centralFrequency=58203125.000000 @@ -672,6 +766,8 @@ Observation.DataProducts.Output_Correlated_[142].percentageWritten=0 Observation.DataProducts.Output_Correlated_[142].size=0 Observation.DataProducts.Output_Correlated_[142].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[142].stationSubband=298 +Observation.DataProducts.Output_Correlated_[142].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[142].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[142].subband=142 Observation.DataProducts.Output_Correlated_[143].SAP=0 Observation.DataProducts.Output_Correlated_[143].centralFrequency=58398437.500000 @@ -686,6 +782,8 @@ Observation.DataProducts.Output_Correlated_[143].percentageWritten=0 Observation.DataProducts.Output_Correlated_[143].size=0 Observation.DataProducts.Output_Correlated_[143].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[143].stationSubband=299 +Observation.DataProducts.Output_Correlated_[143].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[143].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[143].subband=143 Observation.DataProducts.Output_Correlated_[144].SAP=0 Observation.DataProducts.Output_Correlated_[144].centralFrequency=58593750.000000 @@ -700,6 +798,8 @@ Observation.DataProducts.Output_Correlated_[144].percentageWritten=0 Observation.DataProducts.Output_Correlated_[144].size=0 Observation.DataProducts.Output_Correlated_[144].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[144].stationSubband=300 +Observation.DataProducts.Output_Correlated_[144].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[144].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[144].subband=144 Observation.DataProducts.Output_Correlated_[145].SAP=0 Observation.DataProducts.Output_Correlated_[145].centralFrequency=58789062.500000 @@ -714,6 +814,8 @@ Observation.DataProducts.Output_Correlated_[145].percentageWritten=0 Observation.DataProducts.Output_Correlated_[145].size=0 Observation.DataProducts.Output_Correlated_[145].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[145].stationSubband=301 +Observation.DataProducts.Output_Correlated_[145].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[145].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[145].subband=145 Observation.DataProducts.Output_Correlated_[146].SAP=0 Observation.DataProducts.Output_Correlated_[146].centralFrequency=58984375.000000 @@ -728,6 +830,8 @@ Observation.DataProducts.Output_Correlated_[146].percentageWritten=0 Observation.DataProducts.Output_Correlated_[146].size=0 Observation.DataProducts.Output_Correlated_[146].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[146].stationSubband=302 +Observation.DataProducts.Output_Correlated_[146].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[146].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[146].subband=146 Observation.DataProducts.Output_Correlated_[147].SAP=0 Observation.DataProducts.Output_Correlated_[147].centralFrequency=59179687.500000 @@ -742,6 +846,8 @@ Observation.DataProducts.Output_Correlated_[147].percentageWritten=0 Observation.DataProducts.Output_Correlated_[147].size=0 Observation.DataProducts.Output_Correlated_[147].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[147].stationSubband=303 +Observation.DataProducts.Output_Correlated_[147].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[147].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[147].subband=147 Observation.DataProducts.Output_Correlated_[148].SAP=0 Observation.DataProducts.Output_Correlated_[148].centralFrequency=59375000.000000 @@ -756,6 +862,8 @@ Observation.DataProducts.Output_Correlated_[148].percentageWritten=0 Observation.DataProducts.Output_Correlated_[148].size=0 Observation.DataProducts.Output_Correlated_[148].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[148].stationSubband=304 +Observation.DataProducts.Output_Correlated_[148].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[148].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[148].subband=148 Observation.DataProducts.Output_Correlated_[149].SAP=0 Observation.DataProducts.Output_Correlated_[149].centralFrequency=59570312.500000 @@ -770,6 +878,8 @@ Observation.DataProducts.Output_Correlated_[149].percentageWritten=0 Observation.DataProducts.Output_Correlated_[149].size=0 Observation.DataProducts.Output_Correlated_[149].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[149].stationSubband=305 +Observation.DataProducts.Output_Correlated_[149].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[149].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[149].subband=149 Observation.DataProducts.Output_Correlated_[14].SAP=0 Observation.DataProducts.Output_Correlated_[14].centralFrequency=33203125.000000 @@ -784,6 +894,8 @@ Observation.DataProducts.Output_Correlated_[14].percentageWritten=0 Observation.DataProducts.Output_Correlated_[14].size=0 Observation.DataProducts.Output_Correlated_[14].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[14].stationSubband=170 +Observation.DataProducts.Output_Correlated_[14].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[14].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[14].subband=14 
Observation.DataProducts.Output_Correlated_[150].SAP=0 Observation.DataProducts.Output_Correlated_[150].centralFrequency=59765625.000000 @@ -798,6 +910,8 @@ Observation.DataProducts.Output_Correlated_[150].percentageWritten=0 Observation.DataProducts.Output_Correlated_[150].size=0 Observation.DataProducts.Output_Correlated_[150].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[150].stationSubband=306 +Observation.DataProducts.Output_Correlated_[150].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[150].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[150].subband=150 Observation.DataProducts.Output_Correlated_[151].SAP=0 Observation.DataProducts.Output_Correlated_[151].centralFrequency=59960937.500000 @@ -812,6 +926,8 @@ Observation.DataProducts.Output_Correlated_[151].percentageWritten=0 Observation.DataProducts.Output_Correlated_[151].size=0 Observation.DataProducts.Output_Correlated_[151].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[151].stationSubband=307 +Observation.DataProducts.Output_Correlated_[151].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[151].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[151].subband=151 Observation.DataProducts.Output_Correlated_[152].SAP=0 Observation.DataProducts.Output_Correlated_[152].centralFrequency=60156250.000000 @@ -826,6 +942,8 @@ Observation.DataProducts.Output_Correlated_[152].percentageWritten=0 Observation.DataProducts.Output_Correlated_[152].size=0 Observation.DataProducts.Output_Correlated_[152].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[152].stationSubband=308 +Observation.DataProducts.Output_Correlated_[152].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[152].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[152].subband=152 Observation.DataProducts.Output_Correlated_[153].SAP=0 
Observation.DataProducts.Output_Correlated_[153].centralFrequency=60351562.500000 @@ -840,6 +958,8 @@ Observation.DataProducts.Output_Correlated_[153].percentageWritten=0 Observation.DataProducts.Output_Correlated_[153].size=0 Observation.DataProducts.Output_Correlated_[153].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[153].stationSubband=309 +Observation.DataProducts.Output_Correlated_[153].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[153].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[153].subband=153 Observation.DataProducts.Output_Correlated_[154].SAP=0 Observation.DataProducts.Output_Correlated_[154].centralFrequency=60546875.000000 @@ -854,6 +974,8 @@ Observation.DataProducts.Output_Correlated_[154].percentageWritten=0 Observation.DataProducts.Output_Correlated_[154].size=0 Observation.DataProducts.Output_Correlated_[154].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[154].stationSubband=310 +Observation.DataProducts.Output_Correlated_[154].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[154].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[154].subband=154 Observation.DataProducts.Output_Correlated_[155].SAP=0 Observation.DataProducts.Output_Correlated_[155].centralFrequency=60742187.500000 @@ -868,6 +990,8 @@ Observation.DataProducts.Output_Correlated_[155].percentageWritten=0 Observation.DataProducts.Output_Correlated_[155].size=0 Observation.DataProducts.Output_Correlated_[155].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[155].stationSubband=311 +Observation.DataProducts.Output_Correlated_[155].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[155].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[155].subband=155 Observation.DataProducts.Output_Correlated_[156].SAP=0 Observation.DataProducts.Output_Correlated_[156].centralFrequency=60937500.000000 @@ -882,6 +1006,8 
@@ Observation.DataProducts.Output_Correlated_[156].percentageWritten=0 Observation.DataProducts.Output_Correlated_[156].size=0 Observation.DataProducts.Output_Correlated_[156].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[156].stationSubband=312 +Observation.DataProducts.Output_Correlated_[156].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[156].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[156].subband=156 Observation.DataProducts.Output_Correlated_[157].SAP=0 Observation.DataProducts.Output_Correlated_[157].centralFrequency=61132812.500000 @@ -896,6 +1022,8 @@ Observation.DataProducts.Output_Correlated_[157].percentageWritten=0 Observation.DataProducts.Output_Correlated_[157].size=0 Observation.DataProducts.Output_Correlated_[157].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[157].stationSubband=313 +Observation.DataProducts.Output_Correlated_[157].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[157].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[157].subband=157 Observation.DataProducts.Output_Correlated_[158].SAP=0 Observation.DataProducts.Output_Correlated_[158].centralFrequency=61328125.000000 @@ -910,6 +1038,8 @@ Observation.DataProducts.Output_Correlated_[158].percentageWritten=0 Observation.DataProducts.Output_Correlated_[158].size=0 Observation.DataProducts.Output_Correlated_[158].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[158].stationSubband=314 +Observation.DataProducts.Output_Correlated_[158].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[158].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[158].subband=158 Observation.DataProducts.Output_Correlated_[159].SAP=0 Observation.DataProducts.Output_Correlated_[159].centralFrequency=61523437.500000 @@ -924,6 +1054,8 @@ Observation.DataProducts.Output_Correlated_[159].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[159].size=0 Observation.DataProducts.Output_Correlated_[159].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[159].stationSubband=315 +Observation.DataProducts.Output_Correlated_[159].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[159].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[159].subband=159 Observation.DataProducts.Output_Correlated_[15].SAP=0 Observation.DataProducts.Output_Correlated_[15].centralFrequency=33398437.500000 @@ -938,6 +1070,8 @@ Observation.DataProducts.Output_Correlated_[15].percentageWritten=0 Observation.DataProducts.Output_Correlated_[15].size=0 Observation.DataProducts.Output_Correlated_[15].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[15].stationSubband=171 +Observation.DataProducts.Output_Correlated_[15].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[15].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[15].subband=15 Observation.DataProducts.Output_Correlated_[160].SAP=0 Observation.DataProducts.Output_Correlated_[160].centralFrequency=61718750.000000 @@ -952,6 +1086,8 @@ Observation.DataProducts.Output_Correlated_[160].percentageWritten=0 Observation.DataProducts.Output_Correlated_[160].size=0 Observation.DataProducts.Output_Correlated_[160].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[160].stationSubband=316 +Observation.DataProducts.Output_Correlated_[160].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[160].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[160].subband=160 Observation.DataProducts.Output_Correlated_[161].SAP=0 Observation.DataProducts.Output_Correlated_[161].centralFrequency=61914062.500000 @@ -966,6 +1102,8 @@ Observation.DataProducts.Output_Correlated_[161].percentageWritten=0 Observation.DataProducts.Output_Correlated_[161].size=0 
Observation.DataProducts.Output_Correlated_[161].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[161].stationSubband=317 +Observation.DataProducts.Output_Correlated_[161].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[161].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[161].subband=161 Observation.DataProducts.Output_Correlated_[162].SAP=0 Observation.DataProducts.Output_Correlated_[162].centralFrequency=62109375.000000 @@ -980,6 +1118,8 @@ Observation.DataProducts.Output_Correlated_[162].percentageWritten=0 Observation.DataProducts.Output_Correlated_[162].size=0 Observation.DataProducts.Output_Correlated_[162].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[162].stationSubband=318 +Observation.DataProducts.Output_Correlated_[162].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[162].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[162].subband=162 Observation.DataProducts.Output_Correlated_[163].SAP=0 Observation.DataProducts.Output_Correlated_[163].centralFrequency=62304687.500000 @@ -994,6 +1134,8 @@ Observation.DataProducts.Output_Correlated_[163].percentageWritten=0 Observation.DataProducts.Output_Correlated_[163].size=0 Observation.DataProducts.Output_Correlated_[163].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[163].stationSubband=319 +Observation.DataProducts.Output_Correlated_[163].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[163].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[163].subband=163 Observation.DataProducts.Output_Correlated_[164].SAP=0 Observation.DataProducts.Output_Correlated_[164].centralFrequency=62500000.000000 @@ -1008,6 +1150,8 @@ Observation.DataProducts.Output_Correlated_[164].percentageWritten=0 Observation.DataProducts.Output_Correlated_[164].size=0 Observation.DataProducts.Output_Correlated_[164].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[164].stationSubband=320 +Observation.DataProducts.Output_Correlated_[164].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[164].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[164].subband=164 Observation.DataProducts.Output_Correlated_[165].SAP=0 Observation.DataProducts.Output_Correlated_[165].centralFrequency=62695312.500000 @@ -1022,6 +1166,8 @@ Observation.DataProducts.Output_Correlated_[165].percentageWritten=0 Observation.DataProducts.Output_Correlated_[165].size=0 Observation.DataProducts.Output_Correlated_[165].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[165].stationSubband=321 +Observation.DataProducts.Output_Correlated_[165].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[165].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[165].subband=165 Observation.DataProducts.Output_Correlated_[166].SAP=0 Observation.DataProducts.Output_Correlated_[166].centralFrequency=62890625.000000 @@ -1036,6 +1182,8 @@ Observation.DataProducts.Output_Correlated_[166].percentageWritten=0 Observation.DataProducts.Output_Correlated_[166].size=0 Observation.DataProducts.Output_Correlated_[166].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[166].stationSubband=322 +Observation.DataProducts.Output_Correlated_[166].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[166].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[166].subband=166 Observation.DataProducts.Output_Correlated_[167].SAP=0 Observation.DataProducts.Output_Correlated_[167].centralFrequency=63085937.500000 @@ -1050,6 +1198,8 @@ Observation.DataProducts.Output_Correlated_[167].percentageWritten=0 Observation.DataProducts.Output_Correlated_[167].size=0 Observation.DataProducts.Output_Correlated_[167].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[167].stationSubband=323 
+Observation.DataProducts.Output_Correlated_[167].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[167].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[167].subband=167 Observation.DataProducts.Output_Correlated_[168].SAP=0 Observation.DataProducts.Output_Correlated_[168].centralFrequency=63281250.000000 @@ -1064,6 +1214,8 @@ Observation.DataProducts.Output_Correlated_[168].percentageWritten=0 Observation.DataProducts.Output_Correlated_[168].size=0 Observation.DataProducts.Output_Correlated_[168].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[168].stationSubband=324 +Observation.DataProducts.Output_Correlated_[168].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[168].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[168].subband=168 Observation.DataProducts.Output_Correlated_[169].SAP=0 Observation.DataProducts.Output_Correlated_[169].centralFrequency=63476562.500000 @@ -1078,6 +1230,8 @@ Observation.DataProducts.Output_Correlated_[169].percentageWritten=0 Observation.DataProducts.Output_Correlated_[169].size=0 Observation.DataProducts.Output_Correlated_[169].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[169].stationSubband=325 +Observation.DataProducts.Output_Correlated_[169].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[169].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[169].subband=169 Observation.DataProducts.Output_Correlated_[16].SAP=0 Observation.DataProducts.Output_Correlated_[16].centralFrequency=33593750.000000 @@ -1092,6 +1246,8 @@ Observation.DataProducts.Output_Correlated_[16].percentageWritten=0 Observation.DataProducts.Output_Correlated_[16].size=0 Observation.DataProducts.Output_Correlated_[16].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[16].stationSubband=172 +Observation.DataProducts.Output_Correlated_[16].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[16].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[16].subband=16 Observation.DataProducts.Output_Correlated_[170].SAP=0 Observation.DataProducts.Output_Correlated_[170].centralFrequency=63671875.000000 @@ -1106,6 +1262,8 @@ Observation.DataProducts.Output_Correlated_[170].percentageWritten=0 Observation.DataProducts.Output_Correlated_[170].size=0 Observation.DataProducts.Output_Correlated_[170].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[170].stationSubband=326 +Observation.DataProducts.Output_Correlated_[170].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[170].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[170].subband=170 Observation.DataProducts.Output_Correlated_[171].SAP=0 Observation.DataProducts.Output_Correlated_[171].centralFrequency=63867187.500000 @@ -1120,6 +1278,8 @@ Observation.DataProducts.Output_Correlated_[171].percentageWritten=0 Observation.DataProducts.Output_Correlated_[171].size=0 Observation.DataProducts.Output_Correlated_[171].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[171].stationSubband=327 +Observation.DataProducts.Output_Correlated_[171].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[171].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[171].subband=171 Observation.DataProducts.Output_Correlated_[172].SAP=0 Observation.DataProducts.Output_Correlated_[172].centralFrequency=64062500.000000 @@ -1134,6 +1294,8 @@ Observation.DataProducts.Output_Correlated_[172].percentageWritten=0 Observation.DataProducts.Output_Correlated_[172].size=0 Observation.DataProducts.Output_Correlated_[172].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[172].stationSubband=328 +Observation.DataProducts.Output_Correlated_[172].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[172].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[172].subband=172 Observation.DataProducts.Output_Correlated_[173].SAP=0 Observation.DataProducts.Output_Correlated_[173].centralFrequency=64257812.500000 @@ -1148,6 +1310,8 @@ Observation.DataProducts.Output_Correlated_[173].percentageWritten=0 Observation.DataProducts.Output_Correlated_[173].size=0 Observation.DataProducts.Output_Correlated_[173].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[173].stationSubband=329 +Observation.DataProducts.Output_Correlated_[173].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[173].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[173].subband=173 Observation.DataProducts.Output_Correlated_[174].SAP=0 Observation.DataProducts.Output_Correlated_[174].centralFrequency=64453125.000000 @@ -1162,6 +1326,8 @@ Observation.DataProducts.Output_Correlated_[174].percentageWritten=0 Observation.DataProducts.Output_Correlated_[174].size=0 Observation.DataProducts.Output_Correlated_[174].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[174].stationSubband=330 +Observation.DataProducts.Output_Correlated_[174].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[174].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[174].subband=174 Observation.DataProducts.Output_Correlated_[175].SAP=0 Observation.DataProducts.Output_Correlated_[175].centralFrequency=64648437.500000 @@ -1176,6 +1342,8 @@ Observation.DataProducts.Output_Correlated_[175].percentageWritten=0 Observation.DataProducts.Output_Correlated_[175].size=0 Observation.DataProducts.Output_Correlated_[175].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[175].stationSubband=331 +Observation.DataProducts.Output_Correlated_[175].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[175].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[175].subband=175 
Observation.DataProducts.Output_Correlated_[176].SAP=0 Observation.DataProducts.Output_Correlated_[176].centralFrequency=64843750.000000 @@ -1190,6 +1358,8 @@ Observation.DataProducts.Output_Correlated_[176].percentageWritten=0 Observation.DataProducts.Output_Correlated_[176].size=0 Observation.DataProducts.Output_Correlated_[176].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[176].stationSubband=332 +Observation.DataProducts.Output_Correlated_[176].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[176].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[176].subband=176 Observation.DataProducts.Output_Correlated_[177].SAP=0 Observation.DataProducts.Output_Correlated_[177].centralFrequency=65039062.500000 @@ -1204,6 +1374,8 @@ Observation.DataProducts.Output_Correlated_[177].percentageWritten=0 Observation.DataProducts.Output_Correlated_[177].size=0 Observation.DataProducts.Output_Correlated_[177].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[177].stationSubband=333 +Observation.DataProducts.Output_Correlated_[177].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[177].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[177].subband=177 Observation.DataProducts.Output_Correlated_[178].SAP=0 Observation.DataProducts.Output_Correlated_[178].centralFrequency=65234375.000000 @@ -1218,6 +1390,8 @@ Observation.DataProducts.Output_Correlated_[178].percentageWritten=0 Observation.DataProducts.Output_Correlated_[178].size=0 Observation.DataProducts.Output_Correlated_[178].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[178].stationSubband=334 +Observation.DataProducts.Output_Correlated_[178].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[178].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[178].subband=178 Observation.DataProducts.Output_Correlated_[179].SAP=0 
Observation.DataProducts.Output_Correlated_[179].centralFrequency=65429687.500000 @@ -1232,6 +1406,8 @@ Observation.DataProducts.Output_Correlated_[179].percentageWritten=0 Observation.DataProducts.Output_Correlated_[179].size=0 Observation.DataProducts.Output_Correlated_[179].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[179].stationSubband=335 +Observation.DataProducts.Output_Correlated_[179].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[179].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[179].subband=179 Observation.DataProducts.Output_Correlated_[17].SAP=0 Observation.DataProducts.Output_Correlated_[17].centralFrequency=33789062.500000 @@ -1246,6 +1422,8 @@ Observation.DataProducts.Output_Correlated_[17].percentageWritten=0 Observation.DataProducts.Output_Correlated_[17].size=0 Observation.DataProducts.Output_Correlated_[17].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[17].stationSubband=173 +Observation.DataProducts.Output_Correlated_[17].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[17].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[17].subband=17 Observation.DataProducts.Output_Correlated_[180].SAP=0 Observation.DataProducts.Output_Correlated_[180].centralFrequency=65625000.000000 @@ -1260,6 +1438,8 @@ Observation.DataProducts.Output_Correlated_[180].percentageWritten=0 Observation.DataProducts.Output_Correlated_[180].size=0 Observation.DataProducts.Output_Correlated_[180].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[180].stationSubband=336 +Observation.DataProducts.Output_Correlated_[180].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[180].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[180].subband=180 Observation.DataProducts.Output_Correlated_[181].SAP=0 Observation.DataProducts.Output_Correlated_[181].centralFrequency=65820312.500000 @@ -1274,6 +1454,8 @@ 
Observation.DataProducts.Output_Correlated_[181].percentageWritten=0 Observation.DataProducts.Output_Correlated_[181].size=0 Observation.DataProducts.Output_Correlated_[181].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[181].stationSubband=337 +Observation.DataProducts.Output_Correlated_[181].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[181].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[181].subband=181 Observation.DataProducts.Output_Correlated_[182].SAP=0 Observation.DataProducts.Output_Correlated_[182].centralFrequency=66015625.000000 @@ -1288,6 +1470,8 @@ Observation.DataProducts.Output_Correlated_[182].percentageWritten=0 Observation.DataProducts.Output_Correlated_[182].size=0 Observation.DataProducts.Output_Correlated_[182].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[182].stationSubband=338 +Observation.DataProducts.Output_Correlated_[182].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[182].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[182].subband=182 Observation.DataProducts.Output_Correlated_[183].SAP=0 Observation.DataProducts.Output_Correlated_[183].centralFrequency=66210937.500000 @@ -1302,6 +1486,8 @@ Observation.DataProducts.Output_Correlated_[183].percentageWritten=0 Observation.DataProducts.Output_Correlated_[183].size=0 Observation.DataProducts.Output_Correlated_[183].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[183].stationSubband=339 +Observation.DataProducts.Output_Correlated_[183].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[183].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[183].subband=183 Observation.DataProducts.Output_Correlated_[184].SAP=0 Observation.DataProducts.Output_Correlated_[184].centralFrequency=66406250.000000 @@ -1316,6 +1502,8 @@ Observation.DataProducts.Output_Correlated_[184].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[184].size=0 Observation.DataProducts.Output_Correlated_[184].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[184].stationSubband=340 +Observation.DataProducts.Output_Correlated_[184].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[184].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[184].subband=184 Observation.DataProducts.Output_Correlated_[185].SAP=0 Observation.DataProducts.Output_Correlated_[185].centralFrequency=66601562.500000 @@ -1330,6 +1518,8 @@ Observation.DataProducts.Output_Correlated_[185].percentageWritten=0 Observation.DataProducts.Output_Correlated_[185].size=0 Observation.DataProducts.Output_Correlated_[185].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[185].stationSubband=341 +Observation.DataProducts.Output_Correlated_[185].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[185].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[185].subband=185 Observation.DataProducts.Output_Correlated_[186].SAP=0 Observation.DataProducts.Output_Correlated_[186].centralFrequency=66796875.000000 @@ -1344,6 +1534,8 @@ Observation.DataProducts.Output_Correlated_[186].percentageWritten=0 Observation.DataProducts.Output_Correlated_[186].size=0 Observation.DataProducts.Output_Correlated_[186].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[186].stationSubband=342 +Observation.DataProducts.Output_Correlated_[186].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[186].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[186].subband=186 Observation.DataProducts.Output_Correlated_[187].SAP=0 Observation.DataProducts.Output_Correlated_[187].centralFrequency=66992187.500000 @@ -1358,6 +1550,8 @@ Observation.DataProducts.Output_Correlated_[187].percentageWritten=0 Observation.DataProducts.Output_Correlated_[187].size=0 
Observation.DataProducts.Output_Correlated_[187].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[187].stationSubband=343 +Observation.DataProducts.Output_Correlated_[187].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[187].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[187].subband=187 Observation.DataProducts.Output_Correlated_[188].SAP=0 Observation.DataProducts.Output_Correlated_[188].centralFrequency=67187500.000000 @@ -1372,6 +1566,8 @@ Observation.DataProducts.Output_Correlated_[188].percentageWritten=0 Observation.DataProducts.Output_Correlated_[188].size=0 Observation.DataProducts.Output_Correlated_[188].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[188].stationSubband=344 +Observation.DataProducts.Output_Correlated_[188].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[188].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[188].subband=188 Observation.DataProducts.Output_Correlated_[189].SAP=0 Observation.DataProducts.Output_Correlated_[189].centralFrequency=67382812.500000 @@ -1386,6 +1582,8 @@ Observation.DataProducts.Output_Correlated_[189].percentageWritten=0 Observation.DataProducts.Output_Correlated_[189].size=0 Observation.DataProducts.Output_Correlated_[189].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[189].stationSubband=345 +Observation.DataProducts.Output_Correlated_[189].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[189].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[189].subband=189 Observation.DataProducts.Output_Correlated_[18].SAP=0 Observation.DataProducts.Output_Correlated_[18].centralFrequency=33984375.000000 @@ -1400,6 +1598,8 @@ Observation.DataProducts.Output_Correlated_[18].percentageWritten=0 Observation.DataProducts.Output_Correlated_[18].size=0 Observation.DataProducts.Output_Correlated_[18].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[18].stationSubband=174 +Observation.DataProducts.Output_Correlated_[18].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[18].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[18].subband=18 Observation.DataProducts.Output_Correlated_[190].SAP=0 Observation.DataProducts.Output_Correlated_[190].centralFrequency=67578125.000000 @@ -1414,6 +1614,8 @@ Observation.DataProducts.Output_Correlated_[190].percentageWritten=0 Observation.DataProducts.Output_Correlated_[190].size=0 Observation.DataProducts.Output_Correlated_[190].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[190].stationSubband=346 +Observation.DataProducts.Output_Correlated_[190].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[190].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[190].subband=190 Observation.DataProducts.Output_Correlated_[191].SAP=0 Observation.DataProducts.Output_Correlated_[191].centralFrequency=67773437.500000 @@ -1428,6 +1630,8 @@ Observation.DataProducts.Output_Correlated_[191].percentageWritten=0 Observation.DataProducts.Output_Correlated_[191].size=0 Observation.DataProducts.Output_Correlated_[191].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[191].stationSubband=347 +Observation.DataProducts.Output_Correlated_[191].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[191].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[191].subband=191 Observation.DataProducts.Output_Correlated_[192].SAP=0 Observation.DataProducts.Output_Correlated_[192].centralFrequency=67968750.000000 @@ -1442,6 +1646,8 @@ Observation.DataProducts.Output_Correlated_[192].percentageWritten=0 Observation.DataProducts.Output_Correlated_[192].size=0 Observation.DataProducts.Output_Correlated_[192].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[192].stationSubband=348 
+Observation.DataProducts.Output_Correlated_[192].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[192].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[192].subband=192 Observation.DataProducts.Output_Correlated_[193].SAP=0 Observation.DataProducts.Output_Correlated_[193].centralFrequency=68164062.500000 @@ -1456,6 +1662,8 @@ Observation.DataProducts.Output_Correlated_[193].percentageWritten=0 Observation.DataProducts.Output_Correlated_[193].size=0 Observation.DataProducts.Output_Correlated_[193].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[193].stationSubband=349 +Observation.DataProducts.Output_Correlated_[193].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[193].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[193].subband=193 Observation.DataProducts.Output_Correlated_[194].SAP=0 Observation.DataProducts.Output_Correlated_[194].centralFrequency=68359375.000000 @@ -1470,6 +1678,8 @@ Observation.DataProducts.Output_Correlated_[194].percentageWritten=0 Observation.DataProducts.Output_Correlated_[194].size=0 Observation.DataProducts.Output_Correlated_[194].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[194].stationSubband=350 +Observation.DataProducts.Output_Correlated_[194].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[194].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[194].subband=194 Observation.DataProducts.Output_Correlated_[195].SAP=0 Observation.DataProducts.Output_Correlated_[195].centralFrequency=68554687.500000 @@ -1484,6 +1694,8 @@ Observation.DataProducts.Output_Correlated_[195].percentageWritten=0 Observation.DataProducts.Output_Correlated_[195].size=0 Observation.DataProducts.Output_Correlated_[195].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[195].stationSubband=351 +Observation.DataProducts.Output_Correlated_[195].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[195].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[195].subband=195 Observation.DataProducts.Output_Correlated_[196].SAP=0 Observation.DataProducts.Output_Correlated_[196].centralFrequency=68750000.000000 @@ -1498,6 +1710,8 @@ Observation.DataProducts.Output_Correlated_[196].percentageWritten=0 Observation.DataProducts.Output_Correlated_[196].size=0 Observation.DataProducts.Output_Correlated_[196].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[196].stationSubband=352 +Observation.DataProducts.Output_Correlated_[196].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[196].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[196].subband=196 Observation.DataProducts.Output_Correlated_[197].SAP=0 Observation.DataProducts.Output_Correlated_[197].centralFrequency=68945312.500000 @@ -1512,6 +1726,8 @@ Observation.DataProducts.Output_Correlated_[197].percentageWritten=0 Observation.DataProducts.Output_Correlated_[197].size=0 Observation.DataProducts.Output_Correlated_[197].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[197].stationSubband=353 +Observation.DataProducts.Output_Correlated_[197].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[197].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[197].subband=197 Observation.DataProducts.Output_Correlated_[198].SAP=0 Observation.DataProducts.Output_Correlated_[198].centralFrequency=69140625.000000 @@ -1526,6 +1742,8 @@ Observation.DataProducts.Output_Correlated_[198].percentageWritten=0 Observation.DataProducts.Output_Correlated_[198].size=0 Observation.DataProducts.Output_Correlated_[198].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[198].stationSubband=354 +Observation.DataProducts.Output_Correlated_[198].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[198].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[198].subband=198 Observation.DataProducts.Output_Correlated_[199].SAP=0 Observation.DataProducts.Output_Correlated_[199].centralFrequency=69335937.500000 @@ -1540,6 +1758,8 @@ Observation.DataProducts.Output_Correlated_[199].percentageWritten=0 Observation.DataProducts.Output_Correlated_[199].size=0 Observation.DataProducts.Output_Correlated_[199].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[199].stationSubband=355 +Observation.DataProducts.Output_Correlated_[199].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[199].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[199].subband=199 Observation.DataProducts.Output_Correlated_[19].SAP=0 Observation.DataProducts.Output_Correlated_[19].centralFrequency=34179687.500000 @@ -1554,6 +1774,8 @@ Observation.DataProducts.Output_Correlated_[19].percentageWritten=0 Observation.DataProducts.Output_Correlated_[19].size=0 Observation.DataProducts.Output_Correlated_[19].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[19].stationSubband=175 +Observation.DataProducts.Output_Correlated_[19].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[19].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[19].subband=19 Observation.DataProducts.Output_Correlated_[1].SAP=0 Observation.DataProducts.Output_Correlated_[1].centralFrequency=30664062.500000 @@ -1568,6 +1790,8 @@ Observation.DataProducts.Output_Correlated_[1].percentageWritten=0 Observation.DataProducts.Output_Correlated_[1].size=0 Observation.DataProducts.Output_Correlated_[1].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[1].stationSubband=157 +Observation.DataProducts.Output_Correlated_[1].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[1].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[1].subband=1 Observation.DataProducts.Output_Correlated_[200].SAP=0 
Observation.DataProducts.Output_Correlated_[200].centralFrequency=69531250.000000 @@ -1582,6 +1806,8 @@ Observation.DataProducts.Output_Correlated_[200].percentageWritten=0 Observation.DataProducts.Output_Correlated_[200].size=0 Observation.DataProducts.Output_Correlated_[200].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[200].stationSubband=356 +Observation.DataProducts.Output_Correlated_[200].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[200].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[200].subband=200 Observation.DataProducts.Output_Correlated_[201].SAP=0 Observation.DataProducts.Output_Correlated_[201].centralFrequency=69726562.500000 @@ -1596,6 +1822,8 @@ Observation.DataProducts.Output_Correlated_[201].percentageWritten=0 Observation.DataProducts.Output_Correlated_[201].size=0 Observation.DataProducts.Output_Correlated_[201].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[201].stationSubband=357 +Observation.DataProducts.Output_Correlated_[201].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[201].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[201].subband=201 Observation.DataProducts.Output_Correlated_[202].SAP=0 Observation.DataProducts.Output_Correlated_[202].centralFrequency=69921875.000000 @@ -1610,6 +1838,8 @@ Observation.DataProducts.Output_Correlated_[202].percentageWritten=0 Observation.DataProducts.Output_Correlated_[202].size=0 Observation.DataProducts.Output_Correlated_[202].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[202].stationSubband=358 +Observation.DataProducts.Output_Correlated_[202].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[202].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[202].subband=202 Observation.DataProducts.Output_Correlated_[203].SAP=0 Observation.DataProducts.Output_Correlated_[203].centralFrequency=70117187.500000 @@ -1624,6 
+1854,8 @@ Observation.DataProducts.Output_Correlated_[203].percentageWritten=0 Observation.DataProducts.Output_Correlated_[203].size=0 Observation.DataProducts.Output_Correlated_[203].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[203].stationSubband=359 +Observation.DataProducts.Output_Correlated_[203].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[203].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[203].subband=203 Observation.DataProducts.Output_Correlated_[204].SAP=0 Observation.DataProducts.Output_Correlated_[204].centralFrequency=70312500.000000 @@ -1638,6 +1870,8 @@ Observation.DataProducts.Output_Correlated_[204].percentageWritten=0 Observation.DataProducts.Output_Correlated_[204].size=0 Observation.DataProducts.Output_Correlated_[204].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[204].stationSubband=360 +Observation.DataProducts.Output_Correlated_[204].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[204].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[204].subband=204 Observation.DataProducts.Output_Correlated_[205].SAP=0 Observation.DataProducts.Output_Correlated_[205].centralFrequency=70507812.500000 @@ -1652,6 +1886,8 @@ Observation.DataProducts.Output_Correlated_[205].percentageWritten=0 Observation.DataProducts.Output_Correlated_[205].size=0 Observation.DataProducts.Output_Correlated_[205].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[205].stationSubband=361 +Observation.DataProducts.Output_Correlated_[205].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[205].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[205].subband=205 Observation.DataProducts.Output_Correlated_[206].SAP=0 Observation.DataProducts.Output_Correlated_[206].centralFrequency=70703125.000000 @@ -1666,6 +1902,8 @@ Observation.DataProducts.Output_Correlated_[206].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[206].size=0 Observation.DataProducts.Output_Correlated_[206].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[206].stationSubband=362 +Observation.DataProducts.Output_Correlated_[206].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[206].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[206].subband=206 Observation.DataProducts.Output_Correlated_[207].SAP=0 Observation.DataProducts.Output_Correlated_[207].centralFrequency=70898437.500000 @@ -1680,6 +1918,8 @@ Observation.DataProducts.Output_Correlated_[207].percentageWritten=0 Observation.DataProducts.Output_Correlated_[207].size=0 Observation.DataProducts.Output_Correlated_[207].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[207].stationSubband=363 +Observation.DataProducts.Output_Correlated_[207].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[207].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[207].subband=207 Observation.DataProducts.Output_Correlated_[208].SAP=0 Observation.DataProducts.Output_Correlated_[208].centralFrequency=71093750.000000 @@ -1694,6 +1934,8 @@ Observation.DataProducts.Output_Correlated_[208].percentageWritten=0 Observation.DataProducts.Output_Correlated_[208].size=0 Observation.DataProducts.Output_Correlated_[208].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[208].stationSubband=364 +Observation.DataProducts.Output_Correlated_[208].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[208].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[208].subband=208 Observation.DataProducts.Output_Correlated_[209].SAP=0 Observation.DataProducts.Output_Correlated_[209].centralFrequency=71289062.500000 @@ -1708,6 +1950,8 @@ Observation.DataProducts.Output_Correlated_[209].percentageWritten=0 Observation.DataProducts.Output_Correlated_[209].size=0 
Observation.DataProducts.Output_Correlated_[209].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[209].stationSubband=365 +Observation.DataProducts.Output_Correlated_[209].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[209].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[209].subband=209 Observation.DataProducts.Output_Correlated_[20].SAP=0 Observation.DataProducts.Output_Correlated_[20].centralFrequency=34375000.000000 @@ -1722,6 +1966,8 @@ Observation.DataProducts.Output_Correlated_[20].percentageWritten=0 Observation.DataProducts.Output_Correlated_[20].size=0 Observation.DataProducts.Output_Correlated_[20].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[20].stationSubband=176 +Observation.DataProducts.Output_Correlated_[20].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[20].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[20].subband=20 Observation.DataProducts.Output_Correlated_[210].SAP=0 Observation.DataProducts.Output_Correlated_[210].centralFrequency=71484375.000000 @@ -1736,6 +1982,8 @@ Observation.DataProducts.Output_Correlated_[210].percentageWritten=0 Observation.DataProducts.Output_Correlated_[210].size=0 Observation.DataProducts.Output_Correlated_[210].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[210].stationSubband=366 +Observation.DataProducts.Output_Correlated_[210].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[210].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[210].subband=210 Observation.DataProducts.Output_Correlated_[211].SAP=0 Observation.DataProducts.Output_Correlated_[211].centralFrequency=71679687.500000 @@ -1750,6 +1998,8 @@ Observation.DataProducts.Output_Correlated_[211].percentageWritten=0 Observation.DataProducts.Output_Correlated_[211].size=0 Observation.DataProducts.Output_Correlated_[211].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[211].stationSubband=367 +Observation.DataProducts.Output_Correlated_[211].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[211].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[211].subband=211 Observation.DataProducts.Output_Correlated_[212].SAP=0 Observation.DataProducts.Output_Correlated_[212].centralFrequency=71875000.000000 @@ -1764,6 +2014,8 @@ Observation.DataProducts.Output_Correlated_[212].percentageWritten=0 Observation.DataProducts.Output_Correlated_[212].size=0 Observation.DataProducts.Output_Correlated_[212].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[212].stationSubband=368 +Observation.DataProducts.Output_Correlated_[212].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[212].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[212].subband=212 Observation.DataProducts.Output_Correlated_[213].SAP=0 Observation.DataProducts.Output_Correlated_[213].centralFrequency=72070312.500000 @@ -1778,6 +2030,8 @@ Observation.DataProducts.Output_Correlated_[213].percentageWritten=0 Observation.DataProducts.Output_Correlated_[213].size=0 Observation.DataProducts.Output_Correlated_[213].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[213].stationSubband=369 +Observation.DataProducts.Output_Correlated_[213].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[213].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[213].subband=213 Observation.DataProducts.Output_Correlated_[214].SAP=0 Observation.DataProducts.Output_Correlated_[214].centralFrequency=72265625.000000 @@ -1792,6 +2046,8 @@ Observation.DataProducts.Output_Correlated_[214].percentageWritten=0 Observation.DataProducts.Output_Correlated_[214].size=0 Observation.DataProducts.Output_Correlated_[214].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[214].stationSubband=370 
+Observation.DataProducts.Output_Correlated_[214].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[214].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[214].subband=214 Observation.DataProducts.Output_Correlated_[215].SAP=0 Observation.DataProducts.Output_Correlated_[215].centralFrequency=72460937.500000 @@ -1806,6 +2062,8 @@ Observation.DataProducts.Output_Correlated_[215].percentageWritten=0 Observation.DataProducts.Output_Correlated_[215].size=0 Observation.DataProducts.Output_Correlated_[215].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[215].stationSubband=371 +Observation.DataProducts.Output_Correlated_[215].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[215].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[215].subband=215 Observation.DataProducts.Output_Correlated_[216].SAP=0 Observation.DataProducts.Output_Correlated_[216].centralFrequency=72656250.000000 @@ -1820,6 +2078,8 @@ Observation.DataProducts.Output_Correlated_[216].percentageWritten=0 Observation.DataProducts.Output_Correlated_[216].size=0 Observation.DataProducts.Output_Correlated_[216].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[216].stationSubband=372 +Observation.DataProducts.Output_Correlated_[216].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[216].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[216].subband=216 Observation.DataProducts.Output_Correlated_[217].SAP=0 Observation.DataProducts.Output_Correlated_[217].centralFrequency=72851562.500000 @@ -1834,6 +2094,8 @@ Observation.DataProducts.Output_Correlated_[217].percentageWritten=0 Observation.DataProducts.Output_Correlated_[217].size=0 Observation.DataProducts.Output_Correlated_[217].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[217].stationSubband=373 +Observation.DataProducts.Output_Correlated_[217].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[217].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[217].subband=217 Observation.DataProducts.Output_Correlated_[218].SAP=0 Observation.DataProducts.Output_Correlated_[218].centralFrequency=73046875.000000 @@ -1848,6 +2110,8 @@ Observation.DataProducts.Output_Correlated_[218].percentageWritten=0 Observation.DataProducts.Output_Correlated_[218].size=0 Observation.DataProducts.Output_Correlated_[218].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[218].stationSubband=374 +Observation.DataProducts.Output_Correlated_[218].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[218].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[218].subband=218 Observation.DataProducts.Output_Correlated_[219].SAP=0 Observation.DataProducts.Output_Correlated_[219].centralFrequency=73242187.500000 @@ -1862,6 +2126,8 @@ Observation.DataProducts.Output_Correlated_[219].percentageWritten=0 Observation.DataProducts.Output_Correlated_[219].size=0 Observation.DataProducts.Output_Correlated_[219].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[219].stationSubband=375 +Observation.DataProducts.Output_Correlated_[219].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[219].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[219].subband=219 Observation.DataProducts.Output_Correlated_[21].SAP=0 Observation.DataProducts.Output_Correlated_[21].centralFrequency=34570312.500000 @@ -1876,6 +2142,8 @@ Observation.DataProducts.Output_Correlated_[21].percentageWritten=0 Observation.DataProducts.Output_Correlated_[21].size=0 Observation.DataProducts.Output_Correlated_[21].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[21].stationSubband=177 +Observation.DataProducts.Output_Correlated_[21].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[21].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[21].subband=21 Observation.DataProducts.Output_Correlated_[220].SAP=0 Observation.DataProducts.Output_Correlated_[220].centralFrequency=73437500.000000 @@ -1890,6 +2158,8 @@ Observation.DataProducts.Output_Correlated_[220].percentageWritten=0 Observation.DataProducts.Output_Correlated_[220].size=0 Observation.DataProducts.Output_Correlated_[220].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[220].stationSubband=376 +Observation.DataProducts.Output_Correlated_[220].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[220].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[220].subband=220 Observation.DataProducts.Output_Correlated_[221].SAP=0 Observation.DataProducts.Output_Correlated_[221].centralFrequency=73632812.500000 @@ -1904,6 +2174,8 @@ Observation.DataProducts.Output_Correlated_[221].percentageWritten=0 Observation.DataProducts.Output_Correlated_[221].size=0 Observation.DataProducts.Output_Correlated_[221].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[221].stationSubband=377 +Observation.DataProducts.Output_Correlated_[221].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[221].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[221].subband=221 Observation.DataProducts.Output_Correlated_[222].SAP=0 Observation.DataProducts.Output_Correlated_[222].centralFrequency=73828125.000000 @@ -1918,6 +2190,8 @@ Observation.DataProducts.Output_Correlated_[222].percentageWritten=0 Observation.DataProducts.Output_Correlated_[222].size=0 Observation.DataProducts.Output_Correlated_[222].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[222].stationSubband=378 +Observation.DataProducts.Output_Correlated_[222].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[222].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[222].subband=222 
Observation.DataProducts.Output_Correlated_[223].SAP=0 Observation.DataProducts.Output_Correlated_[223].centralFrequency=74023437.500000 @@ -1932,6 +2206,8 @@ Observation.DataProducts.Output_Correlated_[223].percentageWritten=0 Observation.DataProducts.Output_Correlated_[223].size=0 Observation.DataProducts.Output_Correlated_[223].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[223].stationSubband=379 +Observation.DataProducts.Output_Correlated_[223].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[223].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[223].subband=223 Observation.DataProducts.Output_Correlated_[224].SAP=0 Observation.DataProducts.Output_Correlated_[224].centralFrequency=74218750.000000 @@ -1946,6 +2222,8 @@ Observation.DataProducts.Output_Correlated_[224].percentageWritten=0 Observation.DataProducts.Output_Correlated_[224].size=0 Observation.DataProducts.Output_Correlated_[224].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[224].stationSubband=380 +Observation.DataProducts.Output_Correlated_[224].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[224].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[224].subband=224 Observation.DataProducts.Output_Correlated_[225].SAP=0 Observation.DataProducts.Output_Correlated_[225].centralFrequency=74414062.500000 @@ -1960,6 +2238,8 @@ Observation.DataProducts.Output_Correlated_[225].percentageWritten=0 Observation.DataProducts.Output_Correlated_[225].size=0 Observation.DataProducts.Output_Correlated_[225].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[225].stationSubband=381 +Observation.DataProducts.Output_Correlated_[225].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[225].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[225].subband=225 Observation.DataProducts.Output_Correlated_[226].SAP=0 
Observation.DataProducts.Output_Correlated_[226].centralFrequency=74609375.000000 @@ -1974,6 +2254,8 @@ Observation.DataProducts.Output_Correlated_[226].percentageWritten=0 Observation.DataProducts.Output_Correlated_[226].size=0 Observation.DataProducts.Output_Correlated_[226].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[226].stationSubband=382 +Observation.DataProducts.Output_Correlated_[226].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[226].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[226].subband=226 Observation.DataProducts.Output_Correlated_[227].SAP=0 Observation.DataProducts.Output_Correlated_[227].centralFrequency=74804687.500000 @@ -1988,6 +2270,8 @@ Observation.DataProducts.Output_Correlated_[227].percentageWritten=0 Observation.DataProducts.Output_Correlated_[227].size=0 Observation.DataProducts.Output_Correlated_[227].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[227].stationSubband=383 +Observation.DataProducts.Output_Correlated_[227].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[227].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[227].subband=227 Observation.DataProducts.Output_Correlated_[228].SAP=0 Observation.DataProducts.Output_Correlated_[228].centralFrequency=75000000.000000 @@ -2002,6 +2286,8 @@ Observation.DataProducts.Output_Correlated_[228].percentageWritten=0 Observation.DataProducts.Output_Correlated_[228].size=0 Observation.DataProducts.Output_Correlated_[228].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[228].stationSubband=384 +Observation.DataProducts.Output_Correlated_[228].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[228].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[228].subband=228 Observation.DataProducts.Output_Correlated_[229].SAP=0 Observation.DataProducts.Output_Correlated_[229].centralFrequency=75195312.500000 @@ -2016,6 
+2302,8 @@ Observation.DataProducts.Output_Correlated_[229].percentageWritten=0 Observation.DataProducts.Output_Correlated_[229].size=0 Observation.DataProducts.Output_Correlated_[229].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[229].stationSubband=385 +Observation.DataProducts.Output_Correlated_[229].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[229].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[229].subband=229 Observation.DataProducts.Output_Correlated_[22].SAP=0 Observation.DataProducts.Output_Correlated_[22].centralFrequency=34765625.000000 @@ -2030,6 +2318,8 @@ Observation.DataProducts.Output_Correlated_[22].percentageWritten=0 Observation.DataProducts.Output_Correlated_[22].size=0 Observation.DataProducts.Output_Correlated_[22].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[22].stationSubband=178 +Observation.DataProducts.Output_Correlated_[22].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[22].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[22].subband=22 Observation.DataProducts.Output_Correlated_[230].SAP=0 Observation.DataProducts.Output_Correlated_[230].centralFrequency=75390625.000000 @@ -2044,6 +2334,8 @@ Observation.DataProducts.Output_Correlated_[230].percentageWritten=0 Observation.DataProducts.Output_Correlated_[230].size=0 Observation.DataProducts.Output_Correlated_[230].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[230].stationSubband=386 +Observation.DataProducts.Output_Correlated_[230].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[230].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[230].subband=230 Observation.DataProducts.Output_Correlated_[231].SAP=0 Observation.DataProducts.Output_Correlated_[231].centralFrequency=75585937.500000 @@ -2058,6 +2350,8 @@ Observation.DataProducts.Output_Correlated_[231].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[231].size=0 Observation.DataProducts.Output_Correlated_[231].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[231].stationSubband=387 +Observation.DataProducts.Output_Correlated_[231].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[231].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[231].subband=231 Observation.DataProducts.Output_Correlated_[232].SAP=0 Observation.DataProducts.Output_Correlated_[232].centralFrequency=75781250.000000 @@ -2072,6 +2366,8 @@ Observation.DataProducts.Output_Correlated_[232].percentageWritten=0 Observation.DataProducts.Output_Correlated_[232].size=0 Observation.DataProducts.Output_Correlated_[232].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[232].stationSubband=388 +Observation.DataProducts.Output_Correlated_[232].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[232].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[232].subband=232 Observation.DataProducts.Output_Correlated_[233].SAP=0 Observation.DataProducts.Output_Correlated_[233].centralFrequency=75976562.500000 @@ -2086,6 +2382,8 @@ Observation.DataProducts.Output_Correlated_[233].percentageWritten=0 Observation.DataProducts.Output_Correlated_[233].size=0 Observation.DataProducts.Output_Correlated_[233].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[233].stationSubband=389 +Observation.DataProducts.Output_Correlated_[233].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[233].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[233].subband=233 Observation.DataProducts.Output_Correlated_[234].SAP=0 Observation.DataProducts.Output_Correlated_[234].centralFrequency=76171875.000000 @@ -2100,6 +2398,8 @@ Observation.DataProducts.Output_Correlated_[234].percentageWritten=0 Observation.DataProducts.Output_Correlated_[234].size=0 
Observation.DataProducts.Output_Correlated_[234].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[234].stationSubband=390 +Observation.DataProducts.Output_Correlated_[234].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[234].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[234].subband=234 Observation.DataProducts.Output_Correlated_[235].SAP=0 Observation.DataProducts.Output_Correlated_[235].centralFrequency=76367187.500000 @@ -2114,6 +2414,8 @@ Observation.DataProducts.Output_Correlated_[235].percentageWritten=0 Observation.DataProducts.Output_Correlated_[235].size=0 Observation.DataProducts.Output_Correlated_[235].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[235].stationSubband=391 +Observation.DataProducts.Output_Correlated_[235].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[235].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[235].subband=235 Observation.DataProducts.Output_Correlated_[236].SAP=0 Observation.DataProducts.Output_Correlated_[236].centralFrequency=76562500.000000 @@ -2128,6 +2430,8 @@ Observation.DataProducts.Output_Correlated_[236].percentageWritten=0 Observation.DataProducts.Output_Correlated_[236].size=0 Observation.DataProducts.Output_Correlated_[236].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[236].stationSubband=392 +Observation.DataProducts.Output_Correlated_[236].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[236].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[236].subband=236 Observation.DataProducts.Output_Correlated_[237].SAP=0 Observation.DataProducts.Output_Correlated_[237].centralFrequency=76757812.500000 @@ -2142,6 +2446,8 @@ Observation.DataProducts.Output_Correlated_[237].percentageWritten=0 Observation.DataProducts.Output_Correlated_[237].size=0 Observation.DataProducts.Output_Correlated_[237].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[237].stationSubband=393 +Observation.DataProducts.Output_Correlated_[237].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[237].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[237].subband=237 Observation.DataProducts.Output_Correlated_[238].SAP=0 Observation.DataProducts.Output_Correlated_[238].centralFrequency=76953125.000000 @@ -2156,6 +2462,8 @@ Observation.DataProducts.Output_Correlated_[238].percentageWritten=0 Observation.DataProducts.Output_Correlated_[238].size=0 Observation.DataProducts.Output_Correlated_[238].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[238].stationSubband=394 +Observation.DataProducts.Output_Correlated_[238].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[238].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[238].subband=238 Observation.DataProducts.Output_Correlated_[239].SAP=0 Observation.DataProducts.Output_Correlated_[239].centralFrequency=77148437.500000 @@ -2170,6 +2478,8 @@ Observation.DataProducts.Output_Correlated_[239].percentageWritten=0 Observation.DataProducts.Output_Correlated_[239].size=0 Observation.DataProducts.Output_Correlated_[239].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[239].stationSubband=395 +Observation.DataProducts.Output_Correlated_[239].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[239].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[239].subband=239 Observation.DataProducts.Output_Correlated_[23].SAP=0 Observation.DataProducts.Output_Correlated_[23].centralFrequency=34960937.500000 @@ -2184,6 +2494,8 @@ Observation.DataProducts.Output_Correlated_[23].percentageWritten=0 Observation.DataProducts.Output_Correlated_[23].size=0 Observation.DataProducts.Output_Correlated_[23].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[23].stationSubband=179 
+Observation.DataProducts.Output_Correlated_[23].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[23].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[23].subband=23 Observation.DataProducts.Output_Correlated_[240].SAP=0 Observation.DataProducts.Output_Correlated_[240].centralFrequency=77343750.000000 @@ -2198,6 +2510,8 @@ Observation.DataProducts.Output_Correlated_[240].percentageWritten=0 Observation.DataProducts.Output_Correlated_[240].size=0 Observation.DataProducts.Output_Correlated_[240].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[240].stationSubband=396 +Observation.DataProducts.Output_Correlated_[240].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[240].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[240].subband=240 Observation.DataProducts.Output_Correlated_[241].SAP=0 Observation.DataProducts.Output_Correlated_[241].centralFrequency=77539062.500000 @@ -2212,6 +2526,8 @@ Observation.DataProducts.Output_Correlated_[241].percentageWritten=0 Observation.DataProducts.Output_Correlated_[241].size=0 Observation.DataProducts.Output_Correlated_[241].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[241].stationSubband=397 +Observation.DataProducts.Output_Correlated_[241].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[241].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[241].subband=241 Observation.DataProducts.Output_Correlated_[242].SAP=0 Observation.DataProducts.Output_Correlated_[242].centralFrequency=77734375.000000 @@ -2226,6 +2542,8 @@ Observation.DataProducts.Output_Correlated_[242].percentageWritten=0 Observation.DataProducts.Output_Correlated_[242].size=0 Observation.DataProducts.Output_Correlated_[242].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[242].stationSubband=398 +Observation.DataProducts.Output_Correlated_[242].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[242].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[242].subband=242 Observation.DataProducts.Output_Correlated_[243].SAP=0 Observation.DataProducts.Output_Correlated_[243].centralFrequency=77929687.500000 @@ -2240,6 +2558,8 @@ Observation.DataProducts.Output_Correlated_[243].percentageWritten=0 Observation.DataProducts.Output_Correlated_[243].size=0 Observation.DataProducts.Output_Correlated_[243].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[243].stationSubband=399 +Observation.DataProducts.Output_Correlated_[243].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[243].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[243].subband=243 Observation.DataProducts.Output_Correlated_[24].SAP=0 Observation.DataProducts.Output_Correlated_[24].centralFrequency=35156250.000000 @@ -2254,6 +2574,8 @@ Observation.DataProducts.Output_Correlated_[24].percentageWritten=0 Observation.DataProducts.Output_Correlated_[24].size=0 Observation.DataProducts.Output_Correlated_[24].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[24].stationSubband=180 +Observation.DataProducts.Output_Correlated_[24].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[24].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[24].subband=24 Observation.DataProducts.Output_Correlated_[25].SAP=0 Observation.DataProducts.Output_Correlated_[25].centralFrequency=35351562.500000 @@ -2268,6 +2590,8 @@ Observation.DataProducts.Output_Correlated_[25].percentageWritten=0 Observation.DataProducts.Output_Correlated_[25].size=0 Observation.DataProducts.Output_Correlated_[25].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[25].stationSubband=181 +Observation.DataProducts.Output_Correlated_[25].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[25].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[25].subband=25 Observation.DataProducts.Output_Correlated_[26].SAP=0 Observation.DataProducts.Output_Correlated_[26].centralFrequency=35546875.000000 @@ -2282,6 +2606,8 @@ Observation.DataProducts.Output_Correlated_[26].percentageWritten=0 Observation.DataProducts.Output_Correlated_[26].size=0 Observation.DataProducts.Output_Correlated_[26].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[26].stationSubband=182 +Observation.DataProducts.Output_Correlated_[26].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[26].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[26].subband=26 Observation.DataProducts.Output_Correlated_[27].SAP=0 Observation.DataProducts.Output_Correlated_[27].centralFrequency=35742187.500000 @@ -2296,6 +2622,8 @@ Observation.DataProducts.Output_Correlated_[27].percentageWritten=0 Observation.DataProducts.Output_Correlated_[27].size=0 Observation.DataProducts.Output_Correlated_[27].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[27].stationSubband=183 +Observation.DataProducts.Output_Correlated_[27].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[27].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[27].subband=27 Observation.DataProducts.Output_Correlated_[28].SAP=0 Observation.DataProducts.Output_Correlated_[28].centralFrequency=35937500.000000 @@ -2310,6 +2638,8 @@ Observation.DataProducts.Output_Correlated_[28].percentageWritten=0 Observation.DataProducts.Output_Correlated_[28].size=0 Observation.DataProducts.Output_Correlated_[28].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[28].stationSubband=184 +Observation.DataProducts.Output_Correlated_[28].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[28].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[28].subband=28 Observation.DataProducts.Output_Correlated_[29].SAP=0 
Observation.DataProducts.Output_Correlated_[29].centralFrequency=36132812.500000 @@ -2324,6 +2654,8 @@ Observation.DataProducts.Output_Correlated_[29].percentageWritten=0 Observation.DataProducts.Output_Correlated_[29].size=0 Observation.DataProducts.Output_Correlated_[29].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[29].stationSubband=185 +Observation.DataProducts.Output_Correlated_[29].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[29].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[29].subband=29 Observation.DataProducts.Output_Correlated_[2].SAP=0 Observation.DataProducts.Output_Correlated_[2].centralFrequency=30859375.000000 @@ -2338,6 +2670,8 @@ Observation.DataProducts.Output_Correlated_[2].percentageWritten=0 Observation.DataProducts.Output_Correlated_[2].size=0 Observation.DataProducts.Output_Correlated_[2].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[2].stationSubband=158 +Observation.DataProducts.Output_Correlated_[2].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[2].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[2].subband=2 Observation.DataProducts.Output_Correlated_[30].SAP=0 Observation.DataProducts.Output_Correlated_[30].centralFrequency=36328125.000000 @@ -2352,6 +2686,8 @@ Observation.DataProducts.Output_Correlated_[30].percentageWritten=0 Observation.DataProducts.Output_Correlated_[30].size=0 Observation.DataProducts.Output_Correlated_[30].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[30].stationSubband=186 +Observation.DataProducts.Output_Correlated_[30].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[30].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[30].subband=30 Observation.DataProducts.Output_Correlated_[31].SAP=0 Observation.DataProducts.Output_Correlated_[31].centralFrequency=36523437.500000 @@ -2366,6 +2702,8 @@ 
Observation.DataProducts.Output_Correlated_[31].percentageWritten=0 Observation.DataProducts.Output_Correlated_[31].size=0 Observation.DataProducts.Output_Correlated_[31].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[31].stationSubband=187 +Observation.DataProducts.Output_Correlated_[31].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[31].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[31].subband=31 Observation.DataProducts.Output_Correlated_[32].SAP=0 Observation.DataProducts.Output_Correlated_[32].centralFrequency=36718750.000000 @@ -2380,6 +2718,8 @@ Observation.DataProducts.Output_Correlated_[32].percentageWritten=0 Observation.DataProducts.Output_Correlated_[32].size=0 Observation.DataProducts.Output_Correlated_[32].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[32].stationSubband=188 +Observation.DataProducts.Output_Correlated_[32].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[32].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[32].subband=32 Observation.DataProducts.Output_Correlated_[33].SAP=0 Observation.DataProducts.Output_Correlated_[33].centralFrequency=36914062.500000 @@ -2394,6 +2734,8 @@ Observation.DataProducts.Output_Correlated_[33].percentageWritten=0 Observation.DataProducts.Output_Correlated_[33].size=0 Observation.DataProducts.Output_Correlated_[33].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[33].stationSubband=189 +Observation.DataProducts.Output_Correlated_[33].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[33].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[33].subband=33 Observation.DataProducts.Output_Correlated_[34].SAP=0 Observation.DataProducts.Output_Correlated_[34].centralFrequency=37109375.000000 @@ -2408,6 +2750,8 @@ Observation.DataProducts.Output_Correlated_[34].percentageWritten=0 Observation.DataProducts.Output_Correlated_[34].size=0 
Observation.DataProducts.Output_Correlated_[34].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[34].stationSubband=190 +Observation.DataProducts.Output_Correlated_[34].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[34].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[34].subband=34 Observation.DataProducts.Output_Correlated_[35].SAP=0 Observation.DataProducts.Output_Correlated_[35].centralFrequency=37304687.500000 @@ -2422,6 +2766,8 @@ Observation.DataProducts.Output_Correlated_[35].percentageWritten=0 Observation.DataProducts.Output_Correlated_[35].size=0 Observation.DataProducts.Output_Correlated_[35].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[35].stationSubband=191 +Observation.DataProducts.Output_Correlated_[35].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[35].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[35].subband=35 Observation.DataProducts.Output_Correlated_[36].SAP=0 Observation.DataProducts.Output_Correlated_[36].centralFrequency=37500000.000000 @@ -2436,6 +2782,8 @@ Observation.DataProducts.Output_Correlated_[36].percentageWritten=0 Observation.DataProducts.Output_Correlated_[36].size=0 Observation.DataProducts.Output_Correlated_[36].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[36].stationSubband=192 +Observation.DataProducts.Output_Correlated_[36].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[36].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[36].subband=36 Observation.DataProducts.Output_Correlated_[37].SAP=0 Observation.DataProducts.Output_Correlated_[37].centralFrequency=37695312.500000 @@ -2450,6 +2798,8 @@ Observation.DataProducts.Output_Correlated_[37].percentageWritten=0 Observation.DataProducts.Output_Correlated_[37].size=0 Observation.DataProducts.Output_Correlated_[37].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[37].stationSubband=193 +Observation.DataProducts.Output_Correlated_[37].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[37].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[37].subband=37 Observation.DataProducts.Output_Correlated_[38].SAP=0 Observation.DataProducts.Output_Correlated_[38].centralFrequency=37890625.000000 @@ -2464,6 +2814,8 @@ Observation.DataProducts.Output_Correlated_[38].percentageWritten=0 Observation.DataProducts.Output_Correlated_[38].size=0 Observation.DataProducts.Output_Correlated_[38].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[38].stationSubband=194 +Observation.DataProducts.Output_Correlated_[38].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[38].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[38].subband=38 Observation.DataProducts.Output_Correlated_[39].SAP=0 Observation.DataProducts.Output_Correlated_[39].centralFrequency=38085937.500000 @@ -2478,6 +2830,8 @@ Observation.DataProducts.Output_Correlated_[39].percentageWritten=0 Observation.DataProducts.Output_Correlated_[39].size=0 Observation.DataProducts.Output_Correlated_[39].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[39].stationSubband=195 +Observation.DataProducts.Output_Correlated_[39].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[39].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[39].subband=39 Observation.DataProducts.Output_Correlated_[3].SAP=0 Observation.DataProducts.Output_Correlated_[3].centralFrequency=31054687.500000 @@ -2492,6 +2846,8 @@ Observation.DataProducts.Output_Correlated_[3].percentageWritten=0 Observation.DataProducts.Output_Correlated_[3].size=0 Observation.DataProducts.Output_Correlated_[3].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[3].stationSubband=159 
+Observation.DataProducts.Output_Correlated_[3].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[3].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[3].subband=3 Observation.DataProducts.Output_Correlated_[40].SAP=0 Observation.DataProducts.Output_Correlated_[40].centralFrequency=38281250.000000 @@ -2506,6 +2862,8 @@ Observation.DataProducts.Output_Correlated_[40].percentageWritten=0 Observation.DataProducts.Output_Correlated_[40].size=0 Observation.DataProducts.Output_Correlated_[40].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[40].stationSubband=196 +Observation.DataProducts.Output_Correlated_[40].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[40].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[40].subband=40 Observation.DataProducts.Output_Correlated_[41].SAP=0 Observation.DataProducts.Output_Correlated_[41].centralFrequency=38476562.500000 @@ -2520,6 +2878,8 @@ Observation.DataProducts.Output_Correlated_[41].percentageWritten=0 Observation.DataProducts.Output_Correlated_[41].size=0 Observation.DataProducts.Output_Correlated_[41].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[41].stationSubband=197 +Observation.DataProducts.Output_Correlated_[41].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[41].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[41].subband=41 Observation.DataProducts.Output_Correlated_[42].SAP=0 Observation.DataProducts.Output_Correlated_[42].centralFrequency=38671875.000000 @@ -2534,6 +2894,8 @@ Observation.DataProducts.Output_Correlated_[42].percentageWritten=0 Observation.DataProducts.Output_Correlated_[42].size=0 Observation.DataProducts.Output_Correlated_[42].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[42].stationSubband=198 +Observation.DataProducts.Output_Correlated_[42].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[42].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[42].subband=42 Observation.DataProducts.Output_Correlated_[43].SAP=0 Observation.DataProducts.Output_Correlated_[43].centralFrequency=38867187.500000 @@ -2548,6 +2910,8 @@ Observation.DataProducts.Output_Correlated_[43].percentageWritten=0 Observation.DataProducts.Output_Correlated_[43].size=0 Observation.DataProducts.Output_Correlated_[43].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[43].stationSubband=199 +Observation.DataProducts.Output_Correlated_[43].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[43].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[43].subband=43 Observation.DataProducts.Output_Correlated_[44].SAP=0 Observation.DataProducts.Output_Correlated_[44].centralFrequency=39062500.000000 @@ -2562,6 +2926,8 @@ Observation.DataProducts.Output_Correlated_[44].percentageWritten=0 Observation.DataProducts.Output_Correlated_[44].size=0 Observation.DataProducts.Output_Correlated_[44].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[44].stationSubband=200 +Observation.DataProducts.Output_Correlated_[44].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[44].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[44].subband=44 Observation.DataProducts.Output_Correlated_[45].SAP=0 Observation.DataProducts.Output_Correlated_[45].centralFrequency=39257812.500000 @@ -2576,6 +2942,8 @@ Observation.DataProducts.Output_Correlated_[45].percentageWritten=0 Observation.DataProducts.Output_Correlated_[45].size=0 Observation.DataProducts.Output_Correlated_[45].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[45].stationSubband=201 +Observation.DataProducts.Output_Correlated_[45].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[45].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[45].subband=45 Observation.DataProducts.Output_Correlated_[46].SAP=0 Observation.DataProducts.Output_Correlated_[46].centralFrequency=39453125.000000 @@ -2590,6 +2958,8 @@ Observation.DataProducts.Output_Correlated_[46].percentageWritten=0 Observation.DataProducts.Output_Correlated_[46].size=0 Observation.DataProducts.Output_Correlated_[46].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[46].stationSubband=202 +Observation.DataProducts.Output_Correlated_[46].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[46].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[46].subband=46 Observation.DataProducts.Output_Correlated_[47].SAP=0 Observation.DataProducts.Output_Correlated_[47].centralFrequency=39648437.500000 @@ -2604,6 +2974,8 @@ Observation.DataProducts.Output_Correlated_[47].percentageWritten=0 Observation.DataProducts.Output_Correlated_[47].size=0 Observation.DataProducts.Output_Correlated_[47].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[47].stationSubband=203 +Observation.DataProducts.Output_Correlated_[47].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[47].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[47].subband=47 Observation.DataProducts.Output_Correlated_[48].SAP=0 Observation.DataProducts.Output_Correlated_[48].centralFrequency=39843750.000000 @@ -2618,6 +2990,8 @@ Observation.DataProducts.Output_Correlated_[48].percentageWritten=0 Observation.DataProducts.Output_Correlated_[48].size=0 Observation.DataProducts.Output_Correlated_[48].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[48].stationSubband=204 +Observation.DataProducts.Output_Correlated_[48].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[48].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[48].subband=48 Observation.DataProducts.Output_Correlated_[49].SAP=0 
Observation.DataProducts.Output_Correlated_[49].centralFrequency=40039062.500000 @@ -2632,6 +3006,8 @@ Observation.DataProducts.Output_Correlated_[49].percentageWritten=0 Observation.DataProducts.Output_Correlated_[49].size=0 Observation.DataProducts.Output_Correlated_[49].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[49].stationSubband=205 +Observation.DataProducts.Output_Correlated_[49].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[49].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[49].subband=49 Observation.DataProducts.Output_Correlated_[4].SAP=0 Observation.DataProducts.Output_Correlated_[4].centralFrequency=31250000.000000 @@ -2646,6 +3022,8 @@ Observation.DataProducts.Output_Correlated_[4].percentageWritten=0 Observation.DataProducts.Output_Correlated_[4].size=0 Observation.DataProducts.Output_Correlated_[4].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[4].stationSubband=160 +Observation.DataProducts.Output_Correlated_[4].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[4].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[4].subband=4 Observation.DataProducts.Output_Correlated_[50].SAP=0 Observation.DataProducts.Output_Correlated_[50].centralFrequency=40234375.000000 @@ -2660,6 +3038,8 @@ Observation.DataProducts.Output_Correlated_[50].percentageWritten=0 Observation.DataProducts.Output_Correlated_[50].size=0 Observation.DataProducts.Output_Correlated_[50].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[50].stationSubband=206 +Observation.DataProducts.Output_Correlated_[50].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[50].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[50].subband=50 Observation.DataProducts.Output_Correlated_[51].SAP=0 Observation.DataProducts.Output_Correlated_[51].centralFrequency=40429687.500000 @@ -2674,6 +3054,8 @@ 
Observation.DataProducts.Output_Correlated_[51].percentageWritten=0 Observation.DataProducts.Output_Correlated_[51].size=0 Observation.DataProducts.Output_Correlated_[51].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[51].stationSubband=207 +Observation.DataProducts.Output_Correlated_[51].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[51].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[51].subband=51 Observation.DataProducts.Output_Correlated_[52].SAP=0 Observation.DataProducts.Output_Correlated_[52].centralFrequency=40625000.000000 @@ -2688,6 +3070,8 @@ Observation.DataProducts.Output_Correlated_[52].percentageWritten=0 Observation.DataProducts.Output_Correlated_[52].size=0 Observation.DataProducts.Output_Correlated_[52].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[52].stationSubband=208 +Observation.DataProducts.Output_Correlated_[52].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[52].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[52].subband=52 Observation.DataProducts.Output_Correlated_[53].SAP=0 Observation.DataProducts.Output_Correlated_[53].centralFrequency=40820312.500000 @@ -2702,6 +3086,8 @@ Observation.DataProducts.Output_Correlated_[53].percentageWritten=0 Observation.DataProducts.Output_Correlated_[53].size=0 Observation.DataProducts.Output_Correlated_[53].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[53].stationSubband=209 +Observation.DataProducts.Output_Correlated_[53].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[53].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[53].subband=53 Observation.DataProducts.Output_Correlated_[54].SAP=0 Observation.DataProducts.Output_Correlated_[54].centralFrequency=41015625.000000 @@ -2716,6 +3102,8 @@ Observation.DataProducts.Output_Correlated_[54].percentageWritten=0 Observation.DataProducts.Output_Correlated_[54].size=0 
Observation.DataProducts.Output_Correlated_[54].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[54].stationSubband=210 +Observation.DataProducts.Output_Correlated_[54].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[54].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[54].subband=54 Observation.DataProducts.Output_Correlated_[55].SAP=0 Observation.DataProducts.Output_Correlated_[55].centralFrequency=41210937.500000 @@ -2730,6 +3118,8 @@ Observation.DataProducts.Output_Correlated_[55].percentageWritten=0 Observation.DataProducts.Output_Correlated_[55].size=0 Observation.DataProducts.Output_Correlated_[55].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[55].stationSubband=211 +Observation.DataProducts.Output_Correlated_[55].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[55].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[55].subband=55 Observation.DataProducts.Output_Correlated_[56].SAP=0 Observation.DataProducts.Output_Correlated_[56].centralFrequency=41406250.000000 @@ -2744,6 +3134,8 @@ Observation.DataProducts.Output_Correlated_[56].percentageWritten=0 Observation.DataProducts.Output_Correlated_[56].size=0 Observation.DataProducts.Output_Correlated_[56].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[56].stationSubband=212 +Observation.DataProducts.Output_Correlated_[56].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[56].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[56].subband=56 Observation.DataProducts.Output_Correlated_[57].SAP=0 Observation.DataProducts.Output_Correlated_[57].centralFrequency=41601562.500000 @@ -2758,6 +3150,8 @@ Observation.DataProducts.Output_Correlated_[57].percentageWritten=0 Observation.DataProducts.Output_Correlated_[57].size=0 Observation.DataProducts.Output_Correlated_[57].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[57].stationSubband=213 +Observation.DataProducts.Output_Correlated_[57].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[57].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[57].subband=57 Observation.DataProducts.Output_Correlated_[58].SAP=0 Observation.DataProducts.Output_Correlated_[58].centralFrequency=41796875.000000 @@ -2772,6 +3166,8 @@ Observation.DataProducts.Output_Correlated_[58].percentageWritten=0 Observation.DataProducts.Output_Correlated_[58].size=0 Observation.DataProducts.Output_Correlated_[58].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[58].stationSubband=214 +Observation.DataProducts.Output_Correlated_[58].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[58].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[58].subband=58 Observation.DataProducts.Output_Correlated_[59].SAP=0 Observation.DataProducts.Output_Correlated_[59].centralFrequency=41992187.500000 @@ -2786,6 +3182,8 @@ Observation.DataProducts.Output_Correlated_[59].percentageWritten=0 Observation.DataProducts.Output_Correlated_[59].size=0 Observation.DataProducts.Output_Correlated_[59].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[59].stationSubband=215 +Observation.DataProducts.Output_Correlated_[59].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[59].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[59].subband=59 Observation.DataProducts.Output_Correlated_[5].SAP=0 Observation.DataProducts.Output_Correlated_[5].centralFrequency=31445312.500000 @@ -2800,6 +3198,8 @@ Observation.DataProducts.Output_Correlated_[5].percentageWritten=0 Observation.DataProducts.Output_Correlated_[5].size=0 Observation.DataProducts.Output_Correlated_[5].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[5].stationSubband=161 
+Observation.DataProducts.Output_Correlated_[5].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[5].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[5].subband=5 Observation.DataProducts.Output_Correlated_[60].SAP=0 Observation.DataProducts.Output_Correlated_[60].centralFrequency=42187500.000000 @@ -2814,6 +3214,8 @@ Observation.DataProducts.Output_Correlated_[60].percentageWritten=0 Observation.DataProducts.Output_Correlated_[60].size=0 Observation.DataProducts.Output_Correlated_[60].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[60].stationSubband=216 +Observation.DataProducts.Output_Correlated_[60].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[60].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[60].subband=60 Observation.DataProducts.Output_Correlated_[61].SAP=0 Observation.DataProducts.Output_Correlated_[61].centralFrequency=42382812.500000 @@ -2828,6 +3230,8 @@ Observation.DataProducts.Output_Correlated_[61].percentageWritten=0 Observation.DataProducts.Output_Correlated_[61].size=0 Observation.DataProducts.Output_Correlated_[61].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[61].stationSubband=217 +Observation.DataProducts.Output_Correlated_[61].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[61].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[61].subband=61 Observation.DataProducts.Output_Correlated_[62].SAP=0 Observation.DataProducts.Output_Correlated_[62].centralFrequency=42578125.000000 @@ -2842,6 +3246,8 @@ Observation.DataProducts.Output_Correlated_[62].percentageWritten=0 Observation.DataProducts.Output_Correlated_[62].size=0 Observation.DataProducts.Output_Correlated_[62].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[62].stationSubband=218 +Observation.DataProducts.Output_Correlated_[62].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[62].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[62].subband=62 Observation.DataProducts.Output_Correlated_[63].SAP=0 Observation.DataProducts.Output_Correlated_[63].centralFrequency=42773437.500000 @@ -2856,6 +3262,8 @@ Observation.DataProducts.Output_Correlated_[63].percentageWritten=0 Observation.DataProducts.Output_Correlated_[63].size=0 Observation.DataProducts.Output_Correlated_[63].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[63].stationSubband=219 +Observation.DataProducts.Output_Correlated_[63].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[63].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[63].subband=63 Observation.DataProducts.Output_Correlated_[64].SAP=0 Observation.DataProducts.Output_Correlated_[64].centralFrequency=42968750.000000 @@ -2870,6 +3278,8 @@ Observation.DataProducts.Output_Correlated_[64].percentageWritten=0 Observation.DataProducts.Output_Correlated_[64].size=0 Observation.DataProducts.Output_Correlated_[64].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[64].stationSubband=220 +Observation.DataProducts.Output_Correlated_[64].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[64].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[64].subband=64 Observation.DataProducts.Output_Correlated_[65].SAP=0 Observation.DataProducts.Output_Correlated_[65].centralFrequency=43164062.500000 @@ -2884,6 +3294,8 @@ Observation.DataProducts.Output_Correlated_[65].percentageWritten=0 Observation.DataProducts.Output_Correlated_[65].size=0 Observation.DataProducts.Output_Correlated_[65].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[65].stationSubband=221 +Observation.DataProducts.Output_Correlated_[65].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[65].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[65].subband=65 Observation.DataProducts.Output_Correlated_[66].SAP=0 Observation.DataProducts.Output_Correlated_[66].centralFrequency=43359375.000000 @@ -2898,6 +3310,8 @@ Observation.DataProducts.Output_Correlated_[66].percentageWritten=0 Observation.DataProducts.Output_Correlated_[66].size=0 Observation.DataProducts.Output_Correlated_[66].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[66].stationSubband=222 +Observation.DataProducts.Output_Correlated_[66].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[66].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[66].subband=66 Observation.DataProducts.Output_Correlated_[67].SAP=0 Observation.DataProducts.Output_Correlated_[67].centralFrequency=43554687.500000 @@ -2912,6 +3326,8 @@ Observation.DataProducts.Output_Correlated_[67].percentageWritten=0 Observation.DataProducts.Output_Correlated_[67].size=0 Observation.DataProducts.Output_Correlated_[67].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[67].stationSubband=223 +Observation.DataProducts.Output_Correlated_[67].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[67].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[67].subband=67 Observation.DataProducts.Output_Correlated_[68].SAP=0 Observation.DataProducts.Output_Correlated_[68].centralFrequency=43750000.000000 @@ -2926,6 +3342,8 @@ Observation.DataProducts.Output_Correlated_[68].percentageWritten=0 Observation.DataProducts.Output_Correlated_[68].size=0 Observation.DataProducts.Output_Correlated_[68].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[68].stationSubband=224 +Observation.DataProducts.Output_Correlated_[68].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[68].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[68].subband=68 Observation.DataProducts.Output_Correlated_[69].SAP=0 
Observation.DataProducts.Output_Correlated_[69].centralFrequency=43945312.500000 @@ -2940,6 +3358,8 @@ Observation.DataProducts.Output_Correlated_[69].percentageWritten=0 Observation.DataProducts.Output_Correlated_[69].size=0 Observation.DataProducts.Output_Correlated_[69].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[69].stationSubband=225 +Observation.DataProducts.Output_Correlated_[69].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[69].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[69].subband=69 Observation.DataProducts.Output_Correlated_[6].SAP=0 Observation.DataProducts.Output_Correlated_[6].centralFrequency=31640625.000000 @@ -2954,6 +3374,8 @@ Observation.DataProducts.Output_Correlated_[6].percentageWritten=0 Observation.DataProducts.Output_Correlated_[6].size=0 Observation.DataProducts.Output_Correlated_[6].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[6].stationSubband=162 +Observation.DataProducts.Output_Correlated_[6].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[6].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[6].subband=6 Observation.DataProducts.Output_Correlated_[70].SAP=0 Observation.DataProducts.Output_Correlated_[70].centralFrequency=44140625.000000 @@ -2968,6 +3390,8 @@ Observation.DataProducts.Output_Correlated_[70].percentageWritten=0 Observation.DataProducts.Output_Correlated_[70].size=0 Observation.DataProducts.Output_Correlated_[70].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[70].stationSubband=226 +Observation.DataProducts.Output_Correlated_[70].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[70].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[70].subband=70 Observation.DataProducts.Output_Correlated_[71].SAP=0 Observation.DataProducts.Output_Correlated_[71].centralFrequency=44335937.500000 @@ -2982,6 +3406,8 @@ 
Observation.DataProducts.Output_Correlated_[71].percentageWritten=0 Observation.DataProducts.Output_Correlated_[71].size=0 Observation.DataProducts.Output_Correlated_[71].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[71].stationSubband=227 +Observation.DataProducts.Output_Correlated_[71].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[71].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[71].subband=71 Observation.DataProducts.Output_Correlated_[72].SAP=0 Observation.DataProducts.Output_Correlated_[72].centralFrequency=44531250.000000 @@ -2996,6 +3422,8 @@ Observation.DataProducts.Output_Correlated_[72].percentageWritten=0 Observation.DataProducts.Output_Correlated_[72].size=0 Observation.DataProducts.Output_Correlated_[72].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[72].stationSubband=228 +Observation.DataProducts.Output_Correlated_[72].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[72].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[72].subband=72 Observation.DataProducts.Output_Correlated_[73].SAP=0 Observation.DataProducts.Output_Correlated_[73].centralFrequency=44726562.500000 @@ -3010,6 +3438,8 @@ Observation.DataProducts.Output_Correlated_[73].percentageWritten=0 Observation.DataProducts.Output_Correlated_[73].size=0 Observation.DataProducts.Output_Correlated_[73].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[73].stationSubband=229 +Observation.DataProducts.Output_Correlated_[73].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[73].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[73].subband=73 Observation.DataProducts.Output_Correlated_[74].SAP=0 Observation.DataProducts.Output_Correlated_[74].centralFrequency=44921875.000000 @@ -3024,6 +3454,8 @@ Observation.DataProducts.Output_Correlated_[74].percentageWritten=0 Observation.DataProducts.Output_Correlated_[74].size=0 
Observation.DataProducts.Output_Correlated_[74].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[74].stationSubband=230 +Observation.DataProducts.Output_Correlated_[74].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[74].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[74].subband=74 Observation.DataProducts.Output_Correlated_[75].SAP=0 Observation.DataProducts.Output_Correlated_[75].centralFrequency=45117187.500000 @@ -3038,6 +3470,8 @@ Observation.DataProducts.Output_Correlated_[75].percentageWritten=0 Observation.DataProducts.Output_Correlated_[75].size=0 Observation.DataProducts.Output_Correlated_[75].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[75].stationSubband=231 +Observation.DataProducts.Output_Correlated_[75].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[75].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[75].subband=75 Observation.DataProducts.Output_Correlated_[76].SAP=0 Observation.DataProducts.Output_Correlated_[76].centralFrequency=45312500.000000 @@ -3052,6 +3486,8 @@ Observation.DataProducts.Output_Correlated_[76].percentageWritten=0 Observation.DataProducts.Output_Correlated_[76].size=0 Observation.DataProducts.Output_Correlated_[76].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[76].stationSubband=232 +Observation.DataProducts.Output_Correlated_[76].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[76].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[76].subband=76 Observation.DataProducts.Output_Correlated_[77].SAP=0 Observation.DataProducts.Output_Correlated_[77].centralFrequency=45507812.500000 @@ -3066,6 +3502,8 @@ Observation.DataProducts.Output_Correlated_[77].percentageWritten=0 Observation.DataProducts.Output_Correlated_[77].size=0 Observation.DataProducts.Output_Correlated_[77].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[77].stationSubband=233 +Observation.DataProducts.Output_Correlated_[77].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[77].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[77].subband=77 Observation.DataProducts.Output_Correlated_[78].SAP=0 Observation.DataProducts.Output_Correlated_[78].centralFrequency=45703125.000000 @@ -3080,6 +3518,8 @@ Observation.DataProducts.Output_Correlated_[78].percentageWritten=0 Observation.DataProducts.Output_Correlated_[78].size=0 Observation.DataProducts.Output_Correlated_[78].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[78].stationSubband=234 +Observation.DataProducts.Output_Correlated_[78].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[78].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[78].subband=78 Observation.DataProducts.Output_Correlated_[79].SAP=0 Observation.DataProducts.Output_Correlated_[79].centralFrequency=45898437.500000 @@ -3094,6 +3534,8 @@ Observation.DataProducts.Output_Correlated_[79].percentageWritten=0 Observation.DataProducts.Output_Correlated_[79].size=0 Observation.DataProducts.Output_Correlated_[79].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[79].stationSubband=235 +Observation.DataProducts.Output_Correlated_[79].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[79].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[79].subband=79 Observation.DataProducts.Output_Correlated_[7].SAP=0 Observation.DataProducts.Output_Correlated_[7].centralFrequency=31835937.500000 @@ -3108,6 +3550,8 @@ Observation.DataProducts.Output_Correlated_[7].percentageWritten=0 Observation.DataProducts.Output_Correlated_[7].size=0 Observation.DataProducts.Output_Correlated_[7].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[7].stationSubband=163 
+Observation.DataProducts.Output_Correlated_[7].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[7].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[7].subband=7 Observation.DataProducts.Output_Correlated_[80].SAP=0 Observation.DataProducts.Output_Correlated_[80].centralFrequency=46093750.000000 @@ -3122,6 +3566,8 @@ Observation.DataProducts.Output_Correlated_[80].percentageWritten=0 Observation.DataProducts.Output_Correlated_[80].size=0 Observation.DataProducts.Output_Correlated_[80].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[80].stationSubband=236 +Observation.DataProducts.Output_Correlated_[80].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[80].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[80].subband=80 Observation.DataProducts.Output_Correlated_[81].SAP=0 Observation.DataProducts.Output_Correlated_[81].centralFrequency=46289062.500000 @@ -3136,6 +3582,8 @@ Observation.DataProducts.Output_Correlated_[81].percentageWritten=0 Observation.DataProducts.Output_Correlated_[81].size=0 Observation.DataProducts.Output_Correlated_[81].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[81].stationSubband=237 +Observation.DataProducts.Output_Correlated_[81].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[81].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[81].subband=81 Observation.DataProducts.Output_Correlated_[82].SAP=0 Observation.DataProducts.Output_Correlated_[82].centralFrequency=46484375.000000 @@ -3150,6 +3598,8 @@ Observation.DataProducts.Output_Correlated_[82].percentageWritten=0 Observation.DataProducts.Output_Correlated_[82].size=0 Observation.DataProducts.Output_Correlated_[82].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[82].stationSubband=238 +Observation.DataProducts.Output_Correlated_[82].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[82].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[82].subband=82 Observation.DataProducts.Output_Correlated_[83].SAP=0 Observation.DataProducts.Output_Correlated_[83].centralFrequency=46679687.500000 @@ -3164,6 +3614,8 @@ Observation.DataProducts.Output_Correlated_[83].percentageWritten=0 Observation.DataProducts.Output_Correlated_[83].size=0 Observation.DataProducts.Output_Correlated_[83].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[83].stationSubband=239 +Observation.DataProducts.Output_Correlated_[83].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[83].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[83].subband=83 Observation.DataProducts.Output_Correlated_[84].SAP=0 Observation.DataProducts.Output_Correlated_[84].centralFrequency=46875000.000000 @@ -3178,6 +3630,8 @@ Observation.DataProducts.Output_Correlated_[84].percentageWritten=0 Observation.DataProducts.Output_Correlated_[84].size=0 Observation.DataProducts.Output_Correlated_[84].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[84].stationSubband=240 +Observation.DataProducts.Output_Correlated_[84].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[84].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[84].subband=84 Observation.DataProducts.Output_Correlated_[85].SAP=0 Observation.DataProducts.Output_Correlated_[85].centralFrequency=47070312.500000 @@ -3192,6 +3646,8 @@ Observation.DataProducts.Output_Correlated_[85].percentageWritten=0 Observation.DataProducts.Output_Correlated_[85].size=0 Observation.DataProducts.Output_Correlated_[85].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[85].stationSubband=241 +Observation.DataProducts.Output_Correlated_[85].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[85].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[85].subband=85 Observation.DataProducts.Output_Correlated_[86].SAP=0 Observation.DataProducts.Output_Correlated_[86].centralFrequency=47265625.000000 @@ -3206,6 +3662,8 @@ Observation.DataProducts.Output_Correlated_[86].percentageWritten=0 Observation.DataProducts.Output_Correlated_[86].size=0 Observation.DataProducts.Output_Correlated_[86].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[86].stationSubband=242 +Observation.DataProducts.Output_Correlated_[86].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[86].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[86].subband=86 Observation.DataProducts.Output_Correlated_[87].SAP=0 Observation.DataProducts.Output_Correlated_[87].centralFrequency=47460937.500000 @@ -3220,6 +3678,8 @@ Observation.DataProducts.Output_Correlated_[87].percentageWritten=0 Observation.DataProducts.Output_Correlated_[87].size=0 Observation.DataProducts.Output_Correlated_[87].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[87].stationSubband=243 +Observation.DataProducts.Output_Correlated_[87].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[87].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[87].subband=87 Observation.DataProducts.Output_Correlated_[88].SAP=0 Observation.DataProducts.Output_Correlated_[88].centralFrequency=47656250.000000 @@ -3234,6 +3694,8 @@ Observation.DataProducts.Output_Correlated_[88].percentageWritten=0 Observation.DataProducts.Output_Correlated_[88].size=0 Observation.DataProducts.Output_Correlated_[88].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[88].stationSubband=244 +Observation.DataProducts.Output_Correlated_[88].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[88].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[88].subband=88 Observation.DataProducts.Output_Correlated_[89].SAP=0 
Observation.DataProducts.Output_Correlated_[89].centralFrequency=47851562.500000 @@ -3248,6 +3710,8 @@ Observation.DataProducts.Output_Correlated_[89].percentageWritten=0 Observation.DataProducts.Output_Correlated_[89].size=0 Observation.DataProducts.Output_Correlated_[89].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[89].stationSubband=245 +Observation.DataProducts.Output_Correlated_[89].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[89].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[89].subband=89 Observation.DataProducts.Output_Correlated_[8].SAP=0 Observation.DataProducts.Output_Correlated_[8].centralFrequency=32031250.000000 @@ -3262,6 +3726,8 @@ Observation.DataProducts.Output_Correlated_[8].percentageWritten=0 Observation.DataProducts.Output_Correlated_[8].size=0 Observation.DataProducts.Output_Correlated_[8].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[8].stationSubband=164 +Observation.DataProducts.Output_Correlated_[8].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[8].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[8].subband=8 Observation.DataProducts.Output_Correlated_[90].SAP=0 Observation.DataProducts.Output_Correlated_[90].centralFrequency=48046875.000000 @@ -3276,6 +3742,8 @@ Observation.DataProducts.Output_Correlated_[90].percentageWritten=0 Observation.DataProducts.Output_Correlated_[90].size=0 Observation.DataProducts.Output_Correlated_[90].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[90].stationSubband=246 +Observation.DataProducts.Output_Correlated_[90].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[90].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[90].subband=90 Observation.DataProducts.Output_Correlated_[91].SAP=0 Observation.DataProducts.Output_Correlated_[91].centralFrequency=48242187.500000 @@ -3290,6 +3758,8 @@ 
Observation.DataProducts.Output_Correlated_[91].percentageWritten=0 Observation.DataProducts.Output_Correlated_[91].size=0 Observation.DataProducts.Output_Correlated_[91].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[91].stationSubband=247 +Observation.DataProducts.Output_Correlated_[91].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[91].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[91].subband=91 Observation.DataProducts.Output_Correlated_[92].SAP=0 Observation.DataProducts.Output_Correlated_[92].centralFrequency=48437500.000000 @@ -3304,6 +3774,8 @@ Observation.DataProducts.Output_Correlated_[92].percentageWritten=0 Observation.DataProducts.Output_Correlated_[92].size=0 Observation.DataProducts.Output_Correlated_[92].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[92].stationSubband=248 +Observation.DataProducts.Output_Correlated_[92].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[92].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[92].subband=92 Observation.DataProducts.Output_Correlated_[93].SAP=0 Observation.DataProducts.Output_Correlated_[93].centralFrequency=48632812.500000 @@ -3318,6 +3790,8 @@ Observation.DataProducts.Output_Correlated_[93].percentageWritten=0 Observation.DataProducts.Output_Correlated_[93].size=0 Observation.DataProducts.Output_Correlated_[93].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[93].stationSubband=249 +Observation.DataProducts.Output_Correlated_[93].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[93].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[93].subband=93 Observation.DataProducts.Output_Correlated_[94].SAP=0 Observation.DataProducts.Output_Correlated_[94].centralFrequency=48828125.000000 @@ -3332,6 +3806,8 @@ Observation.DataProducts.Output_Correlated_[94].percentageWritten=0 Observation.DataProducts.Output_Correlated_[94].size=0 
Observation.DataProducts.Output_Correlated_[94].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[94].stationSubband=250 +Observation.DataProducts.Output_Correlated_[94].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[94].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[94].subband=94 Observation.DataProducts.Output_Correlated_[95].SAP=0 Observation.DataProducts.Output_Correlated_[95].centralFrequency=49023437.500000 @@ -3346,6 +3822,8 @@ Observation.DataProducts.Output_Correlated_[95].percentageWritten=0 Observation.DataProducts.Output_Correlated_[95].size=0 Observation.DataProducts.Output_Correlated_[95].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[95].stationSubband=251 +Observation.DataProducts.Output_Correlated_[95].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[95].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[95].subband=95 Observation.DataProducts.Output_Correlated_[96].SAP=0 Observation.DataProducts.Output_Correlated_[96].centralFrequency=49218750.000000 @@ -3360,6 +3838,8 @@ Observation.DataProducts.Output_Correlated_[96].percentageWritten=0 Observation.DataProducts.Output_Correlated_[96].size=0 Observation.DataProducts.Output_Correlated_[96].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[96].stationSubband=252 +Observation.DataProducts.Output_Correlated_[96].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[96].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[96].subband=96 Observation.DataProducts.Output_Correlated_[97].SAP=0 Observation.DataProducts.Output_Correlated_[97].centralFrequency=49414062.500000 @@ -3374,6 +3854,8 @@ Observation.DataProducts.Output_Correlated_[97].percentageWritten=0 Observation.DataProducts.Output_Correlated_[97].size=0 Observation.DataProducts.Output_Correlated_[97].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[97].stationSubband=253 +Observation.DataProducts.Output_Correlated_[97].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[97].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[97].subband=97 Observation.DataProducts.Output_Correlated_[98].SAP=0 Observation.DataProducts.Output_Correlated_[98].centralFrequency=49609375.000000 @@ -3388,6 +3870,8 @@ Observation.DataProducts.Output_Correlated_[98].percentageWritten=0 Observation.DataProducts.Output_Correlated_[98].size=0 Observation.DataProducts.Output_Correlated_[98].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[98].stationSubband=254 +Observation.DataProducts.Output_Correlated_[98].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[98].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[98].subband=98 Observation.DataProducts.Output_Correlated_[99].SAP=0 Observation.DataProducts.Output_Correlated_[99].centralFrequency=49804687.500000 @@ -3402,6 +3886,8 @@ Observation.DataProducts.Output_Correlated_[99].percentageWritten=0 Observation.DataProducts.Output_Correlated_[99].size=0 Observation.DataProducts.Output_Correlated_[99].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[99].stationSubband=255 +Observation.DataProducts.Output_Correlated_[99].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[99].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[99].subband=99 Observation.DataProducts.Output_Correlated_[9].SAP=0 Observation.DataProducts.Output_Correlated_[9].centralFrequency=32226562.500000 @@ -3416,7 +3902,10 @@ Observation.DataProducts.Output_Correlated_[9].percentageWritten=0 Observation.DataProducts.Output_Correlated_[9].size=0 Observation.DataProducts.Output_Correlated_[9].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[9].stationSubband=165 
+Observation.DataProducts.Output_Correlated_[9].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[9].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[9].subband=9 Observation.DataProducts.nrOfOutput_Beamformed_=0 Observation.DataProducts.nrOfOutput_Correlated_=244 _isCobalt=T +feedback_version=03.01.00 diff --git a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220134_feedback b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220134_feedback index 939c1850e87629a976c8e6dc03d858c6ec502503..8426e8f485fd18cd4a14b5ef6c4784f93b7e2c32 100644 --- a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220134_feedback +++ b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation220134_feedback @@ -14,6 +14,8 @@ Observation.DataProducts.Output_Correlated_[0].percentageWritten=0 Observation.DataProducts.Output_Correlated_[0].size=0 Observation.DataProducts.Output_Correlated_[0].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[0].stationSubband=156 +Observation.DataProducts.Output_Correlated_[0].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[0].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[0].subband=0 Observation.DataProducts.Output_Correlated_[100].SAP=0 Observation.DataProducts.Output_Correlated_[100].centralFrequency=50000000.000000 @@ -28,6 +30,8 @@ Observation.DataProducts.Output_Correlated_[100].percentageWritten=0 Observation.DataProducts.Output_Correlated_[100].size=0 Observation.DataProducts.Output_Correlated_[100].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[100].stationSubband=256 +Observation.DataProducts.Output_Correlated_[100].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[100].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[100].subband=100 Observation.DataProducts.Output_Correlated_[101].SAP=0 
Observation.DataProducts.Output_Correlated_[101].centralFrequency=50195312.500000 @@ -42,6 +46,8 @@ Observation.DataProducts.Output_Correlated_[101].percentageWritten=0 Observation.DataProducts.Output_Correlated_[101].size=0 Observation.DataProducts.Output_Correlated_[101].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[101].stationSubband=257 +Observation.DataProducts.Output_Correlated_[101].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[101].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[101].subband=101 Observation.DataProducts.Output_Correlated_[102].SAP=0 Observation.DataProducts.Output_Correlated_[102].centralFrequency=50390625.000000 @@ -56,6 +62,8 @@ Observation.DataProducts.Output_Correlated_[102].percentageWritten=0 Observation.DataProducts.Output_Correlated_[102].size=0 Observation.DataProducts.Output_Correlated_[102].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[102].stationSubband=258 +Observation.DataProducts.Output_Correlated_[102].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[102].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[102].subband=102 Observation.DataProducts.Output_Correlated_[103].SAP=0 Observation.DataProducts.Output_Correlated_[103].centralFrequency=50585937.500000 @@ -70,6 +78,8 @@ Observation.DataProducts.Output_Correlated_[103].percentageWritten=0 Observation.DataProducts.Output_Correlated_[103].size=0 Observation.DataProducts.Output_Correlated_[103].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[103].stationSubband=259 +Observation.DataProducts.Output_Correlated_[103].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[103].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[103].subband=103 Observation.DataProducts.Output_Correlated_[104].SAP=0 Observation.DataProducts.Output_Correlated_[104].centralFrequency=50781250.000000 @@ -84,6 +94,8 @@ 
Observation.DataProducts.Output_Correlated_[104].percentageWritten=0 Observation.DataProducts.Output_Correlated_[104].size=0 Observation.DataProducts.Output_Correlated_[104].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[104].stationSubband=260 +Observation.DataProducts.Output_Correlated_[104].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[104].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[104].subband=104 Observation.DataProducts.Output_Correlated_[105].SAP=0 Observation.DataProducts.Output_Correlated_[105].centralFrequency=50976562.500000 @@ -98,6 +110,8 @@ Observation.DataProducts.Output_Correlated_[105].percentageWritten=0 Observation.DataProducts.Output_Correlated_[105].size=0 Observation.DataProducts.Output_Correlated_[105].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[105].stationSubband=261 +Observation.DataProducts.Output_Correlated_[105].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[105].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[105].subband=105 Observation.DataProducts.Output_Correlated_[106].SAP=0 Observation.DataProducts.Output_Correlated_[106].centralFrequency=51171875.000000 @@ -112,6 +126,8 @@ Observation.DataProducts.Output_Correlated_[106].percentageWritten=0 Observation.DataProducts.Output_Correlated_[106].size=0 Observation.DataProducts.Output_Correlated_[106].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[106].stationSubband=262 +Observation.DataProducts.Output_Correlated_[106].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[106].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[106].subband=106 Observation.DataProducts.Output_Correlated_[107].SAP=0 Observation.DataProducts.Output_Correlated_[107].centralFrequency=51367187.500000 @@ -126,6 +142,8 @@ Observation.DataProducts.Output_Correlated_[107].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[107].size=0 Observation.DataProducts.Output_Correlated_[107].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[107].stationSubband=263 +Observation.DataProducts.Output_Correlated_[107].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[107].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[107].subband=107 Observation.DataProducts.Output_Correlated_[108].SAP=0 Observation.DataProducts.Output_Correlated_[108].centralFrequency=51562500.000000 @@ -140,6 +158,8 @@ Observation.DataProducts.Output_Correlated_[108].percentageWritten=0 Observation.DataProducts.Output_Correlated_[108].size=0 Observation.DataProducts.Output_Correlated_[108].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[108].stationSubband=264 +Observation.DataProducts.Output_Correlated_[108].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[108].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[108].subband=108 Observation.DataProducts.Output_Correlated_[109].SAP=0 Observation.DataProducts.Output_Correlated_[109].centralFrequency=51757812.500000 @@ -154,6 +174,8 @@ Observation.DataProducts.Output_Correlated_[109].percentageWritten=0 Observation.DataProducts.Output_Correlated_[109].size=0 Observation.DataProducts.Output_Correlated_[109].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[109].stationSubband=265 +Observation.DataProducts.Output_Correlated_[109].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[109].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[109].subband=109 Observation.DataProducts.Output_Correlated_[10].SAP=0 Observation.DataProducts.Output_Correlated_[10].centralFrequency=32421875.000000 @@ -168,6 +190,8 @@ Observation.DataProducts.Output_Correlated_[10].percentageWritten=0 Observation.DataProducts.Output_Correlated_[10].size=0 
Observation.DataProducts.Output_Correlated_[10].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[10].stationSubband=166 +Observation.DataProducts.Output_Correlated_[10].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[10].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[10].subband=10 Observation.DataProducts.Output_Correlated_[110].SAP=0 Observation.DataProducts.Output_Correlated_[110].centralFrequency=51953125.000000 @@ -182,6 +206,8 @@ Observation.DataProducts.Output_Correlated_[110].percentageWritten=0 Observation.DataProducts.Output_Correlated_[110].size=0 Observation.DataProducts.Output_Correlated_[110].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[110].stationSubband=266 +Observation.DataProducts.Output_Correlated_[110].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[110].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[110].subband=110 Observation.DataProducts.Output_Correlated_[111].SAP=0 Observation.DataProducts.Output_Correlated_[111].centralFrequency=52148437.500000 @@ -196,6 +222,8 @@ Observation.DataProducts.Output_Correlated_[111].percentageWritten=0 Observation.DataProducts.Output_Correlated_[111].size=0 Observation.DataProducts.Output_Correlated_[111].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[111].stationSubband=267 +Observation.DataProducts.Output_Correlated_[111].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[111].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[111].subband=111 Observation.DataProducts.Output_Correlated_[112].SAP=0 Observation.DataProducts.Output_Correlated_[112].centralFrequency=52343750.000000 @@ -210,6 +238,8 @@ Observation.DataProducts.Output_Correlated_[112].percentageWritten=0 Observation.DataProducts.Output_Correlated_[112].size=0 Observation.DataProducts.Output_Correlated_[112].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[112].stationSubband=268 +Observation.DataProducts.Output_Correlated_[112].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[112].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[112].subband=112 Observation.DataProducts.Output_Correlated_[113].SAP=0 Observation.DataProducts.Output_Correlated_[113].centralFrequency=52539062.500000 @@ -224,6 +254,8 @@ Observation.DataProducts.Output_Correlated_[113].percentageWritten=0 Observation.DataProducts.Output_Correlated_[113].size=0 Observation.DataProducts.Output_Correlated_[113].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[113].stationSubband=269 +Observation.DataProducts.Output_Correlated_[113].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[113].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[113].subband=113 Observation.DataProducts.Output_Correlated_[114].SAP=0 Observation.DataProducts.Output_Correlated_[114].centralFrequency=52734375.000000 @@ -238,6 +270,8 @@ Observation.DataProducts.Output_Correlated_[114].percentageWritten=0 Observation.DataProducts.Output_Correlated_[114].size=0 Observation.DataProducts.Output_Correlated_[114].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[114].stationSubband=270 +Observation.DataProducts.Output_Correlated_[114].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[114].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[114].subband=114 Observation.DataProducts.Output_Correlated_[115].SAP=0 Observation.DataProducts.Output_Correlated_[115].centralFrequency=52929687.500000 @@ -252,6 +286,8 @@ Observation.DataProducts.Output_Correlated_[115].percentageWritten=0 Observation.DataProducts.Output_Correlated_[115].size=0 Observation.DataProducts.Output_Correlated_[115].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[115].stationSubband=271 
+Observation.DataProducts.Output_Correlated_[115].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[115].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[115].subband=115 Observation.DataProducts.Output_Correlated_[116].SAP=0 Observation.DataProducts.Output_Correlated_[116].centralFrequency=53125000.000000 @@ -266,6 +302,8 @@ Observation.DataProducts.Output_Correlated_[116].percentageWritten=0 Observation.DataProducts.Output_Correlated_[116].size=0 Observation.DataProducts.Output_Correlated_[116].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[116].stationSubband=272 +Observation.DataProducts.Output_Correlated_[116].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[116].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[116].subband=116 Observation.DataProducts.Output_Correlated_[117].SAP=0 Observation.DataProducts.Output_Correlated_[117].centralFrequency=53320312.500000 @@ -280,6 +318,8 @@ Observation.DataProducts.Output_Correlated_[117].percentageWritten=0 Observation.DataProducts.Output_Correlated_[117].size=0 Observation.DataProducts.Output_Correlated_[117].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[117].stationSubband=273 +Observation.DataProducts.Output_Correlated_[117].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[117].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[117].subband=117 Observation.DataProducts.Output_Correlated_[118].SAP=0 Observation.DataProducts.Output_Correlated_[118].centralFrequency=53515625.000000 @@ -294,6 +334,8 @@ Observation.DataProducts.Output_Correlated_[118].percentageWritten=0 Observation.DataProducts.Output_Correlated_[118].size=0 Observation.DataProducts.Output_Correlated_[118].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[118].stationSubband=274 +Observation.DataProducts.Output_Correlated_[118].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[118].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[118].subband=118 Observation.DataProducts.Output_Correlated_[119].SAP=0 Observation.DataProducts.Output_Correlated_[119].centralFrequency=53710937.500000 @@ -308,6 +350,8 @@ Observation.DataProducts.Output_Correlated_[119].percentageWritten=0 Observation.DataProducts.Output_Correlated_[119].size=0 Observation.DataProducts.Output_Correlated_[119].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[119].stationSubband=275 +Observation.DataProducts.Output_Correlated_[119].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[119].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[119].subband=119 Observation.DataProducts.Output_Correlated_[11].SAP=0 Observation.DataProducts.Output_Correlated_[11].centralFrequency=32617187.500000 @@ -322,6 +366,8 @@ Observation.DataProducts.Output_Correlated_[11].percentageWritten=0 Observation.DataProducts.Output_Correlated_[11].size=0 Observation.DataProducts.Output_Correlated_[11].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[11].stationSubband=167 +Observation.DataProducts.Output_Correlated_[11].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[11].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[11].subband=11 Observation.DataProducts.Output_Correlated_[120].SAP=0 Observation.DataProducts.Output_Correlated_[120].centralFrequency=53906250.000000 @@ -336,6 +382,8 @@ Observation.DataProducts.Output_Correlated_[120].percentageWritten=0 Observation.DataProducts.Output_Correlated_[120].size=0 Observation.DataProducts.Output_Correlated_[120].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[120].stationSubband=276 +Observation.DataProducts.Output_Correlated_[120].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[120].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[120].subband=120 Observation.DataProducts.Output_Correlated_[121].SAP=0 Observation.DataProducts.Output_Correlated_[121].centralFrequency=54101562.500000 @@ -350,6 +398,8 @@ Observation.DataProducts.Output_Correlated_[121].percentageWritten=0 Observation.DataProducts.Output_Correlated_[121].size=0 Observation.DataProducts.Output_Correlated_[121].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[121].stationSubband=277 +Observation.DataProducts.Output_Correlated_[121].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[121].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[121].subband=121 Observation.DataProducts.Output_Correlated_[122].SAP=0 Observation.DataProducts.Output_Correlated_[122].centralFrequency=54296875.000000 @@ -364,6 +414,8 @@ Observation.DataProducts.Output_Correlated_[122].percentageWritten=0 Observation.DataProducts.Output_Correlated_[122].size=0 Observation.DataProducts.Output_Correlated_[122].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[122].stationSubband=278 +Observation.DataProducts.Output_Correlated_[122].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[122].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[122].subband=122 Observation.DataProducts.Output_Correlated_[123].SAP=0 Observation.DataProducts.Output_Correlated_[123].centralFrequency=54492187.500000 @@ -378,6 +430,8 @@ Observation.DataProducts.Output_Correlated_[123].percentageWritten=0 Observation.DataProducts.Output_Correlated_[123].size=0 Observation.DataProducts.Output_Correlated_[123].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[123].stationSubband=279 +Observation.DataProducts.Output_Correlated_[123].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[123].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[123].subband=123 
Observation.DataProducts.Output_Correlated_[124].SAP=0 Observation.DataProducts.Output_Correlated_[124].centralFrequency=54687500.000000 @@ -392,6 +446,8 @@ Observation.DataProducts.Output_Correlated_[124].percentageWritten=0 Observation.DataProducts.Output_Correlated_[124].size=0 Observation.DataProducts.Output_Correlated_[124].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[124].stationSubband=280 +Observation.DataProducts.Output_Correlated_[124].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[124].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[124].subband=124 Observation.DataProducts.Output_Correlated_[125].SAP=0 Observation.DataProducts.Output_Correlated_[125].centralFrequency=54882812.500000 @@ -406,6 +462,8 @@ Observation.DataProducts.Output_Correlated_[125].percentageWritten=0 Observation.DataProducts.Output_Correlated_[125].size=0 Observation.DataProducts.Output_Correlated_[125].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[125].stationSubband=281 +Observation.DataProducts.Output_Correlated_[125].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[125].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[125].subband=125 Observation.DataProducts.Output_Correlated_[126].SAP=0 Observation.DataProducts.Output_Correlated_[126].centralFrequency=55078125.000000 @@ -420,6 +478,8 @@ Observation.DataProducts.Output_Correlated_[126].percentageWritten=0 Observation.DataProducts.Output_Correlated_[126].size=0 Observation.DataProducts.Output_Correlated_[126].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[126].stationSubband=282 +Observation.DataProducts.Output_Correlated_[126].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[126].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[126].subband=126 Observation.DataProducts.Output_Correlated_[127].SAP=0 
Observation.DataProducts.Output_Correlated_[127].centralFrequency=55273437.500000 @@ -434,6 +494,8 @@ Observation.DataProducts.Output_Correlated_[127].percentageWritten=0 Observation.DataProducts.Output_Correlated_[127].size=0 Observation.DataProducts.Output_Correlated_[127].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[127].stationSubband=283 +Observation.DataProducts.Output_Correlated_[127].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[127].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[127].subband=127 Observation.DataProducts.Output_Correlated_[128].SAP=0 Observation.DataProducts.Output_Correlated_[128].centralFrequency=55468750.000000 @@ -448,6 +510,8 @@ Observation.DataProducts.Output_Correlated_[128].percentageWritten=0 Observation.DataProducts.Output_Correlated_[128].size=0 Observation.DataProducts.Output_Correlated_[128].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[128].stationSubband=284 +Observation.DataProducts.Output_Correlated_[128].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[128].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[128].subband=128 Observation.DataProducts.Output_Correlated_[129].SAP=0 Observation.DataProducts.Output_Correlated_[129].centralFrequency=55664062.500000 @@ -462,6 +526,8 @@ Observation.DataProducts.Output_Correlated_[129].percentageWritten=0 Observation.DataProducts.Output_Correlated_[129].size=0 Observation.DataProducts.Output_Correlated_[129].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[129].stationSubband=285 +Observation.DataProducts.Output_Correlated_[129].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[129].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[129].subband=129 Observation.DataProducts.Output_Correlated_[12].SAP=0 Observation.DataProducts.Output_Correlated_[12].centralFrequency=32812500.000000 @@ -476,6 +542,8 @@ 
Observation.DataProducts.Output_Correlated_[12].percentageWritten=0 Observation.DataProducts.Output_Correlated_[12].size=0 Observation.DataProducts.Output_Correlated_[12].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[12].stationSubband=168 +Observation.DataProducts.Output_Correlated_[12].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[12].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[12].subband=12 Observation.DataProducts.Output_Correlated_[130].SAP=0 Observation.DataProducts.Output_Correlated_[130].centralFrequency=55859375.000000 @@ -490,6 +558,8 @@ Observation.DataProducts.Output_Correlated_[130].percentageWritten=0 Observation.DataProducts.Output_Correlated_[130].size=0 Observation.DataProducts.Output_Correlated_[130].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[130].stationSubband=286 +Observation.DataProducts.Output_Correlated_[130].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[130].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[130].subband=130 Observation.DataProducts.Output_Correlated_[131].SAP=0 Observation.DataProducts.Output_Correlated_[131].centralFrequency=56054687.500000 @@ -504,6 +574,8 @@ Observation.DataProducts.Output_Correlated_[131].percentageWritten=0 Observation.DataProducts.Output_Correlated_[131].size=0 Observation.DataProducts.Output_Correlated_[131].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[131].stationSubband=287 +Observation.DataProducts.Output_Correlated_[131].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[131].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[131].subband=131 Observation.DataProducts.Output_Correlated_[132].SAP=0 Observation.DataProducts.Output_Correlated_[132].centralFrequency=56250000.000000 @@ -518,6 +590,8 @@ Observation.DataProducts.Output_Correlated_[132].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[132].size=0 Observation.DataProducts.Output_Correlated_[132].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[132].stationSubband=288 +Observation.DataProducts.Output_Correlated_[132].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[132].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[132].subband=132 Observation.DataProducts.Output_Correlated_[133].SAP=0 Observation.DataProducts.Output_Correlated_[133].centralFrequency=56445312.500000 @@ -532,6 +606,8 @@ Observation.DataProducts.Output_Correlated_[133].percentageWritten=0 Observation.DataProducts.Output_Correlated_[133].size=0 Observation.DataProducts.Output_Correlated_[133].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[133].stationSubband=289 +Observation.DataProducts.Output_Correlated_[133].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[133].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[133].subband=133 Observation.DataProducts.Output_Correlated_[134].SAP=0 Observation.DataProducts.Output_Correlated_[134].centralFrequency=56640625.000000 @@ -546,6 +622,8 @@ Observation.DataProducts.Output_Correlated_[134].percentageWritten=0 Observation.DataProducts.Output_Correlated_[134].size=0 Observation.DataProducts.Output_Correlated_[134].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[134].stationSubband=290 +Observation.DataProducts.Output_Correlated_[134].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[134].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[134].subband=134 Observation.DataProducts.Output_Correlated_[135].SAP=0 Observation.DataProducts.Output_Correlated_[135].centralFrequency=56835937.500000 @@ -560,6 +638,8 @@ Observation.DataProducts.Output_Correlated_[135].percentageWritten=0 Observation.DataProducts.Output_Correlated_[135].size=0 
Observation.DataProducts.Output_Correlated_[135].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[135].stationSubband=291 +Observation.DataProducts.Output_Correlated_[135].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[135].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[135].subband=135 Observation.DataProducts.Output_Correlated_[136].SAP=0 Observation.DataProducts.Output_Correlated_[136].centralFrequency=57031250.000000 @@ -574,6 +654,8 @@ Observation.DataProducts.Output_Correlated_[136].percentageWritten=0 Observation.DataProducts.Output_Correlated_[136].size=0 Observation.DataProducts.Output_Correlated_[136].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[136].stationSubband=292 +Observation.DataProducts.Output_Correlated_[136].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[136].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[136].subband=136 Observation.DataProducts.Output_Correlated_[137].SAP=0 Observation.DataProducts.Output_Correlated_[137].centralFrequency=57226562.500000 @@ -588,6 +670,8 @@ Observation.DataProducts.Output_Correlated_[137].percentageWritten=0 Observation.DataProducts.Output_Correlated_[137].size=0 Observation.DataProducts.Output_Correlated_[137].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[137].stationSubband=293 +Observation.DataProducts.Output_Correlated_[137].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[137].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[137].subband=137 Observation.DataProducts.Output_Correlated_[138].SAP=0 Observation.DataProducts.Output_Correlated_[138].centralFrequency=57421875.000000 @@ -602,6 +686,8 @@ Observation.DataProducts.Output_Correlated_[138].percentageWritten=0 Observation.DataProducts.Output_Correlated_[138].size=0 Observation.DataProducts.Output_Correlated_[138].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[138].stationSubband=294 +Observation.DataProducts.Output_Correlated_[138].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[138].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[138].subband=138 Observation.DataProducts.Output_Correlated_[139].SAP=0 Observation.DataProducts.Output_Correlated_[139].centralFrequency=57617187.500000 @@ -616,6 +702,8 @@ Observation.DataProducts.Output_Correlated_[139].percentageWritten=0 Observation.DataProducts.Output_Correlated_[139].size=0 Observation.DataProducts.Output_Correlated_[139].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[139].stationSubband=295 +Observation.DataProducts.Output_Correlated_[139].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[139].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[139].subband=139 Observation.DataProducts.Output_Correlated_[13].SAP=0 Observation.DataProducts.Output_Correlated_[13].centralFrequency=33007812.500000 @@ -630,6 +718,8 @@ Observation.DataProducts.Output_Correlated_[13].percentageWritten=0 Observation.DataProducts.Output_Correlated_[13].size=0 Observation.DataProducts.Output_Correlated_[13].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[13].stationSubband=169 +Observation.DataProducts.Output_Correlated_[13].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[13].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[13].subband=13 Observation.DataProducts.Output_Correlated_[140].SAP=0 Observation.DataProducts.Output_Correlated_[140].centralFrequency=57812500.000000 @@ -644,6 +734,8 @@ Observation.DataProducts.Output_Correlated_[140].percentageWritten=0 Observation.DataProducts.Output_Correlated_[140].size=0 Observation.DataProducts.Output_Correlated_[140].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[140].stationSubband=296 
+Observation.DataProducts.Output_Correlated_[140].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[140].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[140].subband=140 Observation.DataProducts.Output_Correlated_[141].SAP=0 Observation.DataProducts.Output_Correlated_[141].centralFrequency=58007812.500000 @@ -658,6 +750,8 @@ Observation.DataProducts.Output_Correlated_[141].percentageWritten=0 Observation.DataProducts.Output_Correlated_[141].size=0 Observation.DataProducts.Output_Correlated_[141].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[141].stationSubband=297 +Observation.DataProducts.Output_Correlated_[141].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[141].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[141].subband=141 Observation.DataProducts.Output_Correlated_[142].SAP=0 Observation.DataProducts.Output_Correlated_[142].centralFrequency=58203125.000000 @@ -672,6 +766,8 @@ Observation.DataProducts.Output_Correlated_[142].percentageWritten=0 Observation.DataProducts.Output_Correlated_[142].size=0 Observation.DataProducts.Output_Correlated_[142].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[142].stationSubband=298 +Observation.DataProducts.Output_Correlated_[142].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[142].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[142].subband=142 Observation.DataProducts.Output_Correlated_[143].SAP=0 Observation.DataProducts.Output_Correlated_[143].centralFrequency=58398437.500000 @@ -686,6 +782,8 @@ Observation.DataProducts.Output_Correlated_[143].percentageWritten=0 Observation.DataProducts.Output_Correlated_[143].size=0 Observation.DataProducts.Output_Correlated_[143].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[143].stationSubband=299 +Observation.DataProducts.Output_Correlated_[143].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[143].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[143].subband=143 Observation.DataProducts.Output_Correlated_[144].SAP=0 Observation.DataProducts.Output_Correlated_[144].centralFrequency=58593750.000000 @@ -700,6 +798,8 @@ Observation.DataProducts.Output_Correlated_[144].percentageWritten=0 Observation.DataProducts.Output_Correlated_[144].size=0 Observation.DataProducts.Output_Correlated_[144].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[144].stationSubband=300 +Observation.DataProducts.Output_Correlated_[144].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[144].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[144].subband=144 Observation.DataProducts.Output_Correlated_[145].SAP=0 Observation.DataProducts.Output_Correlated_[145].centralFrequency=58789062.500000 @@ -714,6 +814,8 @@ Observation.DataProducts.Output_Correlated_[145].percentageWritten=0 Observation.DataProducts.Output_Correlated_[145].size=0 Observation.DataProducts.Output_Correlated_[145].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[145].stationSubband=301 +Observation.DataProducts.Output_Correlated_[145].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[145].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[145].subband=145 Observation.DataProducts.Output_Correlated_[146].SAP=0 Observation.DataProducts.Output_Correlated_[146].centralFrequency=58984375.000000 @@ -728,6 +830,8 @@ Observation.DataProducts.Output_Correlated_[146].percentageWritten=0 Observation.DataProducts.Output_Correlated_[146].size=0 Observation.DataProducts.Output_Correlated_[146].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[146].stationSubband=302 +Observation.DataProducts.Output_Correlated_[146].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[146].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[146].subband=146 Observation.DataProducts.Output_Correlated_[147].SAP=0 Observation.DataProducts.Output_Correlated_[147].centralFrequency=59179687.500000 @@ -742,6 +846,8 @@ Observation.DataProducts.Output_Correlated_[147].percentageWritten=0 Observation.DataProducts.Output_Correlated_[147].size=0 Observation.DataProducts.Output_Correlated_[147].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[147].stationSubband=303 +Observation.DataProducts.Output_Correlated_[147].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[147].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[147].subband=147 Observation.DataProducts.Output_Correlated_[148].SAP=0 Observation.DataProducts.Output_Correlated_[148].centralFrequency=59375000.000000 @@ -756,6 +862,8 @@ Observation.DataProducts.Output_Correlated_[148].percentageWritten=0 Observation.DataProducts.Output_Correlated_[148].size=0 Observation.DataProducts.Output_Correlated_[148].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[148].stationSubband=304 +Observation.DataProducts.Output_Correlated_[148].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[148].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[148].subband=148 Observation.DataProducts.Output_Correlated_[149].SAP=0 Observation.DataProducts.Output_Correlated_[149].centralFrequency=59570312.500000 @@ -770,6 +878,8 @@ Observation.DataProducts.Output_Correlated_[149].percentageWritten=0 Observation.DataProducts.Output_Correlated_[149].size=0 Observation.DataProducts.Output_Correlated_[149].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[149].stationSubband=305 +Observation.DataProducts.Output_Correlated_[149].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[149].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[149].subband=149 
Observation.DataProducts.Output_Correlated_[14].SAP=0 Observation.DataProducts.Output_Correlated_[14].centralFrequency=33203125.000000 @@ -784,6 +894,8 @@ Observation.DataProducts.Output_Correlated_[14].percentageWritten=0 Observation.DataProducts.Output_Correlated_[14].size=0 Observation.DataProducts.Output_Correlated_[14].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[14].stationSubband=170 +Observation.DataProducts.Output_Correlated_[14].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[14].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[14].subband=14 Observation.DataProducts.Output_Correlated_[150].SAP=0 Observation.DataProducts.Output_Correlated_[150].centralFrequency=59765625.000000 @@ -798,6 +910,8 @@ Observation.DataProducts.Output_Correlated_[150].percentageWritten=0 Observation.DataProducts.Output_Correlated_[150].size=0 Observation.DataProducts.Output_Correlated_[150].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[150].stationSubband=306 +Observation.DataProducts.Output_Correlated_[150].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[150].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[150].subband=150 Observation.DataProducts.Output_Correlated_[151].SAP=0 Observation.DataProducts.Output_Correlated_[151].centralFrequency=59960937.500000 @@ -812,6 +926,8 @@ Observation.DataProducts.Output_Correlated_[151].percentageWritten=0 Observation.DataProducts.Output_Correlated_[151].size=0 Observation.DataProducts.Output_Correlated_[151].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[151].stationSubband=307 +Observation.DataProducts.Output_Correlated_[151].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[151].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[151].subband=151 Observation.DataProducts.Output_Correlated_[152].SAP=0 
Observation.DataProducts.Output_Correlated_[152].centralFrequency=60156250.000000 @@ -826,6 +942,8 @@ Observation.DataProducts.Output_Correlated_[152].percentageWritten=0 Observation.DataProducts.Output_Correlated_[152].size=0 Observation.DataProducts.Output_Correlated_[152].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[152].stationSubband=308 +Observation.DataProducts.Output_Correlated_[152].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[152].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[152].subband=152 Observation.DataProducts.Output_Correlated_[153].SAP=0 Observation.DataProducts.Output_Correlated_[153].centralFrequency=60351562.500000 @@ -840,6 +958,8 @@ Observation.DataProducts.Output_Correlated_[153].percentageWritten=0 Observation.DataProducts.Output_Correlated_[153].size=0 Observation.DataProducts.Output_Correlated_[153].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[153].stationSubband=309 +Observation.DataProducts.Output_Correlated_[153].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[153].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[153].subband=153 Observation.DataProducts.Output_Correlated_[154].SAP=0 Observation.DataProducts.Output_Correlated_[154].centralFrequency=60546875.000000 @@ -854,6 +974,8 @@ Observation.DataProducts.Output_Correlated_[154].percentageWritten=0 Observation.DataProducts.Output_Correlated_[154].size=0 Observation.DataProducts.Output_Correlated_[154].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[154].stationSubband=310 +Observation.DataProducts.Output_Correlated_[154].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[154].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[154].subband=154 Observation.DataProducts.Output_Correlated_[155].SAP=0 Observation.DataProducts.Output_Correlated_[155].centralFrequency=60742187.500000 @@ -868,6 +990,8 
@@ Observation.DataProducts.Output_Correlated_[155].percentageWritten=0 Observation.DataProducts.Output_Correlated_[155].size=0 Observation.DataProducts.Output_Correlated_[155].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[155].stationSubband=311 +Observation.DataProducts.Output_Correlated_[155].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[155].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[155].subband=155 Observation.DataProducts.Output_Correlated_[156].SAP=0 Observation.DataProducts.Output_Correlated_[156].centralFrequency=60937500.000000 @@ -882,6 +1006,8 @@ Observation.DataProducts.Output_Correlated_[156].percentageWritten=0 Observation.DataProducts.Output_Correlated_[156].size=0 Observation.DataProducts.Output_Correlated_[156].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[156].stationSubband=312 +Observation.DataProducts.Output_Correlated_[156].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[156].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[156].subband=156 Observation.DataProducts.Output_Correlated_[157].SAP=0 Observation.DataProducts.Output_Correlated_[157].centralFrequency=61132812.500000 @@ -896,6 +1022,8 @@ Observation.DataProducts.Output_Correlated_[157].percentageWritten=0 Observation.DataProducts.Output_Correlated_[157].size=0 Observation.DataProducts.Output_Correlated_[157].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[157].stationSubband=313 +Observation.DataProducts.Output_Correlated_[157].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[157].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[157].subband=157 Observation.DataProducts.Output_Correlated_[158].SAP=0 Observation.DataProducts.Output_Correlated_[158].centralFrequency=61328125.000000 @@ -910,6 +1038,8 @@ Observation.DataProducts.Output_Correlated_[158].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[158].size=0 Observation.DataProducts.Output_Correlated_[158].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[158].stationSubband=314 +Observation.DataProducts.Output_Correlated_[158].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[158].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[158].subband=158 Observation.DataProducts.Output_Correlated_[159].SAP=0 Observation.DataProducts.Output_Correlated_[159].centralFrequency=61523437.500000 @@ -924,6 +1054,8 @@ Observation.DataProducts.Output_Correlated_[159].percentageWritten=0 Observation.DataProducts.Output_Correlated_[159].size=0 Observation.DataProducts.Output_Correlated_[159].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[159].stationSubband=315 +Observation.DataProducts.Output_Correlated_[159].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[159].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[159].subband=159 Observation.DataProducts.Output_Correlated_[15].SAP=0 Observation.DataProducts.Output_Correlated_[15].centralFrequency=33398437.500000 @@ -938,6 +1070,8 @@ Observation.DataProducts.Output_Correlated_[15].percentageWritten=0 Observation.DataProducts.Output_Correlated_[15].size=0 Observation.DataProducts.Output_Correlated_[15].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[15].stationSubband=171 +Observation.DataProducts.Output_Correlated_[15].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[15].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[15].subband=15 Observation.DataProducts.Output_Correlated_[160].SAP=0 Observation.DataProducts.Output_Correlated_[160].centralFrequency=61718750.000000 @@ -952,6 +1086,8 @@ Observation.DataProducts.Output_Correlated_[160].percentageWritten=0 Observation.DataProducts.Output_Correlated_[160].size=0 
Observation.DataProducts.Output_Correlated_[160].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[160].stationSubband=316 +Observation.DataProducts.Output_Correlated_[160].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[160].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[160].subband=160 Observation.DataProducts.Output_Correlated_[161].SAP=0 Observation.DataProducts.Output_Correlated_[161].centralFrequency=61914062.500000 @@ -966,6 +1102,8 @@ Observation.DataProducts.Output_Correlated_[161].percentageWritten=0 Observation.DataProducts.Output_Correlated_[161].size=0 Observation.DataProducts.Output_Correlated_[161].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[161].stationSubband=317 +Observation.DataProducts.Output_Correlated_[161].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[161].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[161].subband=161 Observation.DataProducts.Output_Correlated_[162].SAP=0 Observation.DataProducts.Output_Correlated_[162].centralFrequency=62109375.000000 @@ -980,6 +1118,8 @@ Observation.DataProducts.Output_Correlated_[162].percentageWritten=0 Observation.DataProducts.Output_Correlated_[162].size=0 Observation.DataProducts.Output_Correlated_[162].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[162].stationSubband=318 +Observation.DataProducts.Output_Correlated_[162].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[162].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[162].subband=162 Observation.DataProducts.Output_Correlated_[163].SAP=0 Observation.DataProducts.Output_Correlated_[163].centralFrequency=62304687.500000 @@ -994,6 +1134,8 @@ Observation.DataProducts.Output_Correlated_[163].percentageWritten=0 Observation.DataProducts.Output_Correlated_[163].size=0 Observation.DataProducts.Output_Correlated_[163].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[163].stationSubband=319 +Observation.DataProducts.Output_Correlated_[163].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[163].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[163].subband=163 Observation.DataProducts.Output_Correlated_[164].SAP=0 Observation.DataProducts.Output_Correlated_[164].centralFrequency=62500000.000000 @@ -1008,6 +1150,8 @@ Observation.DataProducts.Output_Correlated_[164].percentageWritten=0 Observation.DataProducts.Output_Correlated_[164].size=0 Observation.DataProducts.Output_Correlated_[164].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[164].stationSubband=320 +Observation.DataProducts.Output_Correlated_[164].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[164].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[164].subband=164 Observation.DataProducts.Output_Correlated_[165].SAP=0 Observation.DataProducts.Output_Correlated_[165].centralFrequency=62695312.500000 @@ -1022,6 +1166,8 @@ Observation.DataProducts.Output_Correlated_[165].percentageWritten=0 Observation.DataProducts.Output_Correlated_[165].size=0 Observation.DataProducts.Output_Correlated_[165].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[165].stationSubband=321 +Observation.DataProducts.Output_Correlated_[165].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[165].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[165].subband=165 Observation.DataProducts.Output_Correlated_[166].SAP=0 Observation.DataProducts.Output_Correlated_[166].centralFrequency=62890625.000000 @@ -1036,6 +1182,8 @@ Observation.DataProducts.Output_Correlated_[166].percentageWritten=0 Observation.DataProducts.Output_Correlated_[166].size=0 Observation.DataProducts.Output_Correlated_[166].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[166].stationSubband=322 
+Observation.DataProducts.Output_Correlated_[166].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[166].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[166].subband=166 Observation.DataProducts.Output_Correlated_[167].SAP=0 Observation.DataProducts.Output_Correlated_[167].centralFrequency=63085937.500000 @@ -1050,6 +1198,8 @@ Observation.DataProducts.Output_Correlated_[167].percentageWritten=0 Observation.DataProducts.Output_Correlated_[167].size=0 Observation.DataProducts.Output_Correlated_[167].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[167].stationSubband=323 +Observation.DataProducts.Output_Correlated_[167].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[167].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[167].subband=167 Observation.DataProducts.Output_Correlated_[168].SAP=0 Observation.DataProducts.Output_Correlated_[168].centralFrequency=63281250.000000 @@ -1064,6 +1214,8 @@ Observation.DataProducts.Output_Correlated_[168].percentageWritten=0 Observation.DataProducts.Output_Correlated_[168].size=0 Observation.DataProducts.Output_Correlated_[168].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[168].stationSubband=324 +Observation.DataProducts.Output_Correlated_[168].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[168].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[168].subband=168 Observation.DataProducts.Output_Correlated_[169].SAP=0 Observation.DataProducts.Output_Correlated_[169].centralFrequency=63476562.500000 @@ -1078,6 +1230,8 @@ Observation.DataProducts.Output_Correlated_[169].percentageWritten=0 Observation.DataProducts.Output_Correlated_[169].size=0 Observation.DataProducts.Output_Correlated_[169].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[169].stationSubband=325 +Observation.DataProducts.Output_Correlated_[169].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[169].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[169].subband=169 Observation.DataProducts.Output_Correlated_[16].SAP=0 Observation.DataProducts.Output_Correlated_[16].centralFrequency=33593750.000000 @@ -1092,6 +1246,8 @@ Observation.DataProducts.Output_Correlated_[16].percentageWritten=0 Observation.DataProducts.Output_Correlated_[16].size=0 Observation.DataProducts.Output_Correlated_[16].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[16].stationSubband=172 +Observation.DataProducts.Output_Correlated_[16].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[16].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[16].subband=16 Observation.DataProducts.Output_Correlated_[170].SAP=0 Observation.DataProducts.Output_Correlated_[170].centralFrequency=63671875.000000 @@ -1106,6 +1262,8 @@ Observation.DataProducts.Output_Correlated_[170].percentageWritten=0 Observation.DataProducts.Output_Correlated_[170].size=0 Observation.DataProducts.Output_Correlated_[170].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[170].stationSubband=326 +Observation.DataProducts.Output_Correlated_[170].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[170].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[170].subband=170 Observation.DataProducts.Output_Correlated_[171].SAP=0 Observation.DataProducts.Output_Correlated_[171].centralFrequency=63867187.500000 @@ -1120,6 +1278,8 @@ Observation.DataProducts.Output_Correlated_[171].percentageWritten=0 Observation.DataProducts.Output_Correlated_[171].size=0 Observation.DataProducts.Output_Correlated_[171].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[171].stationSubband=327 +Observation.DataProducts.Output_Correlated_[171].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[171].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[171].subband=171 Observation.DataProducts.Output_Correlated_[172].SAP=0 Observation.DataProducts.Output_Correlated_[172].centralFrequency=64062500.000000 @@ -1134,6 +1294,8 @@ Observation.DataProducts.Output_Correlated_[172].percentageWritten=0 Observation.DataProducts.Output_Correlated_[172].size=0 Observation.DataProducts.Output_Correlated_[172].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[172].stationSubband=328 +Observation.DataProducts.Output_Correlated_[172].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[172].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[172].subband=172 Observation.DataProducts.Output_Correlated_[173].SAP=0 Observation.DataProducts.Output_Correlated_[173].centralFrequency=64257812.500000 @@ -1148,6 +1310,8 @@ Observation.DataProducts.Output_Correlated_[173].percentageWritten=0 Observation.DataProducts.Output_Correlated_[173].size=0 Observation.DataProducts.Output_Correlated_[173].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[173].stationSubband=329 +Observation.DataProducts.Output_Correlated_[173].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[173].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[173].subband=173 Observation.DataProducts.Output_Correlated_[174].SAP=0 Observation.DataProducts.Output_Correlated_[174].centralFrequency=64453125.000000 @@ -1162,6 +1326,8 @@ Observation.DataProducts.Output_Correlated_[174].percentageWritten=0 Observation.DataProducts.Output_Correlated_[174].size=0 Observation.DataProducts.Output_Correlated_[174].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[174].stationSubband=330 +Observation.DataProducts.Output_Correlated_[174].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[174].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[174].subband=174 
Observation.DataProducts.Output_Correlated_[175].SAP=0 Observation.DataProducts.Output_Correlated_[175].centralFrequency=64648437.500000 @@ -1176,6 +1342,8 @@ Observation.DataProducts.Output_Correlated_[175].percentageWritten=0 Observation.DataProducts.Output_Correlated_[175].size=0 Observation.DataProducts.Output_Correlated_[175].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[175].stationSubband=331 +Observation.DataProducts.Output_Correlated_[175].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[175].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[175].subband=175 Observation.DataProducts.Output_Correlated_[176].SAP=0 Observation.DataProducts.Output_Correlated_[176].centralFrequency=64843750.000000 @@ -1190,6 +1358,8 @@ Observation.DataProducts.Output_Correlated_[176].percentageWritten=0 Observation.DataProducts.Output_Correlated_[176].size=0 Observation.DataProducts.Output_Correlated_[176].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[176].stationSubband=332 +Observation.DataProducts.Output_Correlated_[176].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[176].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[176].subband=176 Observation.DataProducts.Output_Correlated_[177].SAP=0 Observation.DataProducts.Output_Correlated_[177].centralFrequency=65039062.500000 @@ -1204,6 +1374,8 @@ Observation.DataProducts.Output_Correlated_[177].percentageWritten=0 Observation.DataProducts.Output_Correlated_[177].size=0 Observation.DataProducts.Output_Correlated_[177].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[177].stationSubband=333 +Observation.DataProducts.Output_Correlated_[177].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[177].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[177].subband=177 Observation.DataProducts.Output_Correlated_[178].SAP=0 
Observation.DataProducts.Output_Correlated_[178].centralFrequency=65234375.000000 @@ -1218,6 +1390,8 @@ Observation.DataProducts.Output_Correlated_[178].percentageWritten=0 Observation.DataProducts.Output_Correlated_[178].size=0 Observation.DataProducts.Output_Correlated_[178].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[178].stationSubband=334 +Observation.DataProducts.Output_Correlated_[178].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[178].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[178].subband=178 Observation.DataProducts.Output_Correlated_[179].SAP=0 Observation.DataProducts.Output_Correlated_[179].centralFrequency=65429687.500000 @@ -1232,6 +1406,8 @@ Observation.DataProducts.Output_Correlated_[179].percentageWritten=0 Observation.DataProducts.Output_Correlated_[179].size=0 Observation.DataProducts.Output_Correlated_[179].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[179].stationSubband=335 +Observation.DataProducts.Output_Correlated_[179].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[179].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[179].subband=179 Observation.DataProducts.Output_Correlated_[17].SAP=0 Observation.DataProducts.Output_Correlated_[17].centralFrequency=33789062.500000 @@ -1246,6 +1422,8 @@ Observation.DataProducts.Output_Correlated_[17].percentageWritten=0 Observation.DataProducts.Output_Correlated_[17].size=0 Observation.DataProducts.Output_Correlated_[17].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[17].stationSubband=173 +Observation.DataProducts.Output_Correlated_[17].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[17].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[17].subband=17 Observation.DataProducts.Output_Correlated_[180].SAP=0 Observation.DataProducts.Output_Correlated_[180].centralFrequency=65625000.000000 @@ -1260,6 +1438,8 @@ 
Observation.DataProducts.Output_Correlated_[180].percentageWritten=0 Observation.DataProducts.Output_Correlated_[180].size=0 Observation.DataProducts.Output_Correlated_[180].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[180].stationSubband=336 +Observation.DataProducts.Output_Correlated_[180].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[180].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[180].subband=180 Observation.DataProducts.Output_Correlated_[181].SAP=0 Observation.DataProducts.Output_Correlated_[181].centralFrequency=65820312.500000 @@ -1274,6 +1454,8 @@ Observation.DataProducts.Output_Correlated_[181].percentageWritten=0 Observation.DataProducts.Output_Correlated_[181].size=0 Observation.DataProducts.Output_Correlated_[181].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[181].stationSubband=337 +Observation.DataProducts.Output_Correlated_[181].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[181].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[181].subband=181 Observation.DataProducts.Output_Correlated_[182].SAP=0 Observation.DataProducts.Output_Correlated_[182].centralFrequency=66015625.000000 @@ -1288,6 +1470,8 @@ Observation.DataProducts.Output_Correlated_[182].percentageWritten=0 Observation.DataProducts.Output_Correlated_[182].size=0 Observation.DataProducts.Output_Correlated_[182].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[182].stationSubband=338 +Observation.DataProducts.Output_Correlated_[182].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[182].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[182].subband=182 Observation.DataProducts.Output_Correlated_[183].SAP=0 Observation.DataProducts.Output_Correlated_[183].centralFrequency=66210937.500000 @@ -1302,6 +1486,8 @@ Observation.DataProducts.Output_Correlated_[183].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[183].size=0 Observation.DataProducts.Output_Correlated_[183].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[183].stationSubband=339 +Observation.DataProducts.Output_Correlated_[183].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[183].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[183].subband=183 Observation.DataProducts.Output_Correlated_[184].SAP=0 Observation.DataProducts.Output_Correlated_[184].centralFrequency=66406250.000000 @@ -1316,6 +1502,8 @@ Observation.DataProducts.Output_Correlated_[184].percentageWritten=0 Observation.DataProducts.Output_Correlated_[184].size=0 Observation.DataProducts.Output_Correlated_[184].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[184].stationSubband=340 +Observation.DataProducts.Output_Correlated_[184].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[184].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[184].subband=184 Observation.DataProducts.Output_Correlated_[185].SAP=0 Observation.DataProducts.Output_Correlated_[185].centralFrequency=66601562.500000 @@ -1330,6 +1518,8 @@ Observation.DataProducts.Output_Correlated_[185].percentageWritten=0 Observation.DataProducts.Output_Correlated_[185].size=0 Observation.DataProducts.Output_Correlated_[185].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[185].stationSubband=341 +Observation.DataProducts.Output_Correlated_[185].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[185].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[185].subband=185 Observation.DataProducts.Output_Correlated_[186].SAP=0 Observation.DataProducts.Output_Correlated_[186].centralFrequency=66796875.000000 @@ -1344,6 +1534,8 @@ Observation.DataProducts.Output_Correlated_[186].percentageWritten=0 Observation.DataProducts.Output_Correlated_[186].size=0 
Observation.DataProducts.Output_Correlated_[186].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[186].stationSubband=342 +Observation.DataProducts.Output_Correlated_[186].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[186].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[186].subband=186 Observation.DataProducts.Output_Correlated_[187].SAP=0 Observation.DataProducts.Output_Correlated_[187].centralFrequency=66992187.500000 @@ -1358,6 +1550,8 @@ Observation.DataProducts.Output_Correlated_[187].percentageWritten=0 Observation.DataProducts.Output_Correlated_[187].size=0 Observation.DataProducts.Output_Correlated_[187].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[187].stationSubband=343 +Observation.DataProducts.Output_Correlated_[187].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[187].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[187].subband=187 Observation.DataProducts.Output_Correlated_[188].SAP=0 Observation.DataProducts.Output_Correlated_[188].centralFrequency=67187500.000000 @@ -1372,6 +1566,8 @@ Observation.DataProducts.Output_Correlated_[188].percentageWritten=0 Observation.DataProducts.Output_Correlated_[188].size=0 Observation.DataProducts.Output_Correlated_[188].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[188].stationSubband=344 +Observation.DataProducts.Output_Correlated_[188].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[188].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[188].subband=188 Observation.DataProducts.Output_Correlated_[189].SAP=0 Observation.DataProducts.Output_Correlated_[189].centralFrequency=67382812.500000 @@ -1386,6 +1582,8 @@ Observation.DataProducts.Output_Correlated_[189].percentageWritten=0 Observation.DataProducts.Output_Correlated_[189].size=0 Observation.DataProducts.Output_Correlated_[189].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[189].stationSubband=345 +Observation.DataProducts.Output_Correlated_[189].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[189].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[189].subband=189 Observation.DataProducts.Output_Correlated_[18].SAP=0 Observation.DataProducts.Output_Correlated_[18].centralFrequency=33984375.000000 @@ -1400,6 +1598,8 @@ Observation.DataProducts.Output_Correlated_[18].percentageWritten=0 Observation.DataProducts.Output_Correlated_[18].size=0 Observation.DataProducts.Output_Correlated_[18].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[18].stationSubband=174 +Observation.DataProducts.Output_Correlated_[18].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[18].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[18].subband=18 Observation.DataProducts.Output_Correlated_[190].SAP=0 Observation.DataProducts.Output_Correlated_[190].centralFrequency=67578125.000000 @@ -1414,6 +1614,8 @@ Observation.DataProducts.Output_Correlated_[190].percentageWritten=0 Observation.DataProducts.Output_Correlated_[190].size=0 Observation.DataProducts.Output_Correlated_[190].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[190].stationSubband=346 +Observation.DataProducts.Output_Correlated_[190].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[190].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[190].subband=190 Observation.DataProducts.Output_Correlated_[191].SAP=0 Observation.DataProducts.Output_Correlated_[191].centralFrequency=67773437.500000 @@ -1428,6 +1630,8 @@ Observation.DataProducts.Output_Correlated_[191].percentageWritten=0 Observation.DataProducts.Output_Correlated_[191].size=0 Observation.DataProducts.Output_Correlated_[191].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[191].stationSubband=347 
+Observation.DataProducts.Output_Correlated_[191].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[191].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[191].subband=191 Observation.DataProducts.Output_Correlated_[192].SAP=0 Observation.DataProducts.Output_Correlated_[192].centralFrequency=67968750.000000 @@ -1442,6 +1646,8 @@ Observation.DataProducts.Output_Correlated_[192].percentageWritten=0 Observation.DataProducts.Output_Correlated_[192].size=0 Observation.DataProducts.Output_Correlated_[192].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[192].stationSubband=348 +Observation.DataProducts.Output_Correlated_[192].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[192].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[192].subband=192 Observation.DataProducts.Output_Correlated_[193].SAP=0 Observation.DataProducts.Output_Correlated_[193].centralFrequency=68164062.500000 @@ -1456,6 +1662,8 @@ Observation.DataProducts.Output_Correlated_[193].percentageWritten=0 Observation.DataProducts.Output_Correlated_[193].size=0 Observation.DataProducts.Output_Correlated_[193].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[193].stationSubband=349 +Observation.DataProducts.Output_Correlated_[193].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[193].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[193].subband=193 Observation.DataProducts.Output_Correlated_[194].SAP=0 Observation.DataProducts.Output_Correlated_[194].centralFrequency=68359375.000000 @@ -1470,6 +1678,8 @@ Observation.DataProducts.Output_Correlated_[194].percentageWritten=0 Observation.DataProducts.Output_Correlated_[194].size=0 Observation.DataProducts.Output_Correlated_[194].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[194].stationSubband=350 +Observation.DataProducts.Output_Correlated_[194].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[194].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[194].subband=194 Observation.DataProducts.Output_Correlated_[195].SAP=0 Observation.DataProducts.Output_Correlated_[195].centralFrequency=68554687.500000 @@ -1484,6 +1694,8 @@ Observation.DataProducts.Output_Correlated_[195].percentageWritten=0 Observation.DataProducts.Output_Correlated_[195].size=0 Observation.DataProducts.Output_Correlated_[195].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[195].stationSubband=351 +Observation.DataProducts.Output_Correlated_[195].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[195].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[195].subband=195 Observation.DataProducts.Output_Correlated_[196].SAP=0 Observation.DataProducts.Output_Correlated_[196].centralFrequency=68750000.000000 @@ -1498,6 +1710,8 @@ Observation.DataProducts.Output_Correlated_[196].percentageWritten=0 Observation.DataProducts.Output_Correlated_[196].size=0 Observation.DataProducts.Output_Correlated_[196].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[196].stationSubband=352 +Observation.DataProducts.Output_Correlated_[196].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[196].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[196].subband=196 Observation.DataProducts.Output_Correlated_[197].SAP=0 Observation.DataProducts.Output_Correlated_[197].centralFrequency=68945312.500000 @@ -1512,6 +1726,8 @@ Observation.DataProducts.Output_Correlated_[197].percentageWritten=0 Observation.DataProducts.Output_Correlated_[197].size=0 Observation.DataProducts.Output_Correlated_[197].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[197].stationSubband=353 +Observation.DataProducts.Output_Correlated_[197].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[197].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[197].subband=197 Observation.DataProducts.Output_Correlated_[198].SAP=0 Observation.DataProducts.Output_Correlated_[198].centralFrequency=69140625.000000 @@ -1526,6 +1742,8 @@ Observation.DataProducts.Output_Correlated_[198].percentageWritten=0 Observation.DataProducts.Output_Correlated_[198].size=0 Observation.DataProducts.Output_Correlated_[198].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[198].stationSubband=354 +Observation.DataProducts.Output_Correlated_[198].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[198].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[198].subband=198 Observation.DataProducts.Output_Correlated_[199].SAP=0 Observation.DataProducts.Output_Correlated_[199].centralFrequency=69335937.500000 @@ -1540,6 +1758,8 @@ Observation.DataProducts.Output_Correlated_[199].percentageWritten=0 Observation.DataProducts.Output_Correlated_[199].size=0 Observation.DataProducts.Output_Correlated_[199].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[199].stationSubband=355 +Observation.DataProducts.Output_Correlated_[199].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[199].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[199].subband=199 Observation.DataProducts.Output_Correlated_[19].SAP=0 Observation.DataProducts.Output_Correlated_[19].centralFrequency=34179687.500000 @@ -1554,6 +1774,8 @@ Observation.DataProducts.Output_Correlated_[19].percentageWritten=0 Observation.DataProducts.Output_Correlated_[19].size=0 Observation.DataProducts.Output_Correlated_[19].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[19].stationSubband=175 +Observation.DataProducts.Output_Correlated_[19].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[19].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[19].subband=19 
Observation.DataProducts.Output_Correlated_[1].SAP=0 Observation.DataProducts.Output_Correlated_[1].centralFrequency=30664062.500000 @@ -1568,6 +1790,8 @@ Observation.DataProducts.Output_Correlated_[1].percentageWritten=0 Observation.DataProducts.Output_Correlated_[1].size=0 Observation.DataProducts.Output_Correlated_[1].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[1].stationSubband=157 +Observation.DataProducts.Output_Correlated_[1].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[1].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[1].subband=1 Observation.DataProducts.Output_Correlated_[200].SAP=0 Observation.DataProducts.Output_Correlated_[200].centralFrequency=69531250.000000 @@ -1582,6 +1806,8 @@ Observation.DataProducts.Output_Correlated_[200].percentageWritten=0 Observation.DataProducts.Output_Correlated_[200].size=0 Observation.DataProducts.Output_Correlated_[200].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[200].stationSubband=356 +Observation.DataProducts.Output_Correlated_[200].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[200].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[200].subband=200 Observation.DataProducts.Output_Correlated_[201].SAP=0 Observation.DataProducts.Output_Correlated_[201].centralFrequency=69726562.500000 @@ -1596,6 +1822,8 @@ Observation.DataProducts.Output_Correlated_[201].percentageWritten=0 Observation.DataProducts.Output_Correlated_[201].size=0 Observation.DataProducts.Output_Correlated_[201].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[201].stationSubband=357 +Observation.DataProducts.Output_Correlated_[201].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[201].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[201].subband=201 Observation.DataProducts.Output_Correlated_[202].SAP=0 
Observation.DataProducts.Output_Correlated_[202].centralFrequency=69921875.000000 @@ -1610,6 +1838,8 @@ Observation.DataProducts.Output_Correlated_[202].percentageWritten=0 Observation.DataProducts.Output_Correlated_[202].size=0 Observation.DataProducts.Output_Correlated_[202].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[202].stationSubband=358 +Observation.DataProducts.Output_Correlated_[202].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[202].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[202].subband=202 Observation.DataProducts.Output_Correlated_[203].SAP=0 Observation.DataProducts.Output_Correlated_[203].centralFrequency=70117187.500000 @@ -1624,6 +1854,8 @@ Observation.DataProducts.Output_Correlated_[203].percentageWritten=0 Observation.DataProducts.Output_Correlated_[203].size=0 Observation.DataProducts.Output_Correlated_[203].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[203].stationSubband=359 +Observation.DataProducts.Output_Correlated_[203].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[203].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[203].subband=203 Observation.DataProducts.Output_Correlated_[204].SAP=0 Observation.DataProducts.Output_Correlated_[204].centralFrequency=70312500.000000 @@ -1638,6 +1870,8 @@ Observation.DataProducts.Output_Correlated_[204].percentageWritten=0 Observation.DataProducts.Output_Correlated_[204].size=0 Observation.DataProducts.Output_Correlated_[204].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[204].stationSubband=360 +Observation.DataProducts.Output_Correlated_[204].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[204].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[204].subband=204 Observation.DataProducts.Output_Correlated_[205].SAP=0 Observation.DataProducts.Output_Correlated_[205].centralFrequency=70507812.500000 @@ -1652,6 
+1886,8 @@ Observation.DataProducts.Output_Correlated_[205].percentageWritten=0 Observation.DataProducts.Output_Correlated_[205].size=0 Observation.DataProducts.Output_Correlated_[205].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[205].stationSubband=361 +Observation.DataProducts.Output_Correlated_[205].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[205].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[205].subband=205 Observation.DataProducts.Output_Correlated_[206].SAP=0 Observation.DataProducts.Output_Correlated_[206].centralFrequency=70703125.000000 @@ -1666,6 +1902,8 @@ Observation.DataProducts.Output_Correlated_[206].percentageWritten=0 Observation.DataProducts.Output_Correlated_[206].size=0 Observation.DataProducts.Output_Correlated_[206].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[206].stationSubband=362 +Observation.DataProducts.Output_Correlated_[206].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[206].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[206].subband=206 Observation.DataProducts.Output_Correlated_[207].SAP=0 Observation.DataProducts.Output_Correlated_[207].centralFrequency=70898437.500000 @@ -1680,6 +1918,8 @@ Observation.DataProducts.Output_Correlated_[207].percentageWritten=0 Observation.DataProducts.Output_Correlated_[207].size=0 Observation.DataProducts.Output_Correlated_[207].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[207].stationSubband=363 +Observation.DataProducts.Output_Correlated_[207].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[207].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[207].subband=207 Observation.DataProducts.Output_Correlated_[208].SAP=0 Observation.DataProducts.Output_Correlated_[208].centralFrequency=71093750.000000 @@ -1694,6 +1934,8 @@ Observation.DataProducts.Output_Correlated_[208].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[208].size=0 Observation.DataProducts.Output_Correlated_[208].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[208].stationSubband=364 +Observation.DataProducts.Output_Correlated_[208].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[208].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[208].subband=208 Observation.DataProducts.Output_Correlated_[209].SAP=0 Observation.DataProducts.Output_Correlated_[209].centralFrequency=71289062.500000 @@ -1708,6 +1950,8 @@ Observation.DataProducts.Output_Correlated_[209].percentageWritten=0 Observation.DataProducts.Output_Correlated_[209].size=0 Observation.DataProducts.Output_Correlated_[209].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[209].stationSubband=365 +Observation.DataProducts.Output_Correlated_[209].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[209].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[209].subband=209 Observation.DataProducts.Output_Correlated_[20].SAP=0 Observation.DataProducts.Output_Correlated_[20].centralFrequency=34375000.000000 @@ -1722,6 +1966,8 @@ Observation.DataProducts.Output_Correlated_[20].percentageWritten=0 Observation.DataProducts.Output_Correlated_[20].size=0 Observation.DataProducts.Output_Correlated_[20].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[20].stationSubband=176 +Observation.DataProducts.Output_Correlated_[20].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[20].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[20].subband=20 Observation.DataProducts.Output_Correlated_[210].SAP=0 Observation.DataProducts.Output_Correlated_[210].centralFrequency=71484375.000000 @@ -1736,6 +1982,8 @@ Observation.DataProducts.Output_Correlated_[210].percentageWritten=0 Observation.DataProducts.Output_Correlated_[210].size=0 
Observation.DataProducts.Output_Correlated_[210].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[210].stationSubband=366 +Observation.DataProducts.Output_Correlated_[210].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[210].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[210].subband=210 Observation.DataProducts.Output_Correlated_[211].SAP=0 Observation.DataProducts.Output_Correlated_[211].centralFrequency=71679687.500000 @@ -1750,6 +1998,8 @@ Observation.DataProducts.Output_Correlated_[211].percentageWritten=0 Observation.DataProducts.Output_Correlated_[211].size=0 Observation.DataProducts.Output_Correlated_[211].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[211].stationSubband=367 +Observation.DataProducts.Output_Correlated_[211].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[211].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[211].subband=211 Observation.DataProducts.Output_Correlated_[212].SAP=0 Observation.DataProducts.Output_Correlated_[212].centralFrequency=71875000.000000 @@ -1764,6 +2014,8 @@ Observation.DataProducts.Output_Correlated_[212].percentageWritten=0 Observation.DataProducts.Output_Correlated_[212].size=0 Observation.DataProducts.Output_Correlated_[212].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[212].stationSubband=368 +Observation.DataProducts.Output_Correlated_[212].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[212].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[212].subband=212 Observation.DataProducts.Output_Correlated_[213].SAP=0 Observation.DataProducts.Output_Correlated_[213].centralFrequency=72070312.500000 @@ -1778,6 +2030,8 @@ Observation.DataProducts.Output_Correlated_[213].percentageWritten=0 Observation.DataProducts.Output_Correlated_[213].size=0 Observation.DataProducts.Output_Correlated_[213].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[213].stationSubband=369 +Observation.DataProducts.Output_Correlated_[213].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[213].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[213].subband=213 Observation.DataProducts.Output_Correlated_[214].SAP=0 Observation.DataProducts.Output_Correlated_[214].centralFrequency=72265625.000000 @@ -1792,6 +2046,8 @@ Observation.DataProducts.Output_Correlated_[214].percentageWritten=0 Observation.DataProducts.Output_Correlated_[214].size=0 Observation.DataProducts.Output_Correlated_[214].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[214].stationSubband=370 +Observation.DataProducts.Output_Correlated_[214].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[214].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[214].subband=214 Observation.DataProducts.Output_Correlated_[215].SAP=0 Observation.DataProducts.Output_Correlated_[215].centralFrequency=72460937.500000 @@ -1806,6 +2062,8 @@ Observation.DataProducts.Output_Correlated_[215].percentageWritten=0 Observation.DataProducts.Output_Correlated_[215].size=0 Observation.DataProducts.Output_Correlated_[215].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[215].stationSubband=371 +Observation.DataProducts.Output_Correlated_[215].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[215].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[215].subband=215 Observation.DataProducts.Output_Correlated_[216].SAP=0 Observation.DataProducts.Output_Correlated_[216].centralFrequency=72656250.000000 @@ -1820,6 +2078,8 @@ Observation.DataProducts.Output_Correlated_[216].percentageWritten=0 Observation.DataProducts.Output_Correlated_[216].size=0 Observation.DataProducts.Output_Correlated_[216].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[216].stationSubband=372 
+Observation.DataProducts.Output_Correlated_[216].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[216].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[216].subband=216 Observation.DataProducts.Output_Correlated_[217].SAP=0 Observation.DataProducts.Output_Correlated_[217].centralFrequency=72851562.500000 @@ -1834,6 +2094,8 @@ Observation.DataProducts.Output_Correlated_[217].percentageWritten=0 Observation.DataProducts.Output_Correlated_[217].size=0 Observation.DataProducts.Output_Correlated_[217].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[217].stationSubband=373 +Observation.DataProducts.Output_Correlated_[217].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[217].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[217].subband=217 Observation.DataProducts.Output_Correlated_[218].SAP=0 Observation.DataProducts.Output_Correlated_[218].centralFrequency=73046875.000000 @@ -1848,6 +2110,8 @@ Observation.DataProducts.Output_Correlated_[218].percentageWritten=0 Observation.DataProducts.Output_Correlated_[218].size=0 Observation.DataProducts.Output_Correlated_[218].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[218].stationSubband=374 +Observation.DataProducts.Output_Correlated_[218].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[218].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[218].subband=218 Observation.DataProducts.Output_Correlated_[219].SAP=0 Observation.DataProducts.Output_Correlated_[219].centralFrequency=73242187.500000 @@ -1862,6 +2126,8 @@ Observation.DataProducts.Output_Correlated_[219].percentageWritten=0 Observation.DataProducts.Output_Correlated_[219].size=0 Observation.DataProducts.Output_Correlated_[219].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[219].stationSubband=375 +Observation.DataProducts.Output_Correlated_[219].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[219].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[219].subband=219 Observation.DataProducts.Output_Correlated_[21].SAP=0 Observation.DataProducts.Output_Correlated_[21].centralFrequency=34570312.500000 @@ -1876,6 +2142,8 @@ Observation.DataProducts.Output_Correlated_[21].percentageWritten=0 Observation.DataProducts.Output_Correlated_[21].size=0 Observation.DataProducts.Output_Correlated_[21].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[21].stationSubband=177 +Observation.DataProducts.Output_Correlated_[21].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[21].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[21].subband=21 Observation.DataProducts.Output_Correlated_[220].SAP=0 Observation.DataProducts.Output_Correlated_[220].centralFrequency=73437500.000000 @@ -1890,6 +2158,8 @@ Observation.DataProducts.Output_Correlated_[220].percentageWritten=0 Observation.DataProducts.Output_Correlated_[220].size=0 Observation.DataProducts.Output_Correlated_[220].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[220].stationSubband=376 +Observation.DataProducts.Output_Correlated_[220].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[220].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[220].subband=220 Observation.DataProducts.Output_Correlated_[221].SAP=0 Observation.DataProducts.Output_Correlated_[221].centralFrequency=73632812.500000 @@ -1904,6 +2174,8 @@ Observation.DataProducts.Output_Correlated_[221].percentageWritten=0 Observation.DataProducts.Output_Correlated_[221].size=0 Observation.DataProducts.Output_Correlated_[221].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[221].stationSubband=377 +Observation.DataProducts.Output_Correlated_[221].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[221].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[221].subband=221 Observation.DataProducts.Output_Correlated_[222].SAP=0 Observation.DataProducts.Output_Correlated_[222].centralFrequency=73828125.000000 @@ -1918,6 +2190,8 @@ Observation.DataProducts.Output_Correlated_[222].percentageWritten=0 Observation.DataProducts.Output_Correlated_[222].size=0 Observation.DataProducts.Output_Correlated_[222].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[222].stationSubband=378 +Observation.DataProducts.Output_Correlated_[222].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[222].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[222].subband=222 Observation.DataProducts.Output_Correlated_[223].SAP=0 Observation.DataProducts.Output_Correlated_[223].centralFrequency=74023437.500000 @@ -1932,6 +2206,8 @@ Observation.DataProducts.Output_Correlated_[223].percentageWritten=0 Observation.DataProducts.Output_Correlated_[223].size=0 Observation.DataProducts.Output_Correlated_[223].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[223].stationSubband=379 +Observation.DataProducts.Output_Correlated_[223].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[223].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[223].subband=223 Observation.DataProducts.Output_Correlated_[224].SAP=0 Observation.DataProducts.Output_Correlated_[224].centralFrequency=74218750.000000 @@ -1946,6 +2222,8 @@ Observation.DataProducts.Output_Correlated_[224].percentageWritten=0 Observation.DataProducts.Output_Correlated_[224].size=0 Observation.DataProducts.Output_Correlated_[224].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[224].stationSubband=380 +Observation.DataProducts.Output_Correlated_[224].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[224].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[224].subband=224 
Observation.DataProducts.Output_Correlated_[225].SAP=0 Observation.DataProducts.Output_Correlated_[225].centralFrequency=74414062.500000 @@ -1960,6 +2238,8 @@ Observation.DataProducts.Output_Correlated_[225].percentageWritten=0 Observation.DataProducts.Output_Correlated_[225].size=0 Observation.DataProducts.Output_Correlated_[225].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[225].stationSubband=381 +Observation.DataProducts.Output_Correlated_[225].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[225].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[225].subband=225 Observation.DataProducts.Output_Correlated_[226].SAP=0 Observation.DataProducts.Output_Correlated_[226].centralFrequency=74609375.000000 @@ -1974,6 +2254,8 @@ Observation.DataProducts.Output_Correlated_[226].percentageWritten=0 Observation.DataProducts.Output_Correlated_[226].size=0 Observation.DataProducts.Output_Correlated_[226].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[226].stationSubband=382 +Observation.DataProducts.Output_Correlated_[226].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[226].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[226].subband=226 Observation.DataProducts.Output_Correlated_[227].SAP=0 Observation.DataProducts.Output_Correlated_[227].centralFrequency=74804687.500000 @@ -1988,6 +2270,8 @@ Observation.DataProducts.Output_Correlated_[227].percentageWritten=0 Observation.DataProducts.Output_Correlated_[227].size=0 Observation.DataProducts.Output_Correlated_[227].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[227].stationSubband=383 +Observation.DataProducts.Output_Correlated_[227].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[227].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[227].subband=227 Observation.DataProducts.Output_Correlated_[228].SAP=0 
Observation.DataProducts.Output_Correlated_[228].centralFrequency=75000000.000000 @@ -2002,6 +2286,8 @@ Observation.DataProducts.Output_Correlated_[228].percentageWritten=0 Observation.DataProducts.Output_Correlated_[228].size=0 Observation.DataProducts.Output_Correlated_[228].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[228].stationSubband=384 +Observation.DataProducts.Output_Correlated_[228].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[228].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[228].subband=228 Observation.DataProducts.Output_Correlated_[229].SAP=0 Observation.DataProducts.Output_Correlated_[229].centralFrequency=75195312.500000 @@ -2016,6 +2302,8 @@ Observation.DataProducts.Output_Correlated_[229].percentageWritten=0 Observation.DataProducts.Output_Correlated_[229].size=0 Observation.DataProducts.Output_Correlated_[229].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[229].stationSubband=385 +Observation.DataProducts.Output_Correlated_[229].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[229].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[229].subband=229 Observation.DataProducts.Output_Correlated_[22].SAP=0 Observation.DataProducts.Output_Correlated_[22].centralFrequency=34765625.000000 @@ -2030,6 +2318,8 @@ Observation.DataProducts.Output_Correlated_[22].percentageWritten=0 Observation.DataProducts.Output_Correlated_[22].size=0 Observation.DataProducts.Output_Correlated_[22].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[22].stationSubband=178 +Observation.DataProducts.Output_Correlated_[22].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[22].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[22].subband=22 Observation.DataProducts.Output_Correlated_[230].SAP=0 Observation.DataProducts.Output_Correlated_[230].centralFrequency=75390625.000000 @@ -2044,6 +2334,8 @@ 
Observation.DataProducts.Output_Correlated_[230].percentageWritten=0 Observation.DataProducts.Output_Correlated_[230].size=0 Observation.DataProducts.Output_Correlated_[230].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[230].stationSubband=386 +Observation.DataProducts.Output_Correlated_[230].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[230].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[230].subband=230 Observation.DataProducts.Output_Correlated_[231].SAP=0 Observation.DataProducts.Output_Correlated_[231].centralFrequency=75585937.500000 @@ -2058,6 +2350,8 @@ Observation.DataProducts.Output_Correlated_[231].percentageWritten=0 Observation.DataProducts.Output_Correlated_[231].size=0 Observation.DataProducts.Output_Correlated_[231].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[231].stationSubband=387 +Observation.DataProducts.Output_Correlated_[231].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[231].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[231].subband=231 Observation.DataProducts.Output_Correlated_[232].SAP=0 Observation.DataProducts.Output_Correlated_[232].centralFrequency=75781250.000000 @@ -2072,6 +2366,8 @@ Observation.DataProducts.Output_Correlated_[232].percentageWritten=0 Observation.DataProducts.Output_Correlated_[232].size=0 Observation.DataProducts.Output_Correlated_[232].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[232].stationSubband=388 +Observation.DataProducts.Output_Correlated_[232].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[232].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[232].subband=232 Observation.DataProducts.Output_Correlated_[233].SAP=0 Observation.DataProducts.Output_Correlated_[233].centralFrequency=75976562.500000 @@ -2086,6 +2382,8 @@ Observation.DataProducts.Output_Correlated_[233].percentageWritten=0 
Observation.DataProducts.Output_Correlated_[233].size=0 Observation.DataProducts.Output_Correlated_[233].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[233].stationSubband=389 +Observation.DataProducts.Output_Correlated_[233].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[233].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[233].subband=233 Observation.DataProducts.Output_Correlated_[234].SAP=0 Observation.DataProducts.Output_Correlated_[234].centralFrequency=76171875.000000 @@ -2100,6 +2398,8 @@ Observation.DataProducts.Output_Correlated_[234].percentageWritten=0 Observation.DataProducts.Output_Correlated_[234].size=0 Observation.DataProducts.Output_Correlated_[234].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[234].stationSubband=390 +Observation.DataProducts.Output_Correlated_[234].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[234].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[234].subband=234 Observation.DataProducts.Output_Correlated_[235].SAP=0 Observation.DataProducts.Output_Correlated_[235].centralFrequency=76367187.500000 @@ -2114,6 +2414,8 @@ Observation.DataProducts.Output_Correlated_[235].percentageWritten=0 Observation.DataProducts.Output_Correlated_[235].size=0 Observation.DataProducts.Output_Correlated_[235].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[235].stationSubband=391 +Observation.DataProducts.Output_Correlated_[235].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[235].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[235].subband=235 Observation.DataProducts.Output_Correlated_[236].SAP=0 Observation.DataProducts.Output_Correlated_[236].centralFrequency=76562500.000000 @@ -2128,6 +2430,8 @@ Observation.DataProducts.Output_Correlated_[236].percentageWritten=0 Observation.DataProducts.Output_Correlated_[236].size=0 
Observation.DataProducts.Output_Correlated_[236].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[236].stationSubband=392 +Observation.DataProducts.Output_Correlated_[236].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[236].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[236].subband=236 Observation.DataProducts.Output_Correlated_[237].SAP=0 Observation.DataProducts.Output_Correlated_[237].centralFrequency=76757812.500000 @@ -2142,6 +2446,8 @@ Observation.DataProducts.Output_Correlated_[237].percentageWritten=0 Observation.DataProducts.Output_Correlated_[237].size=0 Observation.DataProducts.Output_Correlated_[237].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[237].stationSubband=393 +Observation.DataProducts.Output_Correlated_[237].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[237].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[237].subband=237 Observation.DataProducts.Output_Correlated_[238].SAP=0 Observation.DataProducts.Output_Correlated_[238].centralFrequency=76953125.000000 @@ -2156,6 +2462,8 @@ Observation.DataProducts.Output_Correlated_[238].percentageWritten=0 Observation.DataProducts.Output_Correlated_[238].size=0 Observation.DataProducts.Output_Correlated_[238].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[238].stationSubband=394 +Observation.DataProducts.Output_Correlated_[238].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[238].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[238].subband=238 Observation.DataProducts.Output_Correlated_[239].SAP=0 Observation.DataProducts.Output_Correlated_[239].centralFrequency=77148437.500000 @@ -2170,6 +2478,8 @@ Observation.DataProducts.Output_Correlated_[239].percentageWritten=0 Observation.DataProducts.Output_Correlated_[239].size=0 Observation.DataProducts.Output_Correlated_[239].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[239].stationSubband=395 +Observation.DataProducts.Output_Correlated_[239].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[239].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[239].subband=239 Observation.DataProducts.Output_Correlated_[23].SAP=0 Observation.DataProducts.Output_Correlated_[23].centralFrequency=34960937.500000 @@ -2184,6 +2494,8 @@ Observation.DataProducts.Output_Correlated_[23].percentageWritten=0 Observation.DataProducts.Output_Correlated_[23].size=0 Observation.DataProducts.Output_Correlated_[23].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[23].stationSubband=179 +Observation.DataProducts.Output_Correlated_[23].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[23].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[23].subband=23 Observation.DataProducts.Output_Correlated_[240].SAP=0 Observation.DataProducts.Output_Correlated_[240].centralFrequency=77343750.000000 @@ -2198,6 +2510,8 @@ Observation.DataProducts.Output_Correlated_[240].percentageWritten=0 Observation.DataProducts.Output_Correlated_[240].size=0 Observation.DataProducts.Output_Correlated_[240].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[240].stationSubband=396 +Observation.DataProducts.Output_Correlated_[240].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[240].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[240].subband=240 Observation.DataProducts.Output_Correlated_[241].SAP=0 Observation.DataProducts.Output_Correlated_[241].centralFrequency=77539062.500000 @@ -2212,6 +2526,8 @@ Observation.DataProducts.Output_Correlated_[241].percentageWritten=0 Observation.DataProducts.Output_Correlated_[241].size=0 Observation.DataProducts.Output_Correlated_[241].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[241].stationSubband=397 
+Observation.DataProducts.Output_Correlated_[241].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[241].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[241].subband=241 Observation.DataProducts.Output_Correlated_[242].SAP=0 Observation.DataProducts.Output_Correlated_[242].centralFrequency=77734375.000000 @@ -2226,6 +2542,8 @@ Observation.DataProducts.Output_Correlated_[242].percentageWritten=0 Observation.DataProducts.Output_Correlated_[242].size=0 Observation.DataProducts.Output_Correlated_[242].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[242].stationSubband=398 +Observation.DataProducts.Output_Correlated_[242].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[242].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[242].subband=242 Observation.DataProducts.Output_Correlated_[243].SAP=0 Observation.DataProducts.Output_Correlated_[243].centralFrequency=77929687.500000 @@ -2240,6 +2558,8 @@ Observation.DataProducts.Output_Correlated_[243].percentageWritten=0 Observation.DataProducts.Output_Correlated_[243].size=0 Observation.DataProducts.Output_Correlated_[243].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[243].stationSubband=399 +Observation.DataProducts.Output_Correlated_[243].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[243].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[243].subband=243 Observation.DataProducts.Output_Correlated_[24].SAP=0 Observation.DataProducts.Output_Correlated_[24].centralFrequency=35156250.000000 @@ -2254,6 +2574,8 @@ Observation.DataProducts.Output_Correlated_[24].percentageWritten=0 Observation.DataProducts.Output_Correlated_[24].size=0 Observation.DataProducts.Output_Correlated_[24].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[24].stationSubband=180 +Observation.DataProducts.Output_Correlated_[24].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[24].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[24].subband=24 Observation.DataProducts.Output_Correlated_[25].SAP=0 Observation.DataProducts.Output_Correlated_[25].centralFrequency=35351562.500000 @@ -2268,6 +2590,8 @@ Observation.DataProducts.Output_Correlated_[25].percentageWritten=0 Observation.DataProducts.Output_Correlated_[25].size=0 Observation.DataProducts.Output_Correlated_[25].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[25].stationSubband=181 +Observation.DataProducts.Output_Correlated_[25].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[25].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[25].subband=25 Observation.DataProducts.Output_Correlated_[26].SAP=0 Observation.DataProducts.Output_Correlated_[26].centralFrequency=35546875.000000 @@ -2282,6 +2606,8 @@ Observation.DataProducts.Output_Correlated_[26].percentageWritten=0 Observation.DataProducts.Output_Correlated_[26].size=0 Observation.DataProducts.Output_Correlated_[26].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[26].stationSubband=182 +Observation.DataProducts.Output_Correlated_[26].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[26].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[26].subband=26 Observation.DataProducts.Output_Correlated_[27].SAP=0 Observation.DataProducts.Output_Correlated_[27].centralFrequency=35742187.500000 @@ -2296,6 +2622,8 @@ Observation.DataProducts.Output_Correlated_[27].percentageWritten=0 Observation.DataProducts.Output_Correlated_[27].size=0 Observation.DataProducts.Output_Correlated_[27].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[27].stationSubband=183 +Observation.DataProducts.Output_Correlated_[27].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[27].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[27].subband=27 Observation.DataProducts.Output_Correlated_[28].SAP=0 Observation.DataProducts.Output_Correlated_[28].centralFrequency=35937500.000000 @@ -2310,6 +2638,8 @@ Observation.DataProducts.Output_Correlated_[28].percentageWritten=0 Observation.DataProducts.Output_Correlated_[28].size=0 Observation.DataProducts.Output_Correlated_[28].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[28].stationSubband=184 +Observation.DataProducts.Output_Correlated_[28].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[28].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[28].subband=28 Observation.DataProducts.Output_Correlated_[29].SAP=0 Observation.DataProducts.Output_Correlated_[29].centralFrequency=36132812.500000 @@ -2324,6 +2654,8 @@ Observation.DataProducts.Output_Correlated_[29].percentageWritten=0 Observation.DataProducts.Output_Correlated_[29].size=0 Observation.DataProducts.Output_Correlated_[29].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[29].stationSubband=185 +Observation.DataProducts.Output_Correlated_[29].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[29].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[29].subband=29 Observation.DataProducts.Output_Correlated_[2].SAP=0 Observation.DataProducts.Output_Correlated_[2].centralFrequency=30859375.000000 @@ -2338,6 +2670,8 @@ Observation.DataProducts.Output_Correlated_[2].percentageWritten=0 Observation.DataProducts.Output_Correlated_[2].size=0 Observation.DataProducts.Output_Correlated_[2].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[2].stationSubband=158 +Observation.DataProducts.Output_Correlated_[2].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[2].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[2].subband=2 Observation.DataProducts.Output_Correlated_[30].SAP=0 
Observation.DataProducts.Output_Correlated_[30].centralFrequency=36328125.000000 @@ -2352,6 +2686,8 @@ Observation.DataProducts.Output_Correlated_[30].percentageWritten=0 Observation.DataProducts.Output_Correlated_[30].size=0 Observation.DataProducts.Output_Correlated_[30].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[30].stationSubband=186 +Observation.DataProducts.Output_Correlated_[30].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[30].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[30].subband=30 Observation.DataProducts.Output_Correlated_[31].SAP=0 Observation.DataProducts.Output_Correlated_[31].centralFrequency=36523437.500000 @@ -2366,6 +2702,8 @@ Observation.DataProducts.Output_Correlated_[31].percentageWritten=0 Observation.DataProducts.Output_Correlated_[31].size=0 Observation.DataProducts.Output_Correlated_[31].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[31].stationSubband=187 +Observation.DataProducts.Output_Correlated_[31].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[31].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[31].subband=31 Observation.DataProducts.Output_Correlated_[32].SAP=0 Observation.DataProducts.Output_Correlated_[32].centralFrequency=36718750.000000 @@ -2380,6 +2718,8 @@ Observation.DataProducts.Output_Correlated_[32].percentageWritten=0 Observation.DataProducts.Output_Correlated_[32].size=0 Observation.DataProducts.Output_Correlated_[32].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[32].stationSubband=188 +Observation.DataProducts.Output_Correlated_[32].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[32].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[32].subband=32 Observation.DataProducts.Output_Correlated_[33].SAP=0 Observation.DataProducts.Output_Correlated_[33].centralFrequency=36914062.500000 @@ -2394,6 +2734,8 @@ 
Observation.DataProducts.Output_Correlated_[33].percentageWritten=0 Observation.DataProducts.Output_Correlated_[33].size=0 Observation.DataProducts.Output_Correlated_[33].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[33].stationSubband=189 +Observation.DataProducts.Output_Correlated_[33].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[33].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[33].subband=33 Observation.DataProducts.Output_Correlated_[34].SAP=0 Observation.DataProducts.Output_Correlated_[34].centralFrequency=37109375.000000 @@ -2408,6 +2750,8 @@ Observation.DataProducts.Output_Correlated_[34].percentageWritten=0 Observation.DataProducts.Output_Correlated_[34].size=0 Observation.DataProducts.Output_Correlated_[34].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[34].stationSubband=190 +Observation.DataProducts.Output_Correlated_[34].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[34].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[34].subband=34 Observation.DataProducts.Output_Correlated_[35].SAP=0 Observation.DataProducts.Output_Correlated_[35].centralFrequency=37304687.500000 @@ -2422,6 +2766,8 @@ Observation.DataProducts.Output_Correlated_[35].percentageWritten=0 Observation.DataProducts.Output_Correlated_[35].size=0 Observation.DataProducts.Output_Correlated_[35].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[35].stationSubband=191 +Observation.DataProducts.Output_Correlated_[35].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[35].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[35].subband=35 Observation.DataProducts.Output_Correlated_[36].SAP=0 Observation.DataProducts.Output_Correlated_[36].centralFrequency=37500000.000000 @@ -2436,6 +2782,8 @@ Observation.DataProducts.Output_Correlated_[36].percentageWritten=0 Observation.DataProducts.Output_Correlated_[36].size=0 
Observation.DataProducts.Output_Correlated_[36].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[36].stationSubband=192 +Observation.DataProducts.Output_Correlated_[36].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[36].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[36].subband=36 Observation.DataProducts.Output_Correlated_[37].SAP=0 Observation.DataProducts.Output_Correlated_[37].centralFrequency=37695312.500000 @@ -2450,6 +2798,8 @@ Observation.DataProducts.Output_Correlated_[37].percentageWritten=0 Observation.DataProducts.Output_Correlated_[37].size=0 Observation.DataProducts.Output_Correlated_[37].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[37].stationSubband=193 +Observation.DataProducts.Output_Correlated_[37].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[37].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[37].subband=37 Observation.DataProducts.Output_Correlated_[38].SAP=0 Observation.DataProducts.Output_Correlated_[38].centralFrequency=37890625.000000 @@ -2464,6 +2814,8 @@ Observation.DataProducts.Output_Correlated_[38].percentageWritten=0 Observation.DataProducts.Output_Correlated_[38].size=0 Observation.DataProducts.Output_Correlated_[38].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[38].stationSubband=194 +Observation.DataProducts.Output_Correlated_[38].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[38].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[38].subband=38 Observation.DataProducts.Output_Correlated_[39].SAP=0 Observation.DataProducts.Output_Correlated_[39].centralFrequency=38085937.500000 @@ -2478,6 +2830,8 @@ Observation.DataProducts.Output_Correlated_[39].percentageWritten=0 Observation.DataProducts.Output_Correlated_[39].size=0 Observation.DataProducts.Output_Correlated_[39].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[39].stationSubband=195 +Observation.DataProducts.Output_Correlated_[39].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[39].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[39].subband=39 Observation.DataProducts.Output_Correlated_[3].SAP=0 Observation.DataProducts.Output_Correlated_[3].centralFrequency=31054687.500000 @@ -2492,6 +2846,8 @@ Observation.DataProducts.Output_Correlated_[3].percentageWritten=0 Observation.DataProducts.Output_Correlated_[3].size=0 Observation.DataProducts.Output_Correlated_[3].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[3].stationSubband=159 +Observation.DataProducts.Output_Correlated_[3].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[3].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[3].subband=3 Observation.DataProducts.Output_Correlated_[40].SAP=0 Observation.DataProducts.Output_Correlated_[40].centralFrequency=38281250.000000 @@ -2506,6 +2862,8 @@ Observation.DataProducts.Output_Correlated_[40].percentageWritten=0 Observation.DataProducts.Output_Correlated_[40].size=0 Observation.DataProducts.Output_Correlated_[40].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[40].stationSubband=196 +Observation.DataProducts.Output_Correlated_[40].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[40].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[40].subband=40 Observation.DataProducts.Output_Correlated_[41].SAP=0 Observation.DataProducts.Output_Correlated_[41].centralFrequency=38476562.500000 @@ -2520,6 +2878,8 @@ Observation.DataProducts.Output_Correlated_[41].percentageWritten=0 Observation.DataProducts.Output_Correlated_[41].size=0 Observation.DataProducts.Output_Correlated_[41].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[41].stationSubband=197 
+Observation.DataProducts.Output_Correlated_[41].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[41].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[41].subband=41 Observation.DataProducts.Output_Correlated_[42].SAP=0 Observation.DataProducts.Output_Correlated_[42].centralFrequency=38671875.000000 @@ -2534,6 +2894,8 @@ Observation.DataProducts.Output_Correlated_[42].percentageWritten=0 Observation.DataProducts.Output_Correlated_[42].size=0 Observation.DataProducts.Output_Correlated_[42].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[42].stationSubband=198 +Observation.DataProducts.Output_Correlated_[42].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[42].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[42].subband=42 Observation.DataProducts.Output_Correlated_[43].SAP=0 Observation.DataProducts.Output_Correlated_[43].centralFrequency=38867187.500000 @@ -2548,6 +2910,8 @@ Observation.DataProducts.Output_Correlated_[43].percentageWritten=0 Observation.DataProducts.Output_Correlated_[43].size=0 Observation.DataProducts.Output_Correlated_[43].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[43].stationSubband=199 +Observation.DataProducts.Output_Correlated_[43].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[43].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[43].subband=43 Observation.DataProducts.Output_Correlated_[44].SAP=0 Observation.DataProducts.Output_Correlated_[44].centralFrequency=39062500.000000 @@ -2562,6 +2926,8 @@ Observation.DataProducts.Output_Correlated_[44].percentageWritten=0 Observation.DataProducts.Output_Correlated_[44].size=0 Observation.DataProducts.Output_Correlated_[44].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[44].stationSubband=200 +Observation.DataProducts.Output_Correlated_[44].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[44].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[44].subband=44 Observation.DataProducts.Output_Correlated_[45].SAP=0 Observation.DataProducts.Output_Correlated_[45].centralFrequency=39257812.500000 @@ -2576,6 +2942,8 @@ Observation.DataProducts.Output_Correlated_[45].percentageWritten=0 Observation.DataProducts.Output_Correlated_[45].size=0 Observation.DataProducts.Output_Correlated_[45].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[45].stationSubband=201 +Observation.DataProducts.Output_Correlated_[45].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[45].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[45].subband=45 Observation.DataProducts.Output_Correlated_[46].SAP=0 Observation.DataProducts.Output_Correlated_[46].centralFrequency=39453125.000000 @@ -2590,6 +2958,8 @@ Observation.DataProducts.Output_Correlated_[46].percentageWritten=0 Observation.DataProducts.Output_Correlated_[46].size=0 Observation.DataProducts.Output_Correlated_[46].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[46].stationSubband=202 +Observation.DataProducts.Output_Correlated_[46].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[46].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[46].subband=46 Observation.DataProducts.Output_Correlated_[47].SAP=0 Observation.DataProducts.Output_Correlated_[47].centralFrequency=39648437.500000 @@ -2604,6 +2974,8 @@ Observation.DataProducts.Output_Correlated_[47].percentageWritten=0 Observation.DataProducts.Output_Correlated_[47].size=0 Observation.DataProducts.Output_Correlated_[47].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[47].stationSubband=203 +Observation.DataProducts.Output_Correlated_[47].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[47].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[47].subband=47 Observation.DataProducts.Output_Correlated_[48].SAP=0 Observation.DataProducts.Output_Correlated_[48].centralFrequency=39843750.000000 @@ -2618,6 +2990,8 @@ Observation.DataProducts.Output_Correlated_[48].percentageWritten=0 Observation.DataProducts.Output_Correlated_[48].size=0 Observation.DataProducts.Output_Correlated_[48].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[48].stationSubband=204 +Observation.DataProducts.Output_Correlated_[48].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[48].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[48].subband=48 Observation.DataProducts.Output_Correlated_[49].SAP=0 Observation.DataProducts.Output_Correlated_[49].centralFrequency=40039062.500000 @@ -2632,6 +3006,8 @@ Observation.DataProducts.Output_Correlated_[49].percentageWritten=0 Observation.DataProducts.Output_Correlated_[49].size=0 Observation.DataProducts.Output_Correlated_[49].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[49].stationSubband=205 +Observation.DataProducts.Output_Correlated_[49].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[49].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[49].subband=49 Observation.DataProducts.Output_Correlated_[4].SAP=0 Observation.DataProducts.Output_Correlated_[4].centralFrequency=31250000.000000 @@ -2646,6 +3022,8 @@ Observation.DataProducts.Output_Correlated_[4].percentageWritten=0 Observation.DataProducts.Output_Correlated_[4].size=0 Observation.DataProducts.Output_Correlated_[4].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[4].stationSubband=160 +Observation.DataProducts.Output_Correlated_[4].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[4].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[4].subband=4 Observation.DataProducts.Output_Correlated_[50].SAP=0 
Observation.DataProducts.Output_Correlated_[50].centralFrequency=40234375.000000 @@ -2660,6 +3038,8 @@ Observation.DataProducts.Output_Correlated_[50].percentageWritten=0 Observation.DataProducts.Output_Correlated_[50].size=0 Observation.DataProducts.Output_Correlated_[50].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[50].stationSubband=206 +Observation.DataProducts.Output_Correlated_[50].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[50].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[50].subband=50 Observation.DataProducts.Output_Correlated_[51].SAP=0 Observation.DataProducts.Output_Correlated_[51].centralFrequency=40429687.500000 @@ -2674,6 +3054,8 @@ Observation.DataProducts.Output_Correlated_[51].percentageWritten=0 Observation.DataProducts.Output_Correlated_[51].size=0 Observation.DataProducts.Output_Correlated_[51].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[51].stationSubband=207 +Observation.DataProducts.Output_Correlated_[51].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[51].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[51].subband=51 Observation.DataProducts.Output_Correlated_[52].SAP=0 Observation.DataProducts.Output_Correlated_[52].centralFrequency=40625000.000000 @@ -2688,6 +3070,8 @@ Observation.DataProducts.Output_Correlated_[52].percentageWritten=0 Observation.DataProducts.Output_Correlated_[52].size=0 Observation.DataProducts.Output_Correlated_[52].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[52].stationSubband=208 +Observation.DataProducts.Output_Correlated_[52].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[52].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[52].subband=52 Observation.DataProducts.Output_Correlated_[53].SAP=0 Observation.DataProducts.Output_Correlated_[53].centralFrequency=40820312.500000 @@ -2702,6 +3086,8 @@ 
Observation.DataProducts.Output_Correlated_[53].percentageWritten=0 Observation.DataProducts.Output_Correlated_[53].size=0 Observation.DataProducts.Output_Correlated_[53].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[53].stationSubband=209 +Observation.DataProducts.Output_Correlated_[53].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[53].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[53].subband=53 Observation.DataProducts.Output_Correlated_[54].SAP=0 Observation.DataProducts.Output_Correlated_[54].centralFrequency=41015625.000000 @@ -2716,6 +3102,8 @@ Observation.DataProducts.Output_Correlated_[54].percentageWritten=0 Observation.DataProducts.Output_Correlated_[54].size=0 Observation.DataProducts.Output_Correlated_[54].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[54].stationSubband=210 +Observation.DataProducts.Output_Correlated_[54].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[54].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[54].subband=54 Observation.DataProducts.Output_Correlated_[55].SAP=0 Observation.DataProducts.Output_Correlated_[55].centralFrequency=41210937.500000 @@ -2730,6 +3118,8 @@ Observation.DataProducts.Output_Correlated_[55].percentageWritten=0 Observation.DataProducts.Output_Correlated_[55].size=0 Observation.DataProducts.Output_Correlated_[55].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[55].stationSubband=211 +Observation.DataProducts.Output_Correlated_[55].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[55].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[55].subband=55 Observation.DataProducts.Output_Correlated_[56].SAP=0 Observation.DataProducts.Output_Correlated_[56].centralFrequency=41406250.000000 @@ -2744,6 +3134,8 @@ Observation.DataProducts.Output_Correlated_[56].percentageWritten=0 Observation.DataProducts.Output_Correlated_[56].size=0 
Observation.DataProducts.Output_Correlated_[56].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[56].stationSubband=212 +Observation.DataProducts.Output_Correlated_[56].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[56].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[56].subband=56 Observation.DataProducts.Output_Correlated_[57].SAP=0 Observation.DataProducts.Output_Correlated_[57].centralFrequency=41601562.500000 @@ -2758,6 +3150,8 @@ Observation.DataProducts.Output_Correlated_[57].percentageWritten=0 Observation.DataProducts.Output_Correlated_[57].size=0 Observation.DataProducts.Output_Correlated_[57].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[57].stationSubband=213 +Observation.DataProducts.Output_Correlated_[57].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[57].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[57].subband=57 Observation.DataProducts.Output_Correlated_[58].SAP=0 Observation.DataProducts.Output_Correlated_[58].centralFrequency=41796875.000000 @@ -2772,6 +3166,8 @@ Observation.DataProducts.Output_Correlated_[58].percentageWritten=0 Observation.DataProducts.Output_Correlated_[58].size=0 Observation.DataProducts.Output_Correlated_[58].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[58].stationSubband=214 +Observation.DataProducts.Output_Correlated_[58].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[58].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[58].subband=58 Observation.DataProducts.Output_Correlated_[59].SAP=0 Observation.DataProducts.Output_Correlated_[59].centralFrequency=41992187.500000 @@ -2786,6 +3182,8 @@ Observation.DataProducts.Output_Correlated_[59].percentageWritten=0 Observation.DataProducts.Output_Correlated_[59].size=0 Observation.DataProducts.Output_Correlated_[59].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[59].stationSubband=215 +Observation.DataProducts.Output_Correlated_[59].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[59].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[59].subband=59 Observation.DataProducts.Output_Correlated_[5].SAP=0 Observation.DataProducts.Output_Correlated_[5].centralFrequency=31445312.500000 @@ -2800,6 +3198,8 @@ Observation.DataProducts.Output_Correlated_[5].percentageWritten=0 Observation.DataProducts.Output_Correlated_[5].size=0 Observation.DataProducts.Output_Correlated_[5].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[5].stationSubband=161 +Observation.DataProducts.Output_Correlated_[5].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[5].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[5].subband=5 Observation.DataProducts.Output_Correlated_[60].SAP=0 Observation.DataProducts.Output_Correlated_[60].centralFrequency=42187500.000000 @@ -2814,6 +3214,8 @@ Observation.DataProducts.Output_Correlated_[60].percentageWritten=0 Observation.DataProducts.Output_Correlated_[60].size=0 Observation.DataProducts.Output_Correlated_[60].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[60].stationSubband=216 +Observation.DataProducts.Output_Correlated_[60].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[60].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[60].subband=60 Observation.DataProducts.Output_Correlated_[61].SAP=0 Observation.DataProducts.Output_Correlated_[61].centralFrequency=42382812.500000 @@ -2828,6 +3230,8 @@ Observation.DataProducts.Output_Correlated_[61].percentageWritten=0 Observation.DataProducts.Output_Correlated_[61].size=0 Observation.DataProducts.Output_Correlated_[61].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[61].stationSubband=217 
+Observation.DataProducts.Output_Correlated_[61].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[61].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[61].subband=61 Observation.DataProducts.Output_Correlated_[62].SAP=0 Observation.DataProducts.Output_Correlated_[62].centralFrequency=42578125.000000 @@ -2842,6 +3246,8 @@ Observation.DataProducts.Output_Correlated_[62].percentageWritten=0 Observation.DataProducts.Output_Correlated_[62].size=0 Observation.DataProducts.Output_Correlated_[62].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[62].stationSubband=218 +Observation.DataProducts.Output_Correlated_[62].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[62].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[62].subband=62 Observation.DataProducts.Output_Correlated_[63].SAP=0 Observation.DataProducts.Output_Correlated_[63].centralFrequency=42773437.500000 @@ -2856,6 +3262,8 @@ Observation.DataProducts.Output_Correlated_[63].percentageWritten=0 Observation.DataProducts.Output_Correlated_[63].size=0 Observation.DataProducts.Output_Correlated_[63].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[63].stationSubband=219 +Observation.DataProducts.Output_Correlated_[63].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[63].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[63].subband=63 Observation.DataProducts.Output_Correlated_[64].SAP=0 Observation.DataProducts.Output_Correlated_[64].centralFrequency=42968750.000000 @@ -2870,6 +3278,8 @@ Observation.DataProducts.Output_Correlated_[64].percentageWritten=0 Observation.DataProducts.Output_Correlated_[64].size=0 Observation.DataProducts.Output_Correlated_[64].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[64].stationSubband=220 +Observation.DataProducts.Output_Correlated_[64].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[64].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[64].subband=64 Observation.DataProducts.Output_Correlated_[65].SAP=0 Observation.DataProducts.Output_Correlated_[65].centralFrequency=43164062.500000 @@ -2884,6 +3294,8 @@ Observation.DataProducts.Output_Correlated_[65].percentageWritten=0 Observation.DataProducts.Output_Correlated_[65].size=0 Observation.DataProducts.Output_Correlated_[65].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[65].stationSubband=221 +Observation.DataProducts.Output_Correlated_[65].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[65].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[65].subband=65 Observation.DataProducts.Output_Correlated_[66].SAP=0 Observation.DataProducts.Output_Correlated_[66].centralFrequency=43359375.000000 @@ -2898,6 +3310,8 @@ Observation.DataProducts.Output_Correlated_[66].percentageWritten=0 Observation.DataProducts.Output_Correlated_[66].size=0 Observation.DataProducts.Output_Correlated_[66].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[66].stationSubband=222 +Observation.DataProducts.Output_Correlated_[66].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[66].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[66].subband=66 Observation.DataProducts.Output_Correlated_[67].SAP=0 Observation.DataProducts.Output_Correlated_[67].centralFrequency=43554687.500000 @@ -2912,6 +3326,8 @@ Observation.DataProducts.Output_Correlated_[67].percentageWritten=0 Observation.DataProducts.Output_Correlated_[67].size=0 Observation.DataProducts.Output_Correlated_[67].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[67].stationSubband=223 +Observation.DataProducts.Output_Correlated_[67].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[67].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[67].subband=67 Observation.DataProducts.Output_Correlated_[68].SAP=0 Observation.DataProducts.Output_Correlated_[68].centralFrequency=43750000.000000 @@ -2926,6 +3342,8 @@ Observation.DataProducts.Output_Correlated_[68].percentageWritten=0 Observation.DataProducts.Output_Correlated_[68].size=0 Observation.DataProducts.Output_Correlated_[68].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[68].stationSubband=224 +Observation.DataProducts.Output_Correlated_[68].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[68].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[68].subband=68 Observation.DataProducts.Output_Correlated_[69].SAP=0 Observation.DataProducts.Output_Correlated_[69].centralFrequency=43945312.500000 @@ -2940,6 +3358,8 @@ Observation.DataProducts.Output_Correlated_[69].percentageWritten=0 Observation.DataProducts.Output_Correlated_[69].size=0 Observation.DataProducts.Output_Correlated_[69].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[69].stationSubband=225 +Observation.DataProducts.Output_Correlated_[69].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[69].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[69].subband=69 Observation.DataProducts.Output_Correlated_[6].SAP=0 Observation.DataProducts.Output_Correlated_[6].centralFrequency=31640625.000000 @@ -2954,6 +3374,8 @@ Observation.DataProducts.Output_Correlated_[6].percentageWritten=0 Observation.DataProducts.Output_Correlated_[6].size=0 Observation.DataProducts.Output_Correlated_[6].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[6].stationSubband=162 +Observation.DataProducts.Output_Correlated_[6].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[6].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[6].subband=6 Observation.DataProducts.Output_Correlated_[70].SAP=0 
Observation.DataProducts.Output_Correlated_[70].centralFrequency=44140625.000000 @@ -2968,6 +3390,8 @@ Observation.DataProducts.Output_Correlated_[70].percentageWritten=0 Observation.DataProducts.Output_Correlated_[70].size=0 Observation.DataProducts.Output_Correlated_[70].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[70].stationSubband=226 +Observation.DataProducts.Output_Correlated_[70].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[70].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[70].subband=70 Observation.DataProducts.Output_Correlated_[71].SAP=0 Observation.DataProducts.Output_Correlated_[71].centralFrequency=44335937.500000 @@ -2982,6 +3406,8 @@ Observation.DataProducts.Output_Correlated_[71].percentageWritten=0 Observation.DataProducts.Output_Correlated_[71].size=0 Observation.DataProducts.Output_Correlated_[71].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[71].stationSubband=227 +Observation.DataProducts.Output_Correlated_[71].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[71].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[71].subband=71 Observation.DataProducts.Output_Correlated_[72].SAP=0 Observation.DataProducts.Output_Correlated_[72].centralFrequency=44531250.000000 @@ -2996,6 +3422,8 @@ Observation.DataProducts.Output_Correlated_[72].percentageWritten=0 Observation.DataProducts.Output_Correlated_[72].size=0 Observation.DataProducts.Output_Correlated_[72].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[72].stationSubband=228 +Observation.DataProducts.Output_Correlated_[72].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[72].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[72].subband=72 Observation.DataProducts.Output_Correlated_[73].SAP=0 Observation.DataProducts.Output_Correlated_[73].centralFrequency=44726562.500000 @@ -3010,6 +3438,8 @@ 
Observation.DataProducts.Output_Correlated_[73].percentageWritten=0 Observation.DataProducts.Output_Correlated_[73].size=0 Observation.DataProducts.Output_Correlated_[73].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[73].stationSubband=229 +Observation.DataProducts.Output_Correlated_[73].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[73].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[73].subband=73 Observation.DataProducts.Output_Correlated_[74].SAP=0 Observation.DataProducts.Output_Correlated_[74].centralFrequency=44921875.000000 @@ -3024,6 +3454,8 @@ Observation.DataProducts.Output_Correlated_[74].percentageWritten=0 Observation.DataProducts.Output_Correlated_[74].size=0 Observation.DataProducts.Output_Correlated_[74].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[74].stationSubband=230 +Observation.DataProducts.Output_Correlated_[74].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[74].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[74].subband=74 Observation.DataProducts.Output_Correlated_[75].SAP=0 Observation.DataProducts.Output_Correlated_[75].centralFrequency=45117187.500000 @@ -3038,6 +3470,8 @@ Observation.DataProducts.Output_Correlated_[75].percentageWritten=0 Observation.DataProducts.Output_Correlated_[75].size=0 Observation.DataProducts.Output_Correlated_[75].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[75].stationSubband=231 +Observation.DataProducts.Output_Correlated_[75].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[75].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[75].subband=75 Observation.DataProducts.Output_Correlated_[76].SAP=0 Observation.DataProducts.Output_Correlated_[76].centralFrequency=45312500.000000 @@ -3052,6 +3486,8 @@ Observation.DataProducts.Output_Correlated_[76].percentageWritten=0 Observation.DataProducts.Output_Correlated_[76].size=0 
Observation.DataProducts.Output_Correlated_[76].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[76].stationSubband=232 +Observation.DataProducts.Output_Correlated_[76].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[76].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[76].subband=76 Observation.DataProducts.Output_Correlated_[77].SAP=0 Observation.DataProducts.Output_Correlated_[77].centralFrequency=45507812.500000 @@ -3066,6 +3502,8 @@ Observation.DataProducts.Output_Correlated_[77].percentageWritten=0 Observation.DataProducts.Output_Correlated_[77].size=0 Observation.DataProducts.Output_Correlated_[77].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[77].stationSubband=233 +Observation.DataProducts.Output_Correlated_[77].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[77].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[77].subband=77 Observation.DataProducts.Output_Correlated_[78].SAP=0 Observation.DataProducts.Output_Correlated_[78].centralFrequency=45703125.000000 @@ -3080,6 +3518,8 @@ Observation.DataProducts.Output_Correlated_[78].percentageWritten=0 Observation.DataProducts.Output_Correlated_[78].size=0 Observation.DataProducts.Output_Correlated_[78].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[78].stationSubband=234 +Observation.DataProducts.Output_Correlated_[78].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[78].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[78].subband=78 Observation.DataProducts.Output_Correlated_[79].SAP=0 Observation.DataProducts.Output_Correlated_[79].centralFrequency=45898437.500000 @@ -3094,6 +3534,8 @@ Observation.DataProducts.Output_Correlated_[79].percentageWritten=0 Observation.DataProducts.Output_Correlated_[79].size=0 Observation.DataProducts.Output_Correlated_[79].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[79].stationSubband=235 +Observation.DataProducts.Output_Correlated_[79].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[79].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[79].subband=79 Observation.DataProducts.Output_Correlated_[7].SAP=0 Observation.DataProducts.Output_Correlated_[7].centralFrequency=31835937.500000 @@ -3108,6 +3550,8 @@ Observation.DataProducts.Output_Correlated_[7].percentageWritten=0 Observation.DataProducts.Output_Correlated_[7].size=0 Observation.DataProducts.Output_Correlated_[7].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[7].stationSubband=163 +Observation.DataProducts.Output_Correlated_[7].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[7].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[7].subband=7 Observation.DataProducts.Output_Correlated_[80].SAP=0 Observation.DataProducts.Output_Correlated_[80].centralFrequency=46093750.000000 @@ -3122,6 +3566,8 @@ Observation.DataProducts.Output_Correlated_[80].percentageWritten=0 Observation.DataProducts.Output_Correlated_[80].size=0 Observation.DataProducts.Output_Correlated_[80].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[80].stationSubband=236 +Observation.DataProducts.Output_Correlated_[80].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[80].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[80].subband=80 Observation.DataProducts.Output_Correlated_[81].SAP=0 Observation.DataProducts.Output_Correlated_[81].centralFrequency=46289062.500000 @@ -3136,6 +3582,8 @@ Observation.DataProducts.Output_Correlated_[81].percentageWritten=0 Observation.DataProducts.Output_Correlated_[81].size=0 Observation.DataProducts.Output_Correlated_[81].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[81].stationSubband=237 
+Observation.DataProducts.Output_Correlated_[81].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[81].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[81].subband=81 Observation.DataProducts.Output_Correlated_[82].SAP=0 Observation.DataProducts.Output_Correlated_[82].centralFrequency=46484375.000000 @@ -3150,6 +3598,8 @@ Observation.DataProducts.Output_Correlated_[82].percentageWritten=0 Observation.DataProducts.Output_Correlated_[82].size=0 Observation.DataProducts.Output_Correlated_[82].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[82].stationSubband=238 +Observation.DataProducts.Output_Correlated_[82].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[82].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[82].subband=82 Observation.DataProducts.Output_Correlated_[83].SAP=0 Observation.DataProducts.Output_Correlated_[83].centralFrequency=46679687.500000 @@ -3164,6 +3614,8 @@ Observation.DataProducts.Output_Correlated_[83].percentageWritten=0 Observation.DataProducts.Output_Correlated_[83].size=0 Observation.DataProducts.Output_Correlated_[83].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[83].stationSubband=239 +Observation.DataProducts.Output_Correlated_[83].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[83].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[83].subband=83 Observation.DataProducts.Output_Correlated_[84].SAP=0 Observation.DataProducts.Output_Correlated_[84].centralFrequency=46875000.000000 @@ -3178,6 +3630,8 @@ Observation.DataProducts.Output_Correlated_[84].percentageWritten=0 Observation.DataProducts.Output_Correlated_[84].size=0 Observation.DataProducts.Output_Correlated_[84].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[84].stationSubband=240 +Observation.DataProducts.Output_Correlated_[84].storageWriter=LOFAR 
+Observation.DataProducts.Output_Correlated_[84].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[84].subband=84 Observation.DataProducts.Output_Correlated_[85].SAP=0 Observation.DataProducts.Output_Correlated_[85].centralFrequency=47070312.500000 @@ -3192,6 +3646,8 @@ Observation.DataProducts.Output_Correlated_[85].percentageWritten=0 Observation.DataProducts.Output_Correlated_[85].size=0 Observation.DataProducts.Output_Correlated_[85].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[85].stationSubband=241 +Observation.DataProducts.Output_Correlated_[85].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[85].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[85].subband=85 Observation.DataProducts.Output_Correlated_[86].SAP=0 Observation.DataProducts.Output_Correlated_[86].centralFrequency=47265625.000000 @@ -3206,6 +3662,8 @@ Observation.DataProducts.Output_Correlated_[86].percentageWritten=0 Observation.DataProducts.Output_Correlated_[86].size=0 Observation.DataProducts.Output_Correlated_[86].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[86].stationSubband=242 +Observation.DataProducts.Output_Correlated_[86].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[86].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[86].subband=86 Observation.DataProducts.Output_Correlated_[87].SAP=0 Observation.DataProducts.Output_Correlated_[87].centralFrequency=47460937.500000 @@ -3220,6 +3678,8 @@ Observation.DataProducts.Output_Correlated_[87].percentageWritten=0 Observation.DataProducts.Output_Correlated_[87].size=0 Observation.DataProducts.Output_Correlated_[87].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[87].stationSubband=243 +Observation.DataProducts.Output_Correlated_[87].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[87].storageWriterVersion=3 
Observation.DataProducts.Output_Correlated_[87].subband=87 Observation.DataProducts.Output_Correlated_[88].SAP=0 Observation.DataProducts.Output_Correlated_[88].centralFrequency=47656250.000000 @@ -3234,6 +3694,8 @@ Observation.DataProducts.Output_Correlated_[88].percentageWritten=0 Observation.DataProducts.Output_Correlated_[88].size=0 Observation.DataProducts.Output_Correlated_[88].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[88].stationSubband=244 +Observation.DataProducts.Output_Correlated_[88].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[88].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[88].subband=88 Observation.DataProducts.Output_Correlated_[89].SAP=0 Observation.DataProducts.Output_Correlated_[89].centralFrequency=47851562.500000 @@ -3248,6 +3710,8 @@ Observation.DataProducts.Output_Correlated_[89].percentageWritten=0 Observation.DataProducts.Output_Correlated_[89].size=0 Observation.DataProducts.Output_Correlated_[89].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[89].stationSubband=245 +Observation.DataProducts.Output_Correlated_[89].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[89].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[89].subband=89 Observation.DataProducts.Output_Correlated_[8].SAP=0 Observation.DataProducts.Output_Correlated_[8].centralFrequency=32031250.000000 @@ -3262,6 +3726,8 @@ Observation.DataProducts.Output_Correlated_[8].percentageWritten=0 Observation.DataProducts.Output_Correlated_[8].size=0 Observation.DataProducts.Output_Correlated_[8].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[8].stationSubband=164 +Observation.DataProducts.Output_Correlated_[8].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[8].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[8].subband=8 Observation.DataProducts.Output_Correlated_[90].SAP=0 
Observation.DataProducts.Output_Correlated_[90].centralFrequency=48046875.000000 @@ -3276,6 +3742,8 @@ Observation.DataProducts.Output_Correlated_[90].percentageWritten=0 Observation.DataProducts.Output_Correlated_[90].size=0 Observation.DataProducts.Output_Correlated_[90].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[90].stationSubband=246 +Observation.DataProducts.Output_Correlated_[90].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[90].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[90].subband=90 Observation.DataProducts.Output_Correlated_[91].SAP=0 Observation.DataProducts.Output_Correlated_[91].centralFrequency=48242187.500000 @@ -3290,6 +3758,8 @@ Observation.DataProducts.Output_Correlated_[91].percentageWritten=0 Observation.DataProducts.Output_Correlated_[91].size=0 Observation.DataProducts.Output_Correlated_[91].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[91].stationSubband=247 +Observation.DataProducts.Output_Correlated_[91].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[91].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[91].subband=91 Observation.DataProducts.Output_Correlated_[92].SAP=0 Observation.DataProducts.Output_Correlated_[92].centralFrequency=48437500.000000 @@ -3304,6 +3774,8 @@ Observation.DataProducts.Output_Correlated_[92].percentageWritten=0 Observation.DataProducts.Output_Correlated_[92].size=0 Observation.DataProducts.Output_Correlated_[92].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[92].stationSubband=248 +Observation.DataProducts.Output_Correlated_[92].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[92].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[92].subband=92 Observation.DataProducts.Output_Correlated_[93].SAP=0 Observation.DataProducts.Output_Correlated_[93].centralFrequency=48632812.500000 @@ -3318,6 +3790,8 @@ 
Observation.DataProducts.Output_Correlated_[93].percentageWritten=0 Observation.DataProducts.Output_Correlated_[93].size=0 Observation.DataProducts.Output_Correlated_[93].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[93].stationSubband=249 +Observation.DataProducts.Output_Correlated_[93].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[93].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[93].subband=93 Observation.DataProducts.Output_Correlated_[94].SAP=0 Observation.DataProducts.Output_Correlated_[94].centralFrequency=48828125.000000 @@ -3332,6 +3806,8 @@ Observation.DataProducts.Output_Correlated_[94].percentageWritten=0 Observation.DataProducts.Output_Correlated_[94].size=0 Observation.DataProducts.Output_Correlated_[94].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[94].stationSubband=250 +Observation.DataProducts.Output_Correlated_[94].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[94].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[94].subband=94 Observation.DataProducts.Output_Correlated_[95].SAP=0 Observation.DataProducts.Output_Correlated_[95].centralFrequency=49023437.500000 @@ -3346,6 +3822,8 @@ Observation.DataProducts.Output_Correlated_[95].percentageWritten=0 Observation.DataProducts.Output_Correlated_[95].size=0 Observation.DataProducts.Output_Correlated_[95].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[95].stationSubband=251 +Observation.DataProducts.Output_Correlated_[95].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[95].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[95].subband=95 Observation.DataProducts.Output_Correlated_[96].SAP=0 Observation.DataProducts.Output_Correlated_[96].centralFrequency=49218750.000000 @@ -3360,6 +3838,8 @@ Observation.DataProducts.Output_Correlated_[96].percentageWritten=0 Observation.DataProducts.Output_Correlated_[96].size=0 
Observation.DataProducts.Output_Correlated_[96].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[96].stationSubband=252 +Observation.DataProducts.Output_Correlated_[96].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[96].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[96].subband=96 Observation.DataProducts.Output_Correlated_[97].SAP=0 Observation.DataProducts.Output_Correlated_[97].centralFrequency=49414062.500000 @@ -3374,6 +3854,8 @@ Observation.DataProducts.Output_Correlated_[97].percentageWritten=0 Observation.DataProducts.Output_Correlated_[97].size=0 Observation.DataProducts.Output_Correlated_[97].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[97].stationSubband=253 +Observation.DataProducts.Output_Correlated_[97].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[97].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[97].subband=97 Observation.DataProducts.Output_Correlated_[98].SAP=0 Observation.DataProducts.Output_Correlated_[98].centralFrequency=49609375.000000 @@ -3388,6 +3870,8 @@ Observation.DataProducts.Output_Correlated_[98].percentageWritten=0 Observation.DataProducts.Output_Correlated_[98].size=0 Observation.DataProducts.Output_Correlated_[98].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[98].stationSubband=254 +Observation.DataProducts.Output_Correlated_[98].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[98].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[98].subband=98 Observation.DataProducts.Output_Correlated_[99].SAP=0 Observation.DataProducts.Output_Correlated_[99].centralFrequency=49804687.500000 @@ -3402,6 +3886,8 @@ Observation.DataProducts.Output_Correlated_[99].percentageWritten=0 Observation.DataProducts.Output_Correlated_[99].size=0 Observation.DataProducts.Output_Correlated_[99].startTime=2014-04-18 15:02:00 
Observation.DataProducts.Output_Correlated_[99].stationSubband=255 +Observation.DataProducts.Output_Correlated_[99].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[99].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[99].subband=99 Observation.DataProducts.Output_Correlated_[9].SAP=0 Observation.DataProducts.Output_Correlated_[9].centralFrequency=32226562.500000 @@ -3416,7 +3902,10 @@ Observation.DataProducts.Output_Correlated_[9].percentageWritten=0 Observation.DataProducts.Output_Correlated_[9].size=0 Observation.DataProducts.Output_Correlated_[9].startTime=2014-04-18 15:02:00 Observation.DataProducts.Output_Correlated_[9].stationSubband=165 +Observation.DataProducts.Output_Correlated_[9].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[9].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[9].subband=9 Observation.DataProducts.nrOfOutput_Beamformed_=0 Observation.DataProducts.nrOfOutput_Correlated_=244 _isCobalt=T +feedback_version=03.01.00 diff --git a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback index aad40607a4c0d983a4b18ffc230e19ec9d73f290..5ad17630bdf7c8bb3e49ea775601049514973a4f 100644 --- a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback +++ b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback @@ -33,6 +33,8 @@ Observation.DataProducts.Output_Beamformed_[0].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[0].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[0].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[0].size=0 +Observation.DataProducts.Output_Beamformed_[0].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[0].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[100].CoherentStokesBeam[0].Offset.angle1=0.000000 
Observation.DataProducts.Output_Beamformed_[100].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[100].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -60,6 +62,8 @@ Observation.DataProducts.Output_Beamformed_[100].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[100].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[100].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[100].size=0 +Observation.DataProducts.Output_Beamformed_[100].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[100].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[101].CoherentStokesBeam[0].Offset.angle1=-0.006338 Observation.DataProducts.Output_Beamformed_[101].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[101].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -87,6 +91,8 @@ Observation.DataProducts.Output_Beamformed_[101].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[101].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[101].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[101].size=0 +Observation.DataProducts.Output_Beamformed_[101].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[101].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[102].CoherentStokesBeam[0].Offset.angle1=-0.012714 Observation.DataProducts.Output_Beamformed_[102].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[102].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -114,6 +120,8 @@ Observation.DataProducts.Output_Beamformed_[102].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[102].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[102].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[102].size=0 
+Observation.DataProducts.Output_Beamformed_[102].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[102].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[103].CoherentStokesBeam[0].Offset.angle1=-0.012791 Observation.DataProducts.Output_Beamformed_[103].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[103].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -141,6 +149,8 @@ Observation.DataProducts.Output_Beamformed_[103].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[103].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[103].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[103].size=0 +Observation.DataProducts.Output_Beamformed_[103].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[103].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[104].CoherentStokesBeam[0].Offset.angle1=-0.012868 Observation.DataProducts.Output_Beamformed_[104].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[104].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -168,6 +178,8 @@ Observation.DataProducts.Output_Beamformed_[104].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[104].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[104].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[104].size=0 +Observation.DataProducts.Output_Beamformed_[104].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[104].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[105].CoherentStokesBeam[0].Offset.angle1=-0.006454 Observation.DataProducts.Output_Beamformed_[105].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[105].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -195,6 +207,8 @@ Observation.DataProducts.Output_Beamformed_[105].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[105].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[105].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[105].size=0 +Observation.DataProducts.Output_Beamformed_[105].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[105].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[106].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[106].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[106].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -222,6 +236,8 @@ Observation.DataProducts.Output_Beamformed_[106].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[106].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[106].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[106].size=0 +Observation.DataProducts.Output_Beamformed_[106].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[106].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[107].CoherentStokesBeam[0].Offset.angle1=0.006493 Observation.DataProducts.Output_Beamformed_[107].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[107].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -249,6 +265,8 @@ Observation.DataProducts.Output_Beamformed_[107].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[107].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[107].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[107].size=0 +Observation.DataProducts.Output_Beamformed_[107].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[107].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[108].CoherentStokesBeam[0].Offset.angle1=0.012947 
Observation.DataProducts.Output_Beamformed_[108].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[108].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -276,6 +294,8 @@ Observation.DataProducts.Output_Beamformed_[108].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[108].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[108].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[108].size=0 +Observation.DataProducts.Output_Beamformed_[108].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[108].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[109].CoherentStokesBeam[0].Offset.angle1=0.019361 Observation.DataProducts.Output_Beamformed_[109].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[109].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -303,6 +323,8 @@ Observation.DataProducts.Output_Beamformed_[109].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[109].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[109].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[109].size=0 +Observation.DataProducts.Output_Beamformed_[109].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[109].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[10].CoherentStokesBeam[0].Offset.angle1=-1.009503 Observation.DataProducts.Output_Beamformed_[10].CoherentStokesBeam[0].Offset.angle2=-0.970080 Observation.DataProducts.Output_Beamformed_[10].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -330,6 +352,8 @@ Observation.DataProducts.Output_Beamformed_[10].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[10].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[10].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[10].size=0 
+Observation.DataProducts.Output_Beamformed_[10].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[10].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[110].CoherentStokesBeam[0].Offset.angle1=0.019244 Observation.DataProducts.Output_Beamformed_[110].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[110].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -357,6 +381,8 @@ Observation.DataProducts.Output_Beamformed_[110].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[110].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[110].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[110].size=0 +Observation.DataProducts.Output_Beamformed_[110].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[110].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[111].CoherentStokesBeam[0].Offset.angle1=0.019128 Observation.DataProducts.Output_Beamformed_[111].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[111].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -384,6 +410,8 @@ Observation.DataProducts.Output_Beamformed_[111].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[111].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[111].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[111].size=0 +Observation.DataProducts.Output_Beamformed_[111].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[111].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[112].CoherentStokesBeam[0].Offset.angle1=0.019015 Observation.DataProducts.Output_Beamformed_[112].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[112].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -411,6 +439,8 @@ Observation.DataProducts.Output_Beamformed_[112].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[112].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[112].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[112].size=0 +Observation.DataProducts.Output_Beamformed_[112].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[112].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[113].CoherentStokesBeam[0].Offset.angle1=0.012639 Observation.DataProducts.Output_Beamformed_[113].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[113].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -438,6 +468,8 @@ Observation.DataProducts.Output_Beamformed_[113].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[113].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[113].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[113].size=0 +Observation.DataProducts.Output_Beamformed_[113].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[113].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[114].CoherentStokesBeam[0].Offset.angle1=0.006301 Observation.DataProducts.Output_Beamformed_[114].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[114].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -465,6 +497,8 @@ Observation.DataProducts.Output_Beamformed_[114].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[114].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[114].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[114].size=0 +Observation.DataProducts.Output_Beamformed_[114].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[114].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[115].CoherentStokesBeam[0].Offset.angle1=0.000000 
Observation.DataProducts.Output_Beamformed_[115].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[115].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -492,6 +526,8 @@ Observation.DataProducts.Output_Beamformed_[115].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[115].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[115].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[115].size=0 +Observation.DataProducts.Output_Beamformed_[115].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[115].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[116].CoherentStokesBeam[0].Offset.angle1=-0.006301 Observation.DataProducts.Output_Beamformed_[116].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[116].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -519,6 +555,8 @@ Observation.DataProducts.Output_Beamformed_[116].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[116].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[116].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[116].size=0 +Observation.DataProducts.Output_Beamformed_[116].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[116].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[117].CoherentStokesBeam[0].Offset.angle1=-0.012639 Observation.DataProducts.Output_Beamformed_[117].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[117].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -546,6 +584,8 @@ Observation.DataProducts.Output_Beamformed_[117].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[117].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[117].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[117].size=0 
+Observation.DataProducts.Output_Beamformed_[117].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[117].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[118].CoherentStokesBeam[0].Offset.angle1=-0.019015 Observation.DataProducts.Output_Beamformed_[118].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[118].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -573,6 +613,8 @@ Observation.DataProducts.Output_Beamformed_[118].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[118].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[118].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[118].size=0 +Observation.DataProducts.Output_Beamformed_[118].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[118].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[119].CoherentStokesBeam[0].Offset.angle1=-0.019128 Observation.DataProducts.Output_Beamformed_[119].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[119].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -600,6 +642,8 @@ Observation.DataProducts.Output_Beamformed_[119].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[119].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[119].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[119].size=0 +Observation.DataProducts.Output_Beamformed_[119].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[119].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[11].CoherentStokesBeam[0].Offset.angle1=-1.026063 Observation.DataProducts.Output_Beamformed_[11].CoherentStokesBeam[0].Offset.angle2=-0.952579 Observation.DataProducts.Output_Beamformed_[11].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -627,6 +671,8 @@ Observation.DataProducts.Output_Beamformed_[11].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[11].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[11].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[11].size=0 +Observation.DataProducts.Output_Beamformed_[11].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[11].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[120].CoherentStokesBeam[0].Offset.angle1=-0.019244 Observation.DataProducts.Output_Beamformed_[120].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[120].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -654,6 +700,8 @@ Observation.DataProducts.Output_Beamformed_[120].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[120].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[120].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[120].size=0 +Observation.DataProducts.Output_Beamformed_[120].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[120].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[121].CoherentStokesBeam[0].Offset.angle1=-0.019361 Observation.DataProducts.Output_Beamformed_[121].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[121].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -681,6 +729,8 @@ Observation.DataProducts.Output_Beamformed_[121].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[121].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[121].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[121].size=0 +Observation.DataProducts.Output_Beamformed_[121].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[121].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[122].CoherentStokesBeam[0].Offset.angle1=-0.012947 Observation.DataProducts.Output_Beamformed_[122].CoherentStokesBeam[0].Offset.angle2=0.008560 
Observation.DataProducts.Output_Beamformed_[122].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -708,6 +758,8 @@ Observation.DataProducts.Output_Beamformed_[122].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[122].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[122].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[122].size=0 +Observation.DataProducts.Output_Beamformed_[122].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[122].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[123].CoherentStokesBeam[0].Offset.angle1=-0.006493 Observation.DataProducts.Output_Beamformed_[123].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[123].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -735,6 +787,8 @@ Observation.DataProducts.Output_Beamformed_[123].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[123].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[123].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[123].size=0 +Observation.DataProducts.Output_Beamformed_[123].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[123].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[124].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[124].CoherentStokesBeam[0].Offset.angle2=0.017120 Observation.DataProducts.Output_Beamformed_[124].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -762,6 +816,8 @@ Observation.DataProducts.Output_Beamformed_[124].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[124].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[124].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[124].size=0 +Observation.DataProducts.Output_Beamformed_[124].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[124].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[125].CoherentStokesBeam[0].Offset.angle1=0.006534 Observation.DataProducts.Output_Beamformed_[125].CoherentStokesBeam[0].Offset.angle2=0.014980 Observation.DataProducts.Output_Beamformed_[125].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -789,6 +845,8 @@ Observation.DataProducts.Output_Beamformed_[125].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[125].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[125].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[125].size=0 +Observation.DataProducts.Output_Beamformed_[125].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[125].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[126].CoherentStokesBeam[0].Offset.angle1=0.013027 Observation.DataProducts.Output_Beamformed_[126].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[126].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -816,6 +874,8 @@ Observation.DataProducts.Output_Beamformed_[126].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[126].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[126].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[126].size=0 +Observation.DataProducts.Output_Beamformed_[126].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[126].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[127].CoherentStokesBeam[0].Offset.angle1=0.019480 Observation.DataProducts.Output_Beamformed_[127].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[127].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -843,6 +903,8 @@ Observation.DataProducts.Output_Beamformed_[127].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[127].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[127].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[127].size=0 +Observation.DataProducts.Output_Beamformed_[127].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[127].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[128].CoherentStokesBeam[0].Offset.angle1=0.025894 Observation.DataProducts.Output_Beamformed_[128].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[128].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -870,6 +932,8 @@ Observation.DataProducts.Output_Beamformed_[128].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[128].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[128].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[128].size=0 +Observation.DataProducts.Output_Beamformed_[128].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[128].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[129].CoherentStokesBeam[0].Offset.angle1=0.025736 Observation.DataProducts.Output_Beamformed_[129].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[129].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -897,6 +961,8 @@ Observation.DataProducts.Output_Beamformed_[129].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[129].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[129].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[129].size=0 +Observation.DataProducts.Output_Beamformed_[129].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[129].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].TAB=12 Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].centralFrequencies=[119531250.0000, 119726562.5000, 119921875.0000, 120117187.5000, 120312500.0000, 
120507812.5000, 120703125.0000, 120898437.5000, 121093750.0000, 121289062.5000, 121484375.0000, 121679687.5000, 121875000.0000, 122070312.5000, 122265625.0000, 122460937.5000, 122656250.0000, 122851562.5000, 123046875.0000, 123242187.5000, 123437500.0000, 123632812.5000, 123828125.0000, 124023437.5000, 124218750.0000, 124414062.5000, 124609375.0000, 124804687.5000, 125000000.0000, 125195312.5000, 125390625.0000, 125585937.5000, 125781250.0000, 125976562.5000, 126171875.0000, 126367187.5000, 126562500.0000, 126757812.5000, 126953125.0000, 127148437.5000, 127343750.0000, 127539062.5000, 127734375.0000, 127929687.5000, 128125000.0000, 128320312.5000, 128515625.0000, 128710937.5000, 128906250.0000, 129101562.5000, 129296875.0000, 129492187.5000, 129687500.0000, 129882812.5000, 130078125.0000, 130273437.5000, 130468750.0000, 130664062.5000, 130859375.0000, 131054687.5000, 131250000.0000, 131445312.5000, 131640625.0000, 131835937.5000, 132031250.0000, 132226562.5000, 132421875.0000, 132617187.5000, 132812500.0000, 133007812.5000, 133203125.0000, 133398437.5000, 133593750.0000, 133789062.5000, 133984375.0000, 134179687.5000, 134375000.0000, 134570312.5000, 134765625.0000, 134960937.5000, 135156250.0000, 135351562.5000, 135546875.0000, 135742187.5000, 135937500.0000, 136132812.5000, 136328125.0000, 136523437.5000, 136718750.0000, 136914062.5000, 137109375.0000, 137304687.5000, 137500000.0000, 137695312.5000, 137890625.0000, 138085937.5000, 138281250.0000, 138476562.5000, 138671875.0000, 138867187.5000, 139062500.0000, 139257812.5000, 139453125.0000, 139648437.5000, 139843750.0000, 140039062.5000, 140234375.0000, 140429687.5000, 140625000.0000, 140820312.5000, 141015625.0000, 141210937.5000, 141406250.0000, 141601562.5000, 141796875.0000, 141992187.5000, 142187500.0000, 142382812.5000, 142578125.0000, 142773437.5000, 142968750.0000, 143164062.5000, 143359375.0000, 143554687.5000, 143750000.0000, 143945312.5000, 144140625.0000, 144335937.5000, 144531250.0000, 144726562.5000, 
144921875.0000, 145117187.5000, 145312500.0000, 145507812.5000, 145703125.0000, 145898437.5000, 146093750.0000, 146289062.5000, 146484375.0000, 146679687.5000, 146875000.0000, 147070312.5000, 147265625.0000, 147460937.5000, 147656250.0000, 147851562.5000, 148046875.0000, 148242187.5000, 148437500.0000, 148632812.5000, 148828125.0000, 149023437.5000, 149218750.0000, 149414062.5000, 149609375.0000, 149804687.5000, 150000000.0000, 150195312.5000, 150390625.0000, 150585937.5000, 150781250.0000, 150976562.5000] @@ -916,6 +982,8 @@ Observation.DataProducts.Output_Beamformed_[12].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[12].nrOfIncoherentStokesBeams=1 Observation.DataProducts.Output_Beamformed_[12].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[12].size=0 +Observation.DataProducts.Output_Beamformed_[12].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[12].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[130].CoherentStokesBeam[0].Offset.angle1=0.025581 Observation.DataProducts.Output_Beamformed_[130].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[130].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -943,6 +1011,8 @@ Observation.DataProducts.Output_Beamformed_[130].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[130].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[130].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[130].size=0 +Observation.DataProducts.Output_Beamformed_[130].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[130].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[131].CoherentStokesBeam[0].Offset.angle1=0.025428 Observation.DataProducts.Output_Beamformed_[131].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[131].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -970,6 +1040,8 @@ 
Observation.DataProducts.Output_Beamformed_[131].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[131].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[131].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[131].size=0 +Observation.DataProducts.Output_Beamformed_[131].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[131].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[132].CoherentStokesBeam[0].Offset.angle1=0.025278 Observation.DataProducts.Output_Beamformed_[132].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[132].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -997,6 +1069,8 @@ Observation.DataProducts.Output_Beamformed_[132].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[132].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[132].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[132].size=0 +Observation.DataProducts.Output_Beamformed_[132].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[132].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[133].CoherentStokesBeam[0].Offset.angle1=0.018903 Observation.DataProducts.Output_Beamformed_[133].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[133].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1024,6 +1098,8 @@ Observation.DataProducts.Output_Beamformed_[133].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[133].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[133].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[133].size=0 +Observation.DataProducts.Output_Beamformed_[133].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[133].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[134].CoherentStokesBeam[0].Offset.angle1=0.012565 
Observation.DataProducts.Output_Beamformed_[134].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[134].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1051,6 +1127,8 @@ Observation.DataProducts.Output_Beamformed_[134].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[134].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[134].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[134].size=0 +Observation.DataProducts.Output_Beamformed_[134].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[134].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[135].CoherentStokesBeam[0].Offset.angle1=0.006264 Observation.DataProducts.Output_Beamformed_[135].CoherentStokesBeam[0].Offset.angle2=-0.014980 Observation.DataProducts.Output_Beamformed_[135].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1078,6 +1156,8 @@ Observation.DataProducts.Output_Beamformed_[135].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[135].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[135].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[135].size=0 +Observation.DataProducts.Output_Beamformed_[135].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[135].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[136].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[136].CoherentStokesBeam[0].Offset.angle2=-0.017120 Observation.DataProducts.Output_Beamformed_[136].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1105,6 +1185,8 @@ Observation.DataProducts.Output_Beamformed_[136].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[136].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[136].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[136].size=0 
+Observation.DataProducts.Output_Beamformed_[136].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[136].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[137].CoherentStokesBeam[0].Offset.angle1=-0.006264 Observation.DataProducts.Output_Beamformed_[137].CoherentStokesBeam[0].Offset.angle2=-0.014980 Observation.DataProducts.Output_Beamformed_[137].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1132,6 +1214,8 @@ Observation.DataProducts.Output_Beamformed_[137].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[137].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[137].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[137].size=0 +Observation.DataProducts.Output_Beamformed_[137].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[137].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[138].CoherentStokesBeam[0].Offset.angle1=-0.012565 Observation.DataProducts.Output_Beamformed_[138].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[138].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1159,6 +1243,8 @@ Observation.DataProducts.Output_Beamformed_[138].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[138].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[138].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[138].size=0 +Observation.DataProducts.Output_Beamformed_[138].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[138].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[139].CoherentStokesBeam[0].Offset.angle1=-0.018903 Observation.DataProducts.Output_Beamformed_[139].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[139].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1186,6 +1272,8 @@ Observation.DataProducts.Output_Beamformed_[139].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[139].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[139].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[139].size=0 +Observation.DataProducts.Output_Beamformed_[139].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[139].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[13].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[13].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[13].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1213,6 +1301,8 @@ Observation.DataProducts.Output_Beamformed_[13].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[13].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[13].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[13].size=0 +Observation.DataProducts.Output_Beamformed_[13].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[13].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[140].CoherentStokesBeam[0].Offset.angle1=-0.025278 Observation.DataProducts.Output_Beamformed_[140].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[140].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1240,6 +1330,8 @@ Observation.DataProducts.Output_Beamformed_[140].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[140].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[140].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[140].size=0 +Observation.DataProducts.Output_Beamformed_[140].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[140].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[141].CoherentStokesBeam[0].Offset.angle1=-0.025428 Observation.DataProducts.Output_Beamformed_[141].CoherentStokesBeam[0].Offset.angle2=-0.004280 
Observation.DataProducts.Output_Beamformed_[141].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1267,6 +1359,8 @@ Observation.DataProducts.Output_Beamformed_[141].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[141].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[141].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[141].size=0 +Observation.DataProducts.Output_Beamformed_[141].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[141].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[142].CoherentStokesBeam[0].Offset.angle1=-0.025581 Observation.DataProducts.Output_Beamformed_[142].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[142].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1294,6 +1388,8 @@ Observation.DataProducts.Output_Beamformed_[142].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[142].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[142].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[142].size=0 +Observation.DataProducts.Output_Beamformed_[142].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[142].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[143].CoherentStokesBeam[0].Offset.angle1=-0.025736 Observation.DataProducts.Output_Beamformed_[143].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[143].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1321,6 +1417,8 @@ Observation.DataProducts.Output_Beamformed_[143].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[143].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[143].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[143].size=0 +Observation.DataProducts.Output_Beamformed_[143].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[143].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[144].CoherentStokesBeam[0].Offset.angle1=-0.025894 Observation.DataProducts.Output_Beamformed_[144].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[144].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1348,6 +1446,8 @@ Observation.DataProducts.Output_Beamformed_[144].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[144].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[144].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[144].size=0 +Observation.DataProducts.Output_Beamformed_[144].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[144].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[145].CoherentStokesBeam[0].Offset.angle1=-0.019480 Observation.DataProducts.Output_Beamformed_[145].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[145].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1375,6 +1475,8 @@ Observation.DataProducts.Output_Beamformed_[145].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[145].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[145].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[145].size=0 +Observation.DataProducts.Output_Beamformed_[145].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[145].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[146].CoherentStokesBeam[0].Offset.angle1=-0.013027 Observation.DataProducts.Output_Beamformed_[146].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[146].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1402,6 +1504,8 @@ Observation.DataProducts.Output_Beamformed_[146].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[146].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[146].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[146].size=0 +Observation.DataProducts.Output_Beamformed_[146].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[146].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[147].CoherentStokesBeam[0].Offset.angle1=-0.006534 Observation.DataProducts.Output_Beamformed_[147].CoherentStokesBeam[0].Offset.angle2=0.014980 Observation.DataProducts.Output_Beamformed_[147].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1429,6 +1533,8 @@ Observation.DataProducts.Output_Beamformed_[147].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[147].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[147].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[147].size=0 +Observation.DataProducts.Output_Beamformed_[147].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[147].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[148].CoherentStokesBeam[0].Offset.angle1=-0.808888 Observation.DataProducts.Output_Beamformed_[148].CoherentStokesBeam[0].Offset.angle2=-0.917577 Observation.DataProducts.Output_Beamformed_[148].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1456,6 +1562,8 @@ Observation.DataProducts.Output_Beamformed_[148].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[148].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[148].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[148].size=0 +Observation.DataProducts.Output_Beamformed_[148].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[148].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[149].CoherentStokesBeam[0].Offset.angle1=-0.756406 Observation.DataProducts.Output_Beamformed_[149].CoherentStokesBeam[0].Offset.angle2=-0.922267 
Observation.DataProducts.Output_Beamformed_[149].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1483,6 +1591,8 @@ Observation.DataProducts.Output_Beamformed_[149].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[149].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[149].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[149].size=0 +Observation.DataProducts.Output_Beamformed_[149].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[149].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[14].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[14].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[14].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1510,6 +1620,8 @@ Observation.DataProducts.Output_Beamformed_[14].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[14].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[14].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[14].size=0 +Observation.DataProducts.Output_Beamformed_[14].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[14].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[150].CoherentStokesBeam[0].Offset.angle1=-0.896619 Observation.DataProducts.Output_Beamformed_[150].CoherentStokesBeam[0].Offset.angle2=-0.935078 Observation.DataProducts.Output_Beamformed_[150].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1537,6 +1649,8 @@ Observation.DataProducts.Output_Beamformed_[150].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[150].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[150].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[150].size=0 +Observation.DataProducts.Output_Beamformed_[150].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[150].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[151].CoherentStokesBeam[0].Offset.angle1=-0.861370 Observation.DataProducts.Output_Beamformed_[151].CoherentStokesBeam[0].Offset.angle2=-0.922267 Observation.DataProducts.Output_Beamformed_[151].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1564,6 +1678,8 @@ Observation.DataProducts.Output_Beamformed_[151].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[151].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[151].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[151].size=0 +Observation.DataProducts.Output_Beamformed_[151].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[151].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[152].CoherentStokesBeam[0].Offset.angle1=-0.721157 Observation.DataProducts.Output_Beamformed_[152].CoherentStokesBeam[0].Offset.angle2=-0.935078 Observation.DataProducts.Output_Beamformed_[152].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1591,6 +1707,8 @@ Observation.DataProducts.Output_Beamformed_[152].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[152].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[152].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[152].size=0 +Observation.DataProducts.Output_Beamformed_[152].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[152].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[153].CoherentStokesBeam[0].Offset.angle1=-0.712167 Observation.DataProducts.Output_Beamformed_[153].CoherentStokesBeam[0].Offset.angle2=-0.952579 Observation.DataProducts.Output_Beamformed_[153].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1618,6 +1736,8 @@ Observation.DataProducts.Output_Beamformed_[153].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[153].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[153].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[153].size=0 +Observation.DataProducts.Output_Beamformed_[153].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[153].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[154].CoherentStokesBeam[0].Offset.angle1=-0.728727 Observation.DataProducts.Output_Beamformed_[154].CoherentStokesBeam[0].Offset.angle2=-0.970080 Observation.DataProducts.Output_Beamformed_[154].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1645,6 +1765,8 @@ Observation.DataProducts.Output_Beamformed_[154].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[154].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[154].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[154].size=0 +Observation.DataProducts.Output_Beamformed_[154].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[154].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[155].CoherentStokesBeam[0].Offset.angle1=-0.764011 Observation.DataProducts.Output_Beamformed_[155].CoherentStokesBeam[0].Offset.angle2=-0.982891 Observation.DataProducts.Output_Beamformed_[155].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1672,6 +1794,8 @@ Observation.DataProducts.Output_Beamformed_[155].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[155].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[155].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[155].size=0 +Observation.DataProducts.Output_Beamformed_[155].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[155].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[156].CoherentStokesBeam[0].Offset.angle1=-0.808888 Observation.DataProducts.Output_Beamformed_[156].CoherentStokesBeam[0].Offset.angle2=-0.987581 Observation.DataProducts.Output_Beamformed_[156].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1699,6 +1823,8 @@ 
Observation.DataProducts.Output_Beamformed_[156].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[156].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[156].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[156].size=0 +Observation.DataProducts.Output_Beamformed_[156].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[156].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[157].CoherentStokesBeam[0].Offset.angle1=-0.853765 Observation.DataProducts.Output_Beamformed_[157].CoherentStokesBeam[0].Offset.angle2=-0.982891 Observation.DataProducts.Output_Beamformed_[157].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1726,6 +1852,8 @@ Observation.DataProducts.Output_Beamformed_[157].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[157].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[157].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[157].size=0 +Observation.DataProducts.Output_Beamformed_[157].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[157].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[158].CoherentStokesBeam[0].Offset.angle1=-0.889049 Observation.DataProducts.Output_Beamformed_[158].CoherentStokesBeam[0].Offset.angle2=-0.970080 Observation.DataProducts.Output_Beamformed_[158].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1753,6 +1881,8 @@ Observation.DataProducts.Output_Beamformed_[158].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[158].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[158].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[158].size=0 +Observation.DataProducts.Output_Beamformed_[158].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[158].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[159].CoherentStokesBeam[0].Offset.angle1=-0.905609 
Observation.DataProducts.Output_Beamformed_[159].CoherentStokesBeam[0].Offset.angle2=-0.952579 Observation.DataProducts.Output_Beamformed_[159].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1780,6 +1910,8 @@ Observation.DataProducts.Output_Beamformed_[159].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[159].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[159].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[159].size=0 +Observation.DataProducts.Output_Beamformed_[159].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[159].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[15].CoherentStokesBeam[0].Offset.angle1=0.006415 Observation.DataProducts.Output_Beamformed_[15].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[15].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1807,6 +1939,8 @@ Observation.DataProducts.Output_Beamformed_[15].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[15].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[15].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[15].size=0 +Observation.DataProducts.Output_Beamformed_[15].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[15].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].SAP=2 Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].TAB=12 Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].centralFrequencies=[119531250.0000, 119726562.5000, 119921875.0000, 120117187.5000, 120312500.0000, 120507812.5000, 120703125.0000, 120898437.5000, 121093750.0000, 121289062.5000, 121484375.0000, 121679687.5000, 121875000.0000, 122070312.5000, 122265625.0000, 122460937.5000, 122656250.0000, 122851562.5000, 123046875.0000, 123242187.5000, 123437500.0000, 123632812.5000, 123828125.0000, 124023437.5000, 
124218750.0000, 124414062.5000, 124609375.0000, 124804687.5000, 125000000.0000, 125195312.5000, 125390625.0000, 125585937.5000, 125781250.0000, 125976562.5000, 126171875.0000, 126367187.5000, 126562500.0000, 126757812.5000, 126953125.0000, 127148437.5000, 127343750.0000, 127539062.5000, 127734375.0000, 127929687.5000, 128125000.0000, 128320312.5000, 128515625.0000, 128710937.5000, 128906250.0000, 129101562.5000, 129296875.0000, 129492187.5000, 129687500.0000, 129882812.5000, 130078125.0000, 130273437.5000, 130468750.0000, 130664062.5000, 130859375.0000, 131054687.5000, 131250000.0000, 131445312.5000, 131640625.0000, 131835937.5000, 132031250.0000, 132226562.5000, 132421875.0000, 132617187.5000, 132812500.0000, 133007812.5000, 133203125.0000, 133398437.5000, 133593750.0000, 133789062.5000, 133984375.0000, 134179687.5000, 134375000.0000, 134570312.5000, 134765625.0000, 134960937.5000, 135156250.0000, 135351562.5000, 135546875.0000, 135742187.5000, 135937500.0000, 136132812.5000, 136328125.0000, 136523437.5000, 136718750.0000, 136914062.5000, 137109375.0000, 137304687.5000, 137500000.0000, 137695312.5000, 137890625.0000, 138085937.5000, 138281250.0000, 138476562.5000, 138671875.0000, 138867187.5000, 139062500.0000, 139257812.5000, 139453125.0000, 139648437.5000, 139843750.0000, 140039062.5000, 140234375.0000, 140429687.5000, 140625000.0000, 140820312.5000, 141015625.0000, 141210937.5000, 141406250.0000, 141601562.5000, 141796875.0000, 141992187.5000, 142187500.0000, 142382812.5000, 142578125.0000, 142773437.5000, 142968750.0000, 143164062.5000, 143359375.0000, 143554687.5000, 143750000.0000, 143945312.5000, 144140625.0000, 144335937.5000, 144531250.0000, 144726562.5000, 144921875.0000, 145117187.5000, 145312500.0000, 145507812.5000, 145703125.0000, 145898437.5000, 146093750.0000, 146289062.5000, 146484375.0000, 146679687.5000, 146875000.0000, 147070312.5000, 147265625.0000, 147460937.5000, 147656250.0000, 147851562.5000, 148046875.0000, 148242187.5000, 148437500.0000, 
148632812.5000, 148828125.0000, 149023437.5000, 149218750.0000, 149414062.5000, 149609375.0000, 149804687.5000, 150000000.0000, 150195312.5000, 150390625.0000, 150585937.5000, 150781250.0000, 150976562.5000] @@ -1826,6 +1960,8 @@ Observation.DataProducts.Output_Beamformed_[160].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[160].nrOfIncoherentStokesBeams=1 Observation.DataProducts.Output_Beamformed_[160].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[160].size=0 +Observation.DataProducts.Output_Beamformed_[160].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[160].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[161].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[161].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[161].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1853,6 +1989,8 @@ Observation.DataProducts.Output_Beamformed_[161].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[161].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[161].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[161].size=0 +Observation.DataProducts.Output_Beamformed_[161].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[161].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[162].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[162].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[162].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1880,6 +2018,8 @@ Observation.DataProducts.Output_Beamformed_[162].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[162].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[162].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[162].size=0 
+Observation.DataProducts.Output_Beamformed_[162].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[162].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[163].CoherentStokesBeam[0].Offset.angle1=0.006415 Observation.DataProducts.Output_Beamformed_[163].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[163].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1907,6 +2047,8 @@ Observation.DataProducts.Output_Beamformed_[163].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[163].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[163].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[163].size=0 +Observation.DataProducts.Output_Beamformed_[163].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[163].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[164].CoherentStokesBeam[0].Offset.angle1=0.006376 Observation.DataProducts.Output_Beamformed_[164].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[164].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1934,6 +2076,8 @@ Observation.DataProducts.Output_Beamformed_[164].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[164].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[164].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[164].size=0 +Observation.DataProducts.Output_Beamformed_[164].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[164].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[165].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[165].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[165].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1961,6 +2105,8 @@ Observation.DataProducts.Output_Beamformed_[165].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[165].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[165].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[165].size=0 +Observation.DataProducts.Output_Beamformed_[165].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[165].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[166].CoherentStokesBeam[0].Offset.angle1=-0.006376 Observation.DataProducts.Output_Beamformed_[166].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[166].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -1988,6 +2134,8 @@ Observation.DataProducts.Output_Beamformed_[166].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[166].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[166].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[166].size=0 +Observation.DataProducts.Output_Beamformed_[166].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[166].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[167].CoherentStokesBeam[0].Offset.angle1=-0.006415 Observation.DataProducts.Output_Beamformed_[167].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[167].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2015,6 +2163,8 @@ Observation.DataProducts.Output_Beamformed_[167].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[167].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[167].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[167].size=0 +Observation.DataProducts.Output_Beamformed_[167].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[167].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[168].CoherentStokesBeam[0].Offset.angle1=0.000000 
Observation.DataProducts.Output_Beamformed_[168].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[168].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2042,6 +2192,8 @@ Observation.DataProducts.Output_Beamformed_[168].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[168].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[168].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[168].size=0 +Observation.DataProducts.Output_Beamformed_[168].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[168].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[169].CoherentStokesBeam[0].Offset.angle1=0.006454 Observation.DataProducts.Output_Beamformed_[169].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[169].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2069,6 +2221,8 @@ Observation.DataProducts.Output_Beamformed_[169].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[169].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[169].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[169].size=0 +Observation.DataProducts.Output_Beamformed_[169].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[169].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[16].CoherentStokesBeam[0].Offset.angle1=0.006376 Observation.DataProducts.Output_Beamformed_[16].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[16].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2096,6 +2250,8 @@ Observation.DataProducts.Output_Beamformed_[16].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[16].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[16].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[16].size=0 
+Observation.DataProducts.Output_Beamformed_[16].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[16].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[170].CoherentStokesBeam[0].Offset.angle1=0.012868 Observation.DataProducts.Output_Beamformed_[170].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[170].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2123,6 +2279,8 @@ Observation.DataProducts.Output_Beamformed_[170].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[170].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[170].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[170].size=0 +Observation.DataProducts.Output_Beamformed_[170].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[170].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[171].CoherentStokesBeam[0].Offset.angle1=0.012791 Observation.DataProducts.Output_Beamformed_[171].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[171].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2150,6 +2308,8 @@ Observation.DataProducts.Output_Beamformed_[171].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[171].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[171].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[171].size=0 +Observation.DataProducts.Output_Beamformed_[171].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[171].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[172].CoherentStokesBeam[0].Offset.angle1=0.012714 Observation.DataProducts.Output_Beamformed_[172].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[172].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2177,6 +2337,8 @@ Observation.DataProducts.Output_Beamformed_[172].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[172].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[172].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[172].size=0 +Observation.DataProducts.Output_Beamformed_[172].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[172].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[173].CoherentStokesBeam[0].Offset.angle1=0.006338 Observation.DataProducts.Output_Beamformed_[173].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[173].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2204,6 +2366,8 @@ Observation.DataProducts.Output_Beamformed_[173].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[173].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[173].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[173].size=0 +Observation.DataProducts.Output_Beamformed_[173].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[173].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[174].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[174].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[174].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2231,6 +2395,8 @@ Observation.DataProducts.Output_Beamformed_[174].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[174].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[174].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[174].size=0 +Observation.DataProducts.Output_Beamformed_[174].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[174].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[175].CoherentStokesBeam[0].Offset.angle1=-0.006338 
Observation.DataProducts.Output_Beamformed_[175].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[175].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2258,6 +2424,8 @@ Observation.DataProducts.Output_Beamformed_[175].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[175].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[175].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[175].size=0 +Observation.DataProducts.Output_Beamformed_[175].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[175].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[176].CoherentStokesBeam[0].Offset.angle1=-0.012714 Observation.DataProducts.Output_Beamformed_[176].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[176].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2285,6 +2453,8 @@ Observation.DataProducts.Output_Beamformed_[176].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[176].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[176].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[176].size=0 +Observation.DataProducts.Output_Beamformed_[176].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[176].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[177].CoherentStokesBeam[0].Offset.angle1=-0.012791 Observation.DataProducts.Output_Beamformed_[177].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[177].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2312,6 +2482,8 @@ Observation.DataProducts.Output_Beamformed_[177].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[177].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[177].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[177].size=0 
+Observation.DataProducts.Output_Beamformed_[177].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[177].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[178].CoherentStokesBeam[0].Offset.angle1=-0.012868 Observation.DataProducts.Output_Beamformed_[178].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[178].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2339,6 +2511,8 @@ Observation.DataProducts.Output_Beamformed_[178].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[178].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[178].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[178].size=0 +Observation.DataProducts.Output_Beamformed_[178].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[178].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[179].CoherentStokesBeam[0].Offset.angle1=-0.006454 Observation.DataProducts.Output_Beamformed_[179].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[179].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2366,6 +2540,8 @@ Observation.DataProducts.Output_Beamformed_[179].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[179].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[179].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[179].size=0 +Observation.DataProducts.Output_Beamformed_[179].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[179].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[17].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[17].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[17].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2393,6 +2569,8 @@ Observation.DataProducts.Output_Beamformed_[17].nrOfFlysEyeBeams=0 
Observation.DataProducts.Output_Beamformed_[17].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[17].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[17].size=0 +Observation.DataProducts.Output_Beamformed_[17].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[17].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[180].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[180].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[180].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2420,6 +2598,8 @@ Observation.DataProducts.Output_Beamformed_[180].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[180].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[180].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[180].size=0 +Observation.DataProducts.Output_Beamformed_[180].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[180].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[181].CoherentStokesBeam[0].Offset.angle1=0.006493 Observation.DataProducts.Output_Beamformed_[181].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[181].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2447,6 +2627,8 @@ Observation.DataProducts.Output_Beamformed_[181].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[181].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[181].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[181].size=0 +Observation.DataProducts.Output_Beamformed_[181].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[181].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[182].CoherentStokesBeam[0].Offset.angle1=0.012947 Observation.DataProducts.Output_Beamformed_[182].CoherentStokesBeam[0].Offset.angle2=0.008560 
Observation.DataProducts.Output_Beamformed_[182].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2474,6 +2656,8 @@ Observation.DataProducts.Output_Beamformed_[182].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[182].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[182].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[182].size=0 +Observation.DataProducts.Output_Beamformed_[182].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[182].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[183].CoherentStokesBeam[0].Offset.angle1=0.019361 Observation.DataProducts.Output_Beamformed_[183].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[183].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2501,6 +2685,8 @@ Observation.DataProducts.Output_Beamformed_[183].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[183].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[183].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[183].size=0 +Observation.DataProducts.Output_Beamformed_[183].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[183].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[184].CoherentStokesBeam[0].Offset.angle1=0.019244 Observation.DataProducts.Output_Beamformed_[184].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[184].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2528,6 +2714,8 @@ Observation.DataProducts.Output_Beamformed_[184].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[184].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[184].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[184].size=0 +Observation.DataProducts.Output_Beamformed_[184].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[184].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[185].CoherentStokesBeam[0].Offset.angle1=0.019128 Observation.DataProducts.Output_Beamformed_[185].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[185].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2555,6 +2743,8 @@ Observation.DataProducts.Output_Beamformed_[185].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[185].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[185].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[185].size=0 +Observation.DataProducts.Output_Beamformed_[185].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[185].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[186].CoherentStokesBeam[0].Offset.angle1=0.019015 Observation.DataProducts.Output_Beamformed_[186].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[186].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2582,6 +2772,8 @@ Observation.DataProducts.Output_Beamformed_[186].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[186].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[186].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[186].size=0 +Observation.DataProducts.Output_Beamformed_[186].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[186].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[187].CoherentStokesBeam[0].Offset.angle1=0.012639 Observation.DataProducts.Output_Beamformed_[187].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[187].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2609,6 +2801,8 @@ Observation.DataProducts.Output_Beamformed_[187].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[187].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[187].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[187].size=0 +Observation.DataProducts.Output_Beamformed_[187].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[187].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[188].CoherentStokesBeam[0].Offset.angle1=0.006301 Observation.DataProducts.Output_Beamformed_[188].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[188].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2636,6 +2830,8 @@ Observation.DataProducts.Output_Beamformed_[188].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[188].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[188].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[188].size=0 +Observation.DataProducts.Output_Beamformed_[188].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[188].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[189].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[189].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[189].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2663,6 +2859,8 @@ Observation.DataProducts.Output_Beamformed_[189].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[189].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[189].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[189].size=0 +Observation.DataProducts.Output_Beamformed_[189].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[189].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[18].CoherentStokesBeam[0].Offset.angle1=-0.006376 Observation.DataProducts.Output_Beamformed_[18].CoherentStokesBeam[0].Offset.angle2=-0.002140 
Observation.DataProducts.Output_Beamformed_[18].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2690,6 +2888,8 @@ Observation.DataProducts.Output_Beamformed_[18].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[18].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[18].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[18].size=0 +Observation.DataProducts.Output_Beamformed_[18].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[18].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[190].CoherentStokesBeam[0].Offset.angle1=-0.006301 Observation.DataProducts.Output_Beamformed_[190].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[190].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2717,6 +2917,8 @@ Observation.DataProducts.Output_Beamformed_[190].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[190].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[190].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[190].size=0 +Observation.DataProducts.Output_Beamformed_[190].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[190].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[191].CoherentStokesBeam[0].Offset.angle1=-0.012639 Observation.DataProducts.Output_Beamformed_[191].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[191].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2744,6 +2946,8 @@ Observation.DataProducts.Output_Beamformed_[191].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[191].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[191].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[191].size=0 +Observation.DataProducts.Output_Beamformed_[191].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[191].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[192].CoherentStokesBeam[0].Offset.angle1=-0.019015 Observation.DataProducts.Output_Beamformed_[192].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[192].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2771,6 +2975,8 @@ Observation.DataProducts.Output_Beamformed_[192].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[192].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[192].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[192].size=0 +Observation.DataProducts.Output_Beamformed_[192].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[192].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[193].CoherentStokesBeam[0].Offset.angle1=-0.019128 Observation.DataProducts.Output_Beamformed_[193].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[193].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2798,6 +3004,8 @@ Observation.DataProducts.Output_Beamformed_[193].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[193].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[193].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[193].size=0 +Observation.DataProducts.Output_Beamformed_[193].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[193].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[194].CoherentStokesBeam[0].Offset.angle1=-0.019244 Observation.DataProducts.Output_Beamformed_[194].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[194].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2825,6 +3033,8 @@ Observation.DataProducts.Output_Beamformed_[194].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[194].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[194].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[194].size=0 +Observation.DataProducts.Output_Beamformed_[194].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[194].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[195].CoherentStokesBeam[0].Offset.angle1=-0.019361 Observation.DataProducts.Output_Beamformed_[195].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[195].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2852,6 +3062,8 @@ Observation.DataProducts.Output_Beamformed_[195].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[195].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[195].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[195].size=0 +Observation.DataProducts.Output_Beamformed_[195].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[195].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[196].CoherentStokesBeam[0].Offset.angle1=-0.012947 Observation.DataProducts.Output_Beamformed_[196].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[196].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2879,6 +3091,8 @@ Observation.DataProducts.Output_Beamformed_[196].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[196].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[196].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[196].size=0 +Observation.DataProducts.Output_Beamformed_[196].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[196].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[197].CoherentStokesBeam[0].Offset.angle1=-0.006493 Observation.DataProducts.Output_Beamformed_[197].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[197].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2906,6 +3120,8 @@ 
Observation.DataProducts.Output_Beamformed_[197].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[197].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[197].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[197].size=0 +Observation.DataProducts.Output_Beamformed_[197].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[197].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[198].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[198].CoherentStokesBeam[0].Offset.angle2=0.017120 Observation.DataProducts.Output_Beamformed_[198].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2933,6 +3149,8 @@ Observation.DataProducts.Output_Beamformed_[198].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[198].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[198].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[198].size=0 +Observation.DataProducts.Output_Beamformed_[198].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[198].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[199].CoherentStokesBeam[0].Offset.angle1=0.006534 Observation.DataProducts.Output_Beamformed_[199].CoherentStokesBeam[0].Offset.angle2=0.014980 Observation.DataProducts.Output_Beamformed_[199].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2960,6 +3178,8 @@ Observation.DataProducts.Output_Beamformed_[199].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[199].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[199].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[199].size=0 +Observation.DataProducts.Output_Beamformed_[199].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[199].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[19].CoherentStokesBeam[0].Offset.angle1=-0.006415 
Observation.DataProducts.Output_Beamformed_[19].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[19].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -2987,6 +3207,8 @@ Observation.DataProducts.Output_Beamformed_[19].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[19].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[19].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[19].size=0 +Observation.DataProducts.Output_Beamformed_[19].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[19].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[1].CoherentStokesBeam[0].Offset.angle1=-0.876860 Observation.DataProducts.Output_Beamformed_[1].CoherentStokesBeam[0].Offset.angle2=-0.922267 Observation.DataProducts.Output_Beamformed_[1].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3014,6 +3236,8 @@ Observation.DataProducts.Output_Beamformed_[1].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[1].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[1].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[1].size=0 +Observation.DataProducts.Output_Beamformed_[1].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[1].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[200].CoherentStokesBeam[0].Offset.angle1=0.013027 Observation.DataProducts.Output_Beamformed_[200].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[200].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3041,6 +3265,8 @@ Observation.DataProducts.Output_Beamformed_[200].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[200].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[200].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[200].size=0 +Observation.DataProducts.Output_Beamformed_[200].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[200].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[201].CoherentStokesBeam[0].Offset.angle1=0.019480 Observation.DataProducts.Output_Beamformed_[201].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[201].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3068,6 +3294,8 @@ Observation.DataProducts.Output_Beamformed_[201].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[201].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[201].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[201].size=0 +Observation.DataProducts.Output_Beamformed_[201].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[201].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[202].CoherentStokesBeam[0].Offset.angle1=0.025894 Observation.DataProducts.Output_Beamformed_[202].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[202].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3095,6 +3323,8 @@ Observation.DataProducts.Output_Beamformed_[202].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[202].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[202].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[202].size=0 +Observation.DataProducts.Output_Beamformed_[202].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[202].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[203].CoherentStokesBeam[0].Offset.angle1=0.025736 Observation.DataProducts.Output_Beamformed_[203].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[203].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3122,6 +3352,8 @@ Observation.DataProducts.Output_Beamformed_[203].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[203].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[203].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[203].size=0 +Observation.DataProducts.Output_Beamformed_[203].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[203].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[204].CoherentStokesBeam[0].Offset.angle1=0.025581 Observation.DataProducts.Output_Beamformed_[204].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[204].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3149,6 +3381,8 @@ Observation.DataProducts.Output_Beamformed_[204].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[204].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[204].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[204].size=0 +Observation.DataProducts.Output_Beamformed_[204].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[204].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[205].CoherentStokesBeam[0].Offset.angle1=0.025428 Observation.DataProducts.Output_Beamformed_[205].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[205].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3176,6 +3410,8 @@ Observation.DataProducts.Output_Beamformed_[205].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[205].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[205].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[205].size=0 +Observation.DataProducts.Output_Beamformed_[205].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[205].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[206].CoherentStokesBeam[0].Offset.angle1=0.025278 Observation.DataProducts.Output_Beamformed_[206].CoherentStokesBeam[0].Offset.angle2=-0.008560 
Observation.DataProducts.Output_Beamformed_[206].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3203,6 +3439,8 @@ Observation.DataProducts.Output_Beamformed_[206].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[206].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[206].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[206].size=0 +Observation.DataProducts.Output_Beamformed_[206].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[206].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[207].CoherentStokesBeam[0].Offset.angle1=0.018903 Observation.DataProducts.Output_Beamformed_[207].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[207].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3230,6 +3468,8 @@ Observation.DataProducts.Output_Beamformed_[207].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[207].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[207].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[207].size=0 +Observation.DataProducts.Output_Beamformed_[207].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[207].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[208].CoherentStokesBeam[0].Offset.angle1=0.012565 Observation.DataProducts.Output_Beamformed_[208].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[208].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3257,6 +3497,8 @@ Observation.DataProducts.Output_Beamformed_[208].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[208].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[208].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[208].size=0 +Observation.DataProducts.Output_Beamformed_[208].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[208].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[209].CoherentStokesBeam[0].Offset.angle1=0.006264 Observation.DataProducts.Output_Beamformed_[209].CoherentStokesBeam[0].Offset.angle2=-0.014980 Observation.DataProducts.Output_Beamformed_[209].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3284,6 +3526,8 @@ Observation.DataProducts.Output_Beamformed_[209].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[209].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[209].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[209].size=0 +Observation.DataProducts.Output_Beamformed_[209].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[209].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[20].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[20].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[20].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3311,6 +3555,8 @@ Observation.DataProducts.Output_Beamformed_[20].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[20].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[20].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[20].size=0 +Observation.DataProducts.Output_Beamformed_[20].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[20].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[210].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[210].CoherentStokesBeam[0].Offset.angle2=-0.017120 Observation.DataProducts.Output_Beamformed_[210].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3338,6 +3584,8 @@ Observation.DataProducts.Output_Beamformed_[210].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[210].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[210].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[210].size=0 +Observation.DataProducts.Output_Beamformed_[210].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[210].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[211].CoherentStokesBeam[0].Offset.angle1=-0.006264 Observation.DataProducts.Output_Beamformed_[211].CoherentStokesBeam[0].Offset.angle2=-0.014980 Observation.DataProducts.Output_Beamformed_[211].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3365,6 +3613,8 @@ Observation.DataProducts.Output_Beamformed_[211].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[211].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[211].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[211].size=0 +Observation.DataProducts.Output_Beamformed_[211].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[211].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[212].CoherentStokesBeam[0].Offset.angle1=-0.012565 Observation.DataProducts.Output_Beamformed_[212].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[212].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3392,6 +3642,8 @@ Observation.DataProducts.Output_Beamformed_[212].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[212].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[212].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[212].size=0 +Observation.DataProducts.Output_Beamformed_[212].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[212].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[213].CoherentStokesBeam[0].Offset.angle1=-0.018903 Observation.DataProducts.Output_Beamformed_[213].CoherentStokesBeam[0].Offset.angle2=-0.010700 
Observation.DataProducts.Output_Beamformed_[213].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3419,6 +3671,8 @@ Observation.DataProducts.Output_Beamformed_[213].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[213].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[213].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[213].size=0 +Observation.DataProducts.Output_Beamformed_[213].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[213].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[214].CoherentStokesBeam[0].Offset.angle1=-0.025278 Observation.DataProducts.Output_Beamformed_[214].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[214].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3446,6 +3700,8 @@ Observation.DataProducts.Output_Beamformed_[214].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[214].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[214].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[214].size=0 +Observation.DataProducts.Output_Beamformed_[214].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[214].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[215].CoherentStokesBeam[0].Offset.angle1=-0.025428 Observation.DataProducts.Output_Beamformed_[215].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[215].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3473,6 +3729,8 @@ Observation.DataProducts.Output_Beamformed_[215].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[215].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[215].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[215].size=0 +Observation.DataProducts.Output_Beamformed_[215].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[215].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[216].CoherentStokesBeam[0].Offset.angle1=-0.025581 Observation.DataProducts.Output_Beamformed_[216].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[216].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3500,6 +3758,8 @@ Observation.DataProducts.Output_Beamformed_[216].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[216].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[216].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[216].size=0 +Observation.DataProducts.Output_Beamformed_[216].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[216].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[217].CoherentStokesBeam[0].Offset.angle1=-0.025736 Observation.DataProducts.Output_Beamformed_[217].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[217].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3527,6 +3787,8 @@ Observation.DataProducts.Output_Beamformed_[217].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[217].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[217].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[217].size=0 +Observation.DataProducts.Output_Beamformed_[217].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[217].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[218].CoherentStokesBeam[0].Offset.angle1=-0.025894 Observation.DataProducts.Output_Beamformed_[218].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[218].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3554,6 +3816,8 @@ Observation.DataProducts.Output_Beamformed_[218].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[218].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[218].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[218].size=0 +Observation.DataProducts.Output_Beamformed_[218].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[218].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[219].CoherentStokesBeam[0].Offset.angle1=-0.019480 Observation.DataProducts.Output_Beamformed_[219].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[219].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3581,6 +3845,8 @@ Observation.DataProducts.Output_Beamformed_[219].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[219].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[219].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[219].size=0 +Observation.DataProducts.Output_Beamformed_[219].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[219].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[21].CoherentStokesBeam[0].Offset.angle1=0.006454 Observation.DataProducts.Output_Beamformed_[21].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[21].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3608,6 +3874,8 @@ Observation.DataProducts.Output_Beamformed_[21].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[21].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[21].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[21].size=0 +Observation.DataProducts.Output_Beamformed_[21].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[21].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[220].CoherentStokesBeam[0].Offset.angle1=-0.013027 Observation.DataProducts.Output_Beamformed_[220].CoherentStokesBeam[0].Offset.angle2=0.012840 
Observation.DataProducts.Output_Beamformed_[220].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3635,6 +3903,8 @@ Observation.DataProducts.Output_Beamformed_[220].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[220].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[220].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[220].size=0 +Observation.DataProducts.Output_Beamformed_[220].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[220].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[221].CoherentStokesBeam[0].Offset.angle1=-0.006534 Observation.DataProducts.Output_Beamformed_[221].CoherentStokesBeam[0].Offset.angle2=0.014980 Observation.DataProducts.Output_Beamformed_[221].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3662,6 +3932,8 @@ Observation.DataProducts.Output_Beamformed_[221].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[221].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[221].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[221].size=0 +Observation.DataProducts.Output_Beamformed_[221].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[221].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[22].CoherentStokesBeam[0].Offset.angle1=0.012868 Observation.DataProducts.Output_Beamformed_[22].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[22].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3689,6 +3961,8 @@ Observation.DataProducts.Output_Beamformed_[22].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[22].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[22].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[22].size=0 +Observation.DataProducts.Output_Beamformed_[22].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[22].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[23].CoherentStokesBeam[0].Offset.angle1=0.012791 Observation.DataProducts.Output_Beamformed_[23].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[23].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3716,6 +3990,8 @@ Observation.DataProducts.Output_Beamformed_[23].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[23].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[23].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[23].size=0 +Observation.DataProducts.Output_Beamformed_[23].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[23].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[24].CoherentStokesBeam[0].Offset.angle1=0.012714 Observation.DataProducts.Output_Beamformed_[24].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[24].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3743,6 +4019,8 @@ Observation.DataProducts.Output_Beamformed_[24].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[24].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[24].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[24].size=0 +Observation.DataProducts.Output_Beamformed_[24].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[24].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[25].CoherentStokesBeam[0].Offset.angle1=0.006338 Observation.DataProducts.Output_Beamformed_[25].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[25].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3770,6 +4048,8 @@ Observation.DataProducts.Output_Beamformed_[25].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[25].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[25].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[25].size=0 +Observation.DataProducts.Output_Beamformed_[25].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[25].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[26].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[26].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[26].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3797,6 +4077,8 @@ Observation.DataProducts.Output_Beamformed_[26].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[26].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[26].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[26].size=0 +Observation.DataProducts.Output_Beamformed_[26].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[26].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[27].CoherentStokesBeam[0].Offset.angle1=-0.006338 Observation.DataProducts.Output_Beamformed_[27].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[27].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3824,6 +4106,8 @@ Observation.DataProducts.Output_Beamformed_[27].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[27].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[27].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[27].size=0 +Observation.DataProducts.Output_Beamformed_[27].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[27].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[28].CoherentStokesBeam[0].Offset.angle1=-0.012714 Observation.DataProducts.Output_Beamformed_[28].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[28].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3851,6 +4135,8 @@ 
Observation.DataProducts.Output_Beamformed_[28].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[28].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[28].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[28].size=0 +Observation.DataProducts.Output_Beamformed_[28].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[28].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[29].CoherentStokesBeam[0].Offset.angle1=-0.012791 Observation.DataProducts.Output_Beamformed_[29].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[29].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3878,6 +4164,8 @@ Observation.DataProducts.Output_Beamformed_[29].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[29].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[29].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[29].size=0 +Observation.DataProducts.Output_Beamformed_[29].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[29].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[2].CoherentStokesBeam[0].Offset.angle1=-1.017073 Observation.DataProducts.Output_Beamformed_[2].CoherentStokesBeam[0].Offset.angle2=-0.935078 Observation.DataProducts.Output_Beamformed_[2].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3905,6 +4193,8 @@ Observation.DataProducts.Output_Beamformed_[2].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[2].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[2].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[2].size=0 +Observation.DataProducts.Output_Beamformed_[2].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[2].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[30].CoherentStokesBeam[0].Offset.angle1=-0.012868 
Observation.DataProducts.Output_Beamformed_[30].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[30].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3932,6 +4222,8 @@ Observation.DataProducts.Output_Beamformed_[30].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[30].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[30].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[30].size=0 +Observation.DataProducts.Output_Beamformed_[30].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[30].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[31].CoherentStokesBeam[0].Offset.angle1=-0.006454 Observation.DataProducts.Output_Beamformed_[31].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[31].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3959,6 +4251,8 @@ Observation.DataProducts.Output_Beamformed_[31].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[31].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[31].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[31].size=0 +Observation.DataProducts.Output_Beamformed_[31].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[31].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[32].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[32].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[32].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -3986,6 +4280,8 @@ Observation.DataProducts.Output_Beamformed_[32].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[32].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[32].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[32].size=0 +Observation.DataProducts.Output_Beamformed_[32].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[32].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[33].CoherentStokesBeam[0].Offset.angle1=0.006493 Observation.DataProducts.Output_Beamformed_[33].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[33].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4013,6 +4309,8 @@ Observation.DataProducts.Output_Beamformed_[33].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[33].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[33].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[33].size=0 +Observation.DataProducts.Output_Beamformed_[33].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[33].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[34].CoherentStokesBeam[0].Offset.angle1=0.012947 Observation.DataProducts.Output_Beamformed_[34].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[34].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4040,6 +4338,8 @@ Observation.DataProducts.Output_Beamformed_[34].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[34].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[34].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[34].size=0 +Observation.DataProducts.Output_Beamformed_[34].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[34].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[35].CoherentStokesBeam[0].Offset.angle1=0.019361 Observation.DataProducts.Output_Beamformed_[35].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[35].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4067,6 +4367,8 @@ Observation.DataProducts.Output_Beamformed_[35].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[35].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[35].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[35].size=0 +Observation.DataProducts.Output_Beamformed_[35].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[35].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[36].CoherentStokesBeam[0].Offset.angle1=0.019244 Observation.DataProducts.Output_Beamformed_[36].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[36].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4094,6 +4396,8 @@ Observation.DataProducts.Output_Beamformed_[36].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[36].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[36].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[36].size=0 +Observation.DataProducts.Output_Beamformed_[36].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[36].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[37].CoherentStokesBeam[0].Offset.angle1=0.019128 Observation.DataProducts.Output_Beamformed_[37].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[37].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4121,6 +4425,8 @@ Observation.DataProducts.Output_Beamformed_[37].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[37].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[37].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[37].size=0 +Observation.DataProducts.Output_Beamformed_[37].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[37].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[38].CoherentStokesBeam[0].Offset.angle1=0.019015 Observation.DataProducts.Output_Beamformed_[38].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[38].CoherentStokesBeam[0].Offset.coordType=RA-DEC 
@@ -4148,6 +4454,8 @@ Observation.DataProducts.Output_Beamformed_[38].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[38].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[38].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[38].size=0 +Observation.DataProducts.Output_Beamformed_[38].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[38].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[39].CoherentStokesBeam[0].Offset.angle1=0.012639 Observation.DataProducts.Output_Beamformed_[39].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[39].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4175,6 +4483,8 @@ Observation.DataProducts.Output_Beamformed_[39].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[39].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[39].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[39].size=0 +Observation.DataProducts.Output_Beamformed_[39].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[39].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[3].CoherentStokesBeam[0].Offset.angle1=-0.981824 Observation.DataProducts.Output_Beamformed_[3].CoherentStokesBeam[0].Offset.angle2=-0.922267 Observation.DataProducts.Output_Beamformed_[3].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4202,6 +4512,8 @@ Observation.DataProducts.Output_Beamformed_[3].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[3].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[3].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[3].size=0 +Observation.DataProducts.Output_Beamformed_[3].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[3].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[40].CoherentStokesBeam[0].Offset.angle1=0.006301 
Observation.DataProducts.Output_Beamformed_[40].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[40].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4229,6 +4541,8 @@ Observation.DataProducts.Output_Beamformed_[40].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[40].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[40].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[40].size=0 +Observation.DataProducts.Output_Beamformed_[40].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[40].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[41].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[41].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[41].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4256,6 +4570,8 @@ Observation.DataProducts.Output_Beamformed_[41].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[41].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[41].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[41].size=0 +Observation.DataProducts.Output_Beamformed_[41].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[41].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[42].CoherentStokesBeam[0].Offset.angle1=-0.006301 Observation.DataProducts.Output_Beamformed_[42].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[42].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4283,6 +4599,8 @@ Observation.DataProducts.Output_Beamformed_[42].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[42].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[42].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[42].size=0 +Observation.DataProducts.Output_Beamformed_[42].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[42].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[43].CoherentStokesBeam[0].Offset.angle1=-0.012639 Observation.DataProducts.Output_Beamformed_[43].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[43].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4310,6 +4628,8 @@ Observation.DataProducts.Output_Beamformed_[43].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[43].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[43].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[43].size=0 +Observation.DataProducts.Output_Beamformed_[43].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[43].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[44].CoherentStokesBeam[0].Offset.angle1=-0.019015 Observation.DataProducts.Output_Beamformed_[44].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[44].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4337,6 +4657,8 @@ Observation.DataProducts.Output_Beamformed_[44].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[44].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[44].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[44].size=0 +Observation.DataProducts.Output_Beamformed_[44].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[44].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[45].CoherentStokesBeam[0].Offset.angle1=-0.019128 Observation.DataProducts.Output_Beamformed_[45].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[45].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4364,6 +4686,8 @@ Observation.DataProducts.Output_Beamformed_[45].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[45].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[45].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[45].size=0 +Observation.DataProducts.Output_Beamformed_[45].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[45].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[46].CoherentStokesBeam[0].Offset.angle1=-0.019244 Observation.DataProducts.Output_Beamformed_[46].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[46].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4391,6 +4715,8 @@ Observation.DataProducts.Output_Beamformed_[46].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[46].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[46].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[46].size=0 +Observation.DataProducts.Output_Beamformed_[46].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[46].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[47].CoherentStokesBeam[0].Offset.angle1=-0.019361 Observation.DataProducts.Output_Beamformed_[47].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[47].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4418,6 +4744,8 @@ Observation.DataProducts.Output_Beamformed_[47].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[47].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[47].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[47].size=0 +Observation.DataProducts.Output_Beamformed_[47].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[47].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[48].CoherentStokesBeam[0].Offset.angle1=-0.012947 Observation.DataProducts.Output_Beamformed_[48].CoherentStokesBeam[0].Offset.angle2=0.008560 
Observation.DataProducts.Output_Beamformed_[48].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4445,6 +4773,8 @@ Observation.DataProducts.Output_Beamformed_[48].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[48].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[48].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[48].size=0 +Observation.DataProducts.Output_Beamformed_[48].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[48].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[49].CoherentStokesBeam[0].Offset.angle1=-0.006493 Observation.DataProducts.Output_Beamformed_[49].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[49].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4472,6 +4802,8 @@ Observation.DataProducts.Output_Beamformed_[49].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[49].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[49].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[49].size=0 +Observation.DataProducts.Output_Beamformed_[49].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[49].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[4].CoherentStokesBeam[0].Offset.angle1=-0.841611 Observation.DataProducts.Output_Beamformed_[4].CoherentStokesBeam[0].Offset.angle2=-0.935078 Observation.DataProducts.Output_Beamformed_[4].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4499,6 +4831,8 @@ Observation.DataProducts.Output_Beamformed_[4].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[4].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[4].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[4].size=0 +Observation.DataProducts.Output_Beamformed_[4].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[4].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[50].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[50].CoherentStokesBeam[0].Offset.angle2=0.017120 Observation.DataProducts.Output_Beamformed_[50].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4526,6 +4860,8 @@ Observation.DataProducts.Output_Beamformed_[50].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[50].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[50].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[50].size=0 +Observation.DataProducts.Output_Beamformed_[50].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[50].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[51].CoherentStokesBeam[0].Offset.angle1=0.006534 Observation.DataProducts.Output_Beamformed_[51].CoherentStokesBeam[0].Offset.angle2=0.014980 Observation.DataProducts.Output_Beamformed_[51].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4553,6 +4889,8 @@ Observation.DataProducts.Output_Beamformed_[51].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[51].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[51].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[51].size=0 +Observation.DataProducts.Output_Beamformed_[51].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[51].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[52].CoherentStokesBeam[0].Offset.angle1=0.013027 Observation.DataProducts.Output_Beamformed_[52].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[52].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4580,6 +4918,8 @@ Observation.DataProducts.Output_Beamformed_[52].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[52].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[52].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[52].size=0 +Observation.DataProducts.Output_Beamformed_[52].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[52].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[53].CoherentStokesBeam[0].Offset.angle1=0.019480 Observation.DataProducts.Output_Beamformed_[53].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[53].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4607,6 +4947,8 @@ Observation.DataProducts.Output_Beamformed_[53].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[53].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[53].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[53].size=0 +Observation.DataProducts.Output_Beamformed_[53].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[53].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[54].CoherentStokesBeam[0].Offset.angle1=0.025894 Observation.DataProducts.Output_Beamformed_[54].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[54].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4634,6 +4976,8 @@ Observation.DataProducts.Output_Beamformed_[54].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[54].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[54].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[54].size=0 +Observation.DataProducts.Output_Beamformed_[54].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[54].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[55].CoherentStokesBeam[0].Offset.angle1=0.025736 Observation.DataProducts.Output_Beamformed_[55].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[55].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4661,6 +5005,8 @@ 
Observation.DataProducts.Output_Beamformed_[55].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[55].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[55].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[55].size=0 +Observation.DataProducts.Output_Beamformed_[55].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[55].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[56].CoherentStokesBeam[0].Offset.angle1=0.025581 Observation.DataProducts.Output_Beamformed_[56].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[56].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4688,6 +5034,8 @@ Observation.DataProducts.Output_Beamformed_[56].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[56].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[56].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[56].size=0 +Observation.DataProducts.Output_Beamformed_[56].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[56].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[57].CoherentStokesBeam[0].Offset.angle1=0.025428 Observation.DataProducts.Output_Beamformed_[57].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[57].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4715,6 +5063,8 @@ Observation.DataProducts.Output_Beamformed_[57].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[57].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[57].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[57].size=0 +Observation.DataProducts.Output_Beamformed_[57].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[57].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[58].CoherentStokesBeam[0].Offset.angle1=0.025278 
Observation.DataProducts.Output_Beamformed_[58].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[58].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4742,6 +5092,8 @@ Observation.DataProducts.Output_Beamformed_[58].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[58].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[58].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[58].size=0 +Observation.DataProducts.Output_Beamformed_[58].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[58].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[59].CoherentStokesBeam[0].Offset.angle1=0.018903 Observation.DataProducts.Output_Beamformed_[59].CoherentStokesBeam[0].Offset.angle2=-0.010700 Observation.DataProducts.Output_Beamformed_[59].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4769,6 +5121,8 @@ Observation.DataProducts.Output_Beamformed_[59].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[59].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[59].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[59].size=0 +Observation.DataProducts.Output_Beamformed_[59].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[59].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[5].CoherentStokesBeam[0].Offset.angle1=-0.832621 Observation.DataProducts.Output_Beamformed_[5].CoherentStokesBeam[0].Offset.angle2=-0.952579 Observation.DataProducts.Output_Beamformed_[5].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4796,6 +5150,8 @@ Observation.DataProducts.Output_Beamformed_[5].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[5].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[5].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[5].size=0 +Observation.DataProducts.Output_Beamformed_[5].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[5].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[60].CoherentStokesBeam[0].Offset.angle1=0.012565 Observation.DataProducts.Output_Beamformed_[60].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[60].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4823,6 +5179,8 @@ Observation.DataProducts.Output_Beamformed_[60].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[60].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[60].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[60].size=0 +Observation.DataProducts.Output_Beamformed_[60].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[60].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[61].CoherentStokesBeam[0].Offset.angle1=0.006264 Observation.DataProducts.Output_Beamformed_[61].CoherentStokesBeam[0].Offset.angle2=-0.014980 Observation.DataProducts.Output_Beamformed_[61].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4850,6 +5208,8 @@ Observation.DataProducts.Output_Beamformed_[61].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[61].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[61].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[61].size=0 +Observation.DataProducts.Output_Beamformed_[61].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[61].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[62].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[62].CoherentStokesBeam[0].Offset.angle2=-0.017120 Observation.DataProducts.Output_Beamformed_[62].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4877,6 +5237,8 @@ Observation.DataProducts.Output_Beamformed_[62].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[62].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[62].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[62].size=0 +Observation.DataProducts.Output_Beamformed_[62].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[62].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[63].CoherentStokesBeam[0].Offset.angle1=-0.006264 Observation.DataProducts.Output_Beamformed_[63].CoherentStokesBeam[0].Offset.angle2=-0.014980 Observation.DataProducts.Output_Beamformed_[63].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4904,6 +5266,8 @@ Observation.DataProducts.Output_Beamformed_[63].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[63].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[63].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[63].size=0 +Observation.DataProducts.Output_Beamformed_[63].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[63].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[64].CoherentStokesBeam[0].Offset.angle1=-0.012565 Observation.DataProducts.Output_Beamformed_[64].CoherentStokesBeam[0].Offset.angle2=-0.012840 Observation.DataProducts.Output_Beamformed_[64].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4931,6 +5295,8 @@ Observation.DataProducts.Output_Beamformed_[64].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[64].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[64].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[64].size=0 +Observation.DataProducts.Output_Beamformed_[64].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[64].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[65].CoherentStokesBeam[0].Offset.angle1=-0.018903 Observation.DataProducts.Output_Beamformed_[65].CoherentStokesBeam[0].Offset.angle2=-0.010700 
Observation.DataProducts.Output_Beamformed_[65].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4958,6 +5324,8 @@ Observation.DataProducts.Output_Beamformed_[65].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[65].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[65].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[65].size=0 +Observation.DataProducts.Output_Beamformed_[65].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[65].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[66].CoherentStokesBeam[0].Offset.angle1=-0.025278 Observation.DataProducts.Output_Beamformed_[66].CoherentStokesBeam[0].Offset.angle2=-0.008560 Observation.DataProducts.Output_Beamformed_[66].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -4985,6 +5353,8 @@ Observation.DataProducts.Output_Beamformed_[66].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[66].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[66].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[66].size=0 +Observation.DataProducts.Output_Beamformed_[66].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[66].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[67].CoherentStokesBeam[0].Offset.angle1=-0.025428 Observation.DataProducts.Output_Beamformed_[67].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[67].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5012,6 +5382,8 @@ Observation.DataProducts.Output_Beamformed_[67].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[67].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[67].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[67].size=0 +Observation.DataProducts.Output_Beamformed_[67].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[67].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[68].CoherentStokesBeam[0].Offset.angle1=-0.025581 Observation.DataProducts.Output_Beamformed_[68].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[68].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5039,6 +5411,8 @@ Observation.DataProducts.Output_Beamformed_[68].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[68].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[68].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[68].size=0 +Observation.DataProducts.Output_Beamformed_[68].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[68].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[69].CoherentStokesBeam[0].Offset.angle1=-0.025736 Observation.DataProducts.Output_Beamformed_[69].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[69].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5066,6 +5440,8 @@ Observation.DataProducts.Output_Beamformed_[69].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[69].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[69].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[69].size=0 +Observation.DataProducts.Output_Beamformed_[69].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[69].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[6].CoherentStokesBeam[0].Offset.angle1=-0.849181 Observation.DataProducts.Output_Beamformed_[6].CoherentStokesBeam[0].Offset.angle2=-0.970080 Observation.DataProducts.Output_Beamformed_[6].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5093,6 +5469,8 @@ Observation.DataProducts.Output_Beamformed_[6].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[6].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[6].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[6].size=0 +Observation.DataProducts.Output_Beamformed_[6].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[6].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[70].CoherentStokesBeam[0].Offset.angle1=-0.025894 Observation.DataProducts.Output_Beamformed_[70].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[70].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5120,6 +5498,8 @@ Observation.DataProducts.Output_Beamformed_[70].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[70].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[70].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[70].size=0 +Observation.DataProducts.Output_Beamformed_[70].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[70].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[71].CoherentStokesBeam[0].Offset.angle1=-0.019480 Observation.DataProducts.Output_Beamformed_[71].CoherentStokesBeam[0].Offset.angle2=0.010700 Observation.DataProducts.Output_Beamformed_[71].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5147,6 +5527,8 @@ Observation.DataProducts.Output_Beamformed_[71].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[71].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[71].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[71].size=0 +Observation.DataProducts.Output_Beamformed_[71].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[71].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[72].CoherentStokesBeam[0].Offset.angle1=-0.013027 Observation.DataProducts.Output_Beamformed_[72].CoherentStokesBeam[0].Offset.angle2=0.012840 Observation.DataProducts.Output_Beamformed_[72].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5174,6 +5556,8 @@ 
Observation.DataProducts.Output_Beamformed_[72].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[72].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[72].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[72].size=0 +Observation.DataProducts.Output_Beamformed_[72].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[72].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[73].CoherentStokesBeam[0].Offset.angle1=-0.006534 Observation.DataProducts.Output_Beamformed_[73].CoherentStokesBeam[0].Offset.angle2=0.014980 Observation.DataProducts.Output_Beamformed_[73].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5201,6 +5585,8 @@ Observation.DataProducts.Output_Beamformed_[73].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[73].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[73].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[73].size=0 +Observation.DataProducts.Output_Beamformed_[73].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[73].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[74].CoherentStokesBeam[0].Offset.angle1=-1.049796 Observation.DataProducts.Output_Beamformed_[74].CoherentStokesBeam[0].Offset.angle2=-0.917577 Observation.DataProducts.Output_Beamformed_[74].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5228,6 +5614,8 @@ Observation.DataProducts.Output_Beamformed_[74].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[74].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[74].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[74].size=0 +Observation.DataProducts.Output_Beamformed_[74].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[74].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[75].CoherentStokesBeam[0].Offset.angle1=-0.997314 
Observation.DataProducts.Output_Beamformed_[75].CoherentStokesBeam[0].Offset.angle2=-0.922267 Observation.DataProducts.Output_Beamformed_[75].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5255,6 +5643,8 @@ Observation.DataProducts.Output_Beamformed_[75].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[75].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[75].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[75].size=0 +Observation.DataProducts.Output_Beamformed_[75].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[75].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[76].CoherentStokesBeam[0].Offset.angle1=-1.137527 Observation.DataProducts.Output_Beamformed_[76].CoherentStokesBeam[0].Offset.angle2=-0.935078 Observation.DataProducts.Output_Beamformed_[76].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5282,6 +5672,8 @@ Observation.DataProducts.Output_Beamformed_[76].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[76].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[76].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[76].size=0 +Observation.DataProducts.Output_Beamformed_[76].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[76].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[77].CoherentStokesBeam[0].Offset.angle1=-1.102278 Observation.DataProducts.Output_Beamformed_[77].CoherentStokesBeam[0].Offset.angle2=-0.922267 Observation.DataProducts.Output_Beamformed_[77].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5309,6 +5701,8 @@ Observation.DataProducts.Output_Beamformed_[77].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[77].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[77].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[77].size=0 +Observation.DataProducts.Output_Beamformed_[77].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[77].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[78].CoherentStokesBeam[0].Offset.angle1=-0.962065 Observation.DataProducts.Output_Beamformed_[78].CoherentStokesBeam[0].Offset.angle2=-0.935078 Observation.DataProducts.Output_Beamformed_[78].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5336,6 +5730,8 @@ Observation.DataProducts.Output_Beamformed_[78].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[78].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[78].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[78].size=0 +Observation.DataProducts.Output_Beamformed_[78].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[78].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[79].CoherentStokesBeam[0].Offset.angle1=-0.953075 Observation.DataProducts.Output_Beamformed_[79].CoherentStokesBeam[0].Offset.angle2=-0.952579 Observation.DataProducts.Output_Beamformed_[79].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5363,6 +5759,8 @@ Observation.DataProducts.Output_Beamformed_[79].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[79].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[79].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[79].size=0 +Observation.DataProducts.Output_Beamformed_[79].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[79].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[7].CoherentStokesBeam[0].Offset.angle1=-0.884465 Observation.DataProducts.Output_Beamformed_[7].CoherentStokesBeam[0].Offset.angle2=-0.982891 Observation.DataProducts.Output_Beamformed_[7].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5390,6 +5788,8 @@ Observation.DataProducts.Output_Beamformed_[7].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[7].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[7].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[7].size=0 +Observation.DataProducts.Output_Beamformed_[7].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[7].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[80].CoherentStokesBeam[0].Offset.angle1=-0.969635 Observation.DataProducts.Output_Beamformed_[80].CoherentStokesBeam[0].Offset.angle2=-0.970080 Observation.DataProducts.Output_Beamformed_[80].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5417,6 +5817,8 @@ Observation.DataProducts.Output_Beamformed_[80].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[80].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[80].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[80].size=0 +Observation.DataProducts.Output_Beamformed_[80].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[80].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[81].CoherentStokesBeam[0].Offset.angle1=-1.004919 Observation.DataProducts.Output_Beamformed_[81].CoherentStokesBeam[0].Offset.angle2=-0.982891 Observation.DataProducts.Output_Beamformed_[81].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5444,6 +5846,8 @@ Observation.DataProducts.Output_Beamformed_[81].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[81].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[81].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[81].size=0 +Observation.DataProducts.Output_Beamformed_[81].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[81].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[82].CoherentStokesBeam[0].Offset.angle1=-1.049796 Observation.DataProducts.Output_Beamformed_[82].CoherentStokesBeam[0].Offset.angle2=-0.987581 Observation.DataProducts.Output_Beamformed_[82].CoherentStokesBeam[0].Offset.coordType=RA-DEC 
@@ -5471,6 +5875,8 @@ Observation.DataProducts.Output_Beamformed_[82].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[82].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[82].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[82].size=0 +Observation.DataProducts.Output_Beamformed_[82].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[82].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[83].CoherentStokesBeam[0].Offset.angle1=-1.094673 Observation.DataProducts.Output_Beamformed_[83].CoherentStokesBeam[0].Offset.angle2=-0.982891 Observation.DataProducts.Output_Beamformed_[83].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5498,6 +5904,8 @@ Observation.DataProducts.Output_Beamformed_[83].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[83].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[83].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[83].size=0 +Observation.DataProducts.Output_Beamformed_[83].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[83].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[84].CoherentStokesBeam[0].Offset.angle1=-1.129957 Observation.DataProducts.Output_Beamformed_[84].CoherentStokesBeam[0].Offset.angle2=-0.970080 Observation.DataProducts.Output_Beamformed_[84].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5525,6 +5933,8 @@ Observation.DataProducts.Output_Beamformed_[84].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[84].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[84].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[84].size=0 +Observation.DataProducts.Output_Beamformed_[84].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[84].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[85].CoherentStokesBeam[0].Offset.angle1=-1.146517 
Observation.DataProducts.Output_Beamformed_[85].CoherentStokesBeam[0].Offset.angle2=-0.952579 Observation.DataProducts.Output_Beamformed_[85].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5552,6 +5962,8 @@ Observation.DataProducts.Output_Beamformed_[85].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[85].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[85].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[85].size=0 +Observation.DataProducts.Output_Beamformed_[85].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[85].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].SAP=1 Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].TAB=12 Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].centralFrequencies=[119531250.0000, 119726562.5000, 119921875.0000, 120117187.5000, 120312500.0000, 120507812.5000, 120703125.0000, 120898437.5000, 121093750.0000, 121289062.5000, 121484375.0000, 121679687.5000, 121875000.0000, 122070312.5000, 122265625.0000, 122460937.5000, 122656250.0000, 122851562.5000, 123046875.0000, 123242187.5000, 123437500.0000, 123632812.5000, 123828125.0000, 124023437.5000, 124218750.0000, 124414062.5000, 124609375.0000, 124804687.5000, 125000000.0000, 125195312.5000, 125390625.0000, 125585937.5000, 125781250.0000, 125976562.5000, 126171875.0000, 126367187.5000, 126562500.0000, 126757812.5000, 126953125.0000, 127148437.5000, 127343750.0000, 127539062.5000, 127734375.0000, 127929687.5000, 128125000.0000, 128320312.5000, 128515625.0000, 128710937.5000, 128906250.0000, 129101562.5000, 129296875.0000, 129492187.5000, 129687500.0000, 129882812.5000, 130078125.0000, 130273437.5000, 130468750.0000, 130664062.5000, 130859375.0000, 131054687.5000, 131250000.0000, 131445312.5000, 131640625.0000, 131835937.5000, 132031250.0000, 132226562.5000, 132421875.0000, 132617187.5000, 132812500.0000, 133007812.5000, 
133203125.0000, 133398437.5000, 133593750.0000, 133789062.5000, 133984375.0000, 134179687.5000, 134375000.0000, 134570312.5000, 134765625.0000, 134960937.5000, 135156250.0000, 135351562.5000, 135546875.0000, 135742187.5000, 135937500.0000, 136132812.5000, 136328125.0000, 136523437.5000, 136718750.0000, 136914062.5000, 137109375.0000, 137304687.5000, 137500000.0000, 137695312.5000, 137890625.0000, 138085937.5000, 138281250.0000, 138476562.5000, 138671875.0000, 138867187.5000, 139062500.0000, 139257812.5000, 139453125.0000, 139648437.5000, 139843750.0000, 140039062.5000, 140234375.0000, 140429687.5000, 140625000.0000, 140820312.5000, 141015625.0000, 141210937.5000, 141406250.0000, 141601562.5000, 141796875.0000, 141992187.5000, 142187500.0000, 142382812.5000, 142578125.0000, 142773437.5000, 142968750.0000, 143164062.5000, 143359375.0000, 143554687.5000, 143750000.0000, 143945312.5000, 144140625.0000, 144335937.5000, 144531250.0000, 144726562.5000, 144921875.0000, 145117187.5000, 145312500.0000, 145507812.5000, 145703125.0000, 145898437.5000, 146093750.0000, 146289062.5000, 146484375.0000, 146679687.5000, 146875000.0000, 147070312.5000, 147265625.0000, 147460937.5000, 147656250.0000, 147851562.5000, 148046875.0000, 148242187.5000, 148437500.0000, 148632812.5000, 148828125.0000, 149023437.5000, 149218750.0000, 149414062.5000, 149609375.0000, 149804687.5000, 150000000.0000, 150195312.5000, 150390625.0000, 150585937.5000, 150781250.0000, 150976562.5000] @@ -5571,6 +5983,8 @@ Observation.DataProducts.Output_Beamformed_[86].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[86].nrOfIncoherentStokesBeams=1 Observation.DataProducts.Output_Beamformed_[86].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[86].size=0 +Observation.DataProducts.Output_Beamformed_[86].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[86].storageWriterVersion=UNKNOWN 
Observation.DataProducts.Output_Beamformed_[87].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[87].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[87].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5598,6 +6012,8 @@ Observation.DataProducts.Output_Beamformed_[87].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[87].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[87].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[87].size=0 +Observation.DataProducts.Output_Beamformed_[87].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[87].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[88].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[88].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[88].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5625,6 +6041,8 @@ Observation.DataProducts.Output_Beamformed_[88].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[88].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[88].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[88].size=0 +Observation.DataProducts.Output_Beamformed_[88].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[88].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[89].CoherentStokesBeam[0].Offset.angle1=0.006415 Observation.DataProducts.Output_Beamformed_[89].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[89].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5652,6 +6070,8 @@ Observation.DataProducts.Output_Beamformed_[89].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[89].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[89].percentageWritten=0 
Observation.DataProducts.Output_Beamformed_[89].size=0 +Observation.DataProducts.Output_Beamformed_[89].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[89].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[8].CoherentStokesBeam[0].Offset.angle1=-0.929342 Observation.DataProducts.Output_Beamformed_[8].CoherentStokesBeam[0].Offset.angle2=-0.987581 Observation.DataProducts.Output_Beamformed_[8].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5679,6 +6099,8 @@ Observation.DataProducts.Output_Beamformed_[8].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[8].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[8].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[8].size=0 +Observation.DataProducts.Output_Beamformed_[8].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[8].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[90].CoherentStokesBeam[0].Offset.angle1=0.006376 Observation.DataProducts.Output_Beamformed_[90].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[90].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5706,6 +6128,8 @@ Observation.DataProducts.Output_Beamformed_[90].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[90].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[90].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[90].size=0 +Observation.DataProducts.Output_Beamformed_[90].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[90].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[91].CoherentStokesBeam[0].Offset.angle1=0.000000 Observation.DataProducts.Output_Beamformed_[91].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[91].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5733,6 +6157,8 @@ 
Observation.DataProducts.Output_Beamformed_[91].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[91].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[91].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[91].size=0 +Observation.DataProducts.Output_Beamformed_[91].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[91].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[92].CoherentStokesBeam[0].Offset.angle1=-0.006376 Observation.DataProducts.Output_Beamformed_[92].CoherentStokesBeam[0].Offset.angle2=-0.002140 Observation.DataProducts.Output_Beamformed_[92].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5760,6 +6186,8 @@ Observation.DataProducts.Output_Beamformed_[92].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[92].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[92].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[92].size=0 +Observation.DataProducts.Output_Beamformed_[92].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[92].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[93].CoherentStokesBeam[0].Offset.angle1=-0.006415 Observation.DataProducts.Output_Beamformed_[93].CoherentStokesBeam[0].Offset.angle2=0.002140 Observation.DataProducts.Output_Beamformed_[93].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5787,6 +6215,8 @@ Observation.DataProducts.Output_Beamformed_[93].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[93].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[93].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[93].size=0 +Observation.DataProducts.Output_Beamformed_[93].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[93].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[94].CoherentStokesBeam[0].Offset.angle1=0.000000 
Observation.DataProducts.Output_Beamformed_[94].CoherentStokesBeam[0].Offset.angle2=0.008560 Observation.DataProducts.Output_Beamformed_[94].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5814,6 +6244,8 @@ Observation.DataProducts.Output_Beamformed_[94].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[94].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[94].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[94].size=0 +Observation.DataProducts.Output_Beamformed_[94].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[94].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[95].CoherentStokesBeam[0].Offset.angle1=0.006454 Observation.DataProducts.Output_Beamformed_[95].CoherentStokesBeam[0].Offset.angle2=0.006420 Observation.DataProducts.Output_Beamformed_[95].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5841,6 +6273,8 @@ Observation.DataProducts.Output_Beamformed_[95].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[95].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[95].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[95].size=0 +Observation.DataProducts.Output_Beamformed_[95].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[95].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[96].CoherentStokesBeam[0].Offset.angle1=0.012868 Observation.DataProducts.Output_Beamformed_[96].CoherentStokesBeam[0].Offset.angle2=0.004280 Observation.DataProducts.Output_Beamformed_[96].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5868,6 +6302,8 @@ Observation.DataProducts.Output_Beamformed_[96].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[96].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[96].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[96].size=0 +Observation.DataProducts.Output_Beamformed_[96].storageWriter=HDF5DEFAULT 
+Observation.DataProducts.Output_Beamformed_[96].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[97].CoherentStokesBeam[0].Offset.angle1=0.012791 Observation.DataProducts.Output_Beamformed_[97].CoherentStokesBeam[0].Offset.angle2=0.000000 Observation.DataProducts.Output_Beamformed_[97].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5895,6 +6331,8 @@ Observation.DataProducts.Output_Beamformed_[97].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[97].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[97].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[97].size=0 +Observation.DataProducts.Output_Beamformed_[97].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[97].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[98].CoherentStokesBeam[0].Offset.angle1=0.012714 Observation.DataProducts.Output_Beamformed_[98].CoherentStokesBeam[0].Offset.angle2=-0.004280 Observation.DataProducts.Output_Beamformed_[98].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5922,6 +6360,8 @@ Observation.DataProducts.Output_Beamformed_[98].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[98].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[98].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[98].size=0 +Observation.DataProducts.Output_Beamformed_[98].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[98].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[99].CoherentStokesBeam[0].Offset.angle1=0.006338 Observation.DataProducts.Output_Beamformed_[99].CoherentStokesBeam[0].Offset.angle2=-0.006420 Observation.DataProducts.Output_Beamformed_[99].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5949,6 +6389,8 @@ Observation.DataProducts.Output_Beamformed_[99].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[99].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[99].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[99].size=0 +Observation.DataProducts.Output_Beamformed_[99].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[99].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[9].CoherentStokesBeam[0].Offset.angle1=-0.974219 Observation.DataProducts.Output_Beamformed_[9].CoherentStokesBeam[0].Offset.angle2=-0.982891 Observation.DataProducts.Output_Beamformed_[9].CoherentStokesBeam[0].Offset.coordType=RA-DEC @@ -5976,6 +6418,8 @@ Observation.DataProducts.Output_Beamformed_[9].nrOfFlysEyeBeams=0 Observation.DataProducts.Output_Beamformed_[9].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[9].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[9].size=0 +Observation.DataProducts.Output_Beamformed_[9].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[9].storageWriterVersion=UNKNOWN Observation.DataProducts.nrOfOutput_Beamformed_=222 Observation.DataProducts.nrOfOutput_Correlated_=0 Observation.IncoherentStokes.antennaSet=HBA_DUAL @@ -5987,3 +6431,4 @@ Observation.IncoherentStokes.stationList=[CS007,CS005,CS002,CS004,CS006,CS003] Observation.IncoherentStokes.stokes=I Observation.IncoherentStokes.timeDownsamplingFactor=6 _isCobalt=T +feedback_version=03.01.00 diff --git a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221311_feedback b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221311_feedback index cbbe197ce26d16abaf8575825a8ce1244b15ac5c..19d6640f9f37e7c30c8c7c5a35ccb3fb23fb35e0 100644 --- a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221311_feedback +++ b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221311_feedback @@ -27,6 +27,8 @@ Observation.DataProducts.Output_Beamformed_[0].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[0].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[0].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[0].size=0 +Observation.DataProducts.Output_Beamformed_[0].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[0].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[10].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[10].FlysEyeBeam[0].TAB=10 Observation.DataProducts.Output_Beamformed_[10].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -48,6 +50,8 @@ Observation.DataProducts.Output_Beamformed_[10].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[10].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[10].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[10].size=0 +Observation.DataProducts.Output_Beamformed_[10].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[10].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[11].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[11].FlysEyeBeam[0].TAB=11 Observation.DataProducts.Output_Beamformed_[11].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -69,6 +73,8 @@ Observation.DataProducts.Output_Beamformed_[11].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[11].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[11].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[11].size=0 +Observation.DataProducts.Output_Beamformed_[11].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[11].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[12].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[12].FlysEyeBeam[0].TAB=12 Observation.DataProducts.Output_Beamformed_[12].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -90,6 +96,8 @@ Observation.DataProducts.Output_Beamformed_[12].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[12].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[12].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[12].size=0 +Observation.DataProducts.Output_Beamformed_[12].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[12].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[13].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[13].FlysEyeBeam[0].TAB=13 Observation.DataProducts.Output_Beamformed_[13].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -111,6 +119,8 @@ Observation.DataProducts.Output_Beamformed_[13].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[13].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[13].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[13].size=0 +Observation.DataProducts.Output_Beamformed_[13].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[13].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[14].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[14].FlysEyeBeam[0].TAB=14 Observation.DataProducts.Output_Beamformed_[14].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -132,6 +142,8 @@ Observation.DataProducts.Output_Beamformed_[14].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[14].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[14].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[14].size=0 +Observation.DataProducts.Output_Beamformed_[14].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[14].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[15].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[15].FlysEyeBeam[0].TAB=15 Observation.DataProducts.Output_Beamformed_[15].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -153,6 +165,8 @@ Observation.DataProducts.Output_Beamformed_[15].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[15].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[15].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[15].size=0 +Observation.DataProducts.Output_Beamformed_[15].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[15].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[16].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[16].FlysEyeBeam[0].TAB=16 Observation.DataProducts.Output_Beamformed_[16].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -174,6 +188,8 @@ Observation.DataProducts.Output_Beamformed_[16].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[16].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[16].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[16].size=0 +Observation.DataProducts.Output_Beamformed_[16].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[16].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[17].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[17].FlysEyeBeam[0].TAB=17 Observation.DataProducts.Output_Beamformed_[17].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -195,6 +211,8 @@ Observation.DataProducts.Output_Beamformed_[17].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[17].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[17].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[17].size=0 +Observation.DataProducts.Output_Beamformed_[17].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[17].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[18].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[18].FlysEyeBeam[0].TAB=18 Observation.DataProducts.Output_Beamformed_[18].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -216,6 +234,8 @@ Observation.DataProducts.Output_Beamformed_[18].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[18].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[18].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[18].size=0 +Observation.DataProducts.Output_Beamformed_[18].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[18].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[19].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[19].FlysEyeBeam[0].TAB=19 Observation.DataProducts.Output_Beamformed_[19].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -237,6 +257,8 @@ Observation.DataProducts.Output_Beamformed_[19].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[19].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[19].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[19].size=0 +Observation.DataProducts.Output_Beamformed_[19].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[19].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[1].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[1].FlysEyeBeam[0].TAB=1 Observation.DataProducts.Output_Beamformed_[1].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -258,6 +280,8 @@ Observation.DataProducts.Output_Beamformed_[1].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[1].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[1].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[1].size=0 +Observation.DataProducts.Output_Beamformed_[1].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[1].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[20].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[20].FlysEyeBeam[0].TAB=20 Observation.DataProducts.Output_Beamformed_[20].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -279,6 +303,8 @@ Observation.DataProducts.Output_Beamformed_[20].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[20].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[20].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[20].size=0 +Observation.DataProducts.Output_Beamformed_[20].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[20].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[21].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[21].FlysEyeBeam[0].TAB=21 Observation.DataProducts.Output_Beamformed_[21].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -300,6 +326,8 @@ Observation.DataProducts.Output_Beamformed_[21].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[21].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[21].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[21].size=0 +Observation.DataProducts.Output_Beamformed_[21].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[21].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[22].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[22].FlysEyeBeam[0].TAB=22 Observation.DataProducts.Output_Beamformed_[22].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -321,6 +349,8 @@ Observation.DataProducts.Output_Beamformed_[22].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[22].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[22].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[22].size=0 +Observation.DataProducts.Output_Beamformed_[22].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[22].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[23].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[23].FlysEyeBeam[0].TAB=23 Observation.DataProducts.Output_Beamformed_[23].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -342,6 +372,8 @@ Observation.DataProducts.Output_Beamformed_[23].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[23].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[23].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[23].size=0 +Observation.DataProducts.Output_Beamformed_[23].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[23].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[24].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[24].FlysEyeBeam[0].TAB=24 Observation.DataProducts.Output_Beamformed_[24].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -363,6 +395,8 @@ Observation.DataProducts.Output_Beamformed_[24].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[24].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[24].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[24].size=0 +Observation.DataProducts.Output_Beamformed_[24].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[24].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[25].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[25].FlysEyeBeam[0].TAB=25 Observation.DataProducts.Output_Beamformed_[25].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -384,6 +418,8 @@ Observation.DataProducts.Output_Beamformed_[25].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[25].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[25].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[25].size=0 +Observation.DataProducts.Output_Beamformed_[25].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[25].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[26].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[26].FlysEyeBeam[0].TAB=26 Observation.DataProducts.Output_Beamformed_[26].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -405,6 +441,8 @@ Observation.DataProducts.Output_Beamformed_[26].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[26].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[26].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[26].size=0 +Observation.DataProducts.Output_Beamformed_[26].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[26].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[27].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[27].FlysEyeBeam[0].TAB=27 Observation.DataProducts.Output_Beamformed_[27].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -426,6 +464,8 @@ Observation.DataProducts.Output_Beamformed_[27].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[27].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[27].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[27].size=0 +Observation.DataProducts.Output_Beamformed_[27].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[27].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[28].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[28].FlysEyeBeam[0].TAB=28 Observation.DataProducts.Output_Beamformed_[28].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -447,6 +487,8 @@ Observation.DataProducts.Output_Beamformed_[28].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[28].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[28].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[28].size=0 +Observation.DataProducts.Output_Beamformed_[28].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[28].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[29].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[29].FlysEyeBeam[0].TAB=29 Observation.DataProducts.Output_Beamformed_[29].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -468,6 +510,8 @@ Observation.DataProducts.Output_Beamformed_[29].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[29].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[29].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[29].size=0 +Observation.DataProducts.Output_Beamformed_[29].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[29].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[2].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[2].FlysEyeBeam[0].TAB=2 Observation.DataProducts.Output_Beamformed_[2].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -489,6 +533,8 @@ Observation.DataProducts.Output_Beamformed_[2].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[2].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[2].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[2].size=0 +Observation.DataProducts.Output_Beamformed_[2].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[2].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[30].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[30].FlysEyeBeam[0].TAB=30 Observation.DataProducts.Output_Beamformed_[30].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -510,6 +556,8 @@ Observation.DataProducts.Output_Beamformed_[30].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[30].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[30].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[30].size=0 +Observation.DataProducts.Output_Beamformed_[30].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[30].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[31].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[31].FlysEyeBeam[0].TAB=31 Observation.DataProducts.Output_Beamformed_[31].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -531,6 +579,8 @@ Observation.DataProducts.Output_Beamformed_[31].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[31].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[31].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[31].size=0 +Observation.DataProducts.Output_Beamformed_[31].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[31].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[32].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[32].FlysEyeBeam[0].TAB=32 Observation.DataProducts.Output_Beamformed_[32].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -552,6 +602,8 @@ Observation.DataProducts.Output_Beamformed_[32].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[32].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[32].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[32].size=0 +Observation.DataProducts.Output_Beamformed_[32].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[32].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[33].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[33].FlysEyeBeam[0].TAB=33 Observation.DataProducts.Output_Beamformed_[33].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -573,6 +625,8 @@ Observation.DataProducts.Output_Beamformed_[33].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[33].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[33].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[33].size=0 +Observation.DataProducts.Output_Beamformed_[33].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[33].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[34].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[34].FlysEyeBeam[0].TAB=34 Observation.DataProducts.Output_Beamformed_[34].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -594,6 +648,8 @@ Observation.DataProducts.Output_Beamformed_[34].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[34].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[34].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[34].size=0 +Observation.DataProducts.Output_Beamformed_[34].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[34].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[35].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[35].FlysEyeBeam[0].TAB=35 Observation.DataProducts.Output_Beamformed_[35].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -615,6 +671,8 @@ Observation.DataProducts.Output_Beamformed_[35].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[35].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[35].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[35].size=0 +Observation.DataProducts.Output_Beamformed_[35].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[35].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[36].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[36].FlysEyeBeam[0].TAB=36 Observation.DataProducts.Output_Beamformed_[36].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -636,6 +694,8 @@ Observation.DataProducts.Output_Beamformed_[36].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[36].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[36].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[36].size=0 +Observation.DataProducts.Output_Beamformed_[36].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[36].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[37].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[37].FlysEyeBeam[0].TAB=37 Observation.DataProducts.Output_Beamformed_[37].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -657,6 +717,8 @@ Observation.DataProducts.Output_Beamformed_[37].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[37].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[37].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[37].size=0 +Observation.DataProducts.Output_Beamformed_[37].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[37].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[38].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[38].FlysEyeBeam[0].TAB=38 Observation.DataProducts.Output_Beamformed_[38].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -678,6 +740,8 @@ Observation.DataProducts.Output_Beamformed_[38].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[38].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[38].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[38].size=0 +Observation.DataProducts.Output_Beamformed_[38].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[38].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[39].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[39].FlysEyeBeam[0].TAB=39 Observation.DataProducts.Output_Beamformed_[39].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -699,6 +763,8 @@ Observation.DataProducts.Output_Beamformed_[39].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[39].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[39].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[39].size=0 +Observation.DataProducts.Output_Beamformed_[39].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[39].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[3].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[3].FlysEyeBeam[0].TAB=3 Observation.DataProducts.Output_Beamformed_[3].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -720,6 +786,8 @@ Observation.DataProducts.Output_Beamformed_[3].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[3].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[3].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[3].size=0 +Observation.DataProducts.Output_Beamformed_[3].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[3].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[4].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[4].FlysEyeBeam[0].TAB=4 Observation.DataProducts.Output_Beamformed_[4].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -741,6 +809,8 @@ Observation.DataProducts.Output_Beamformed_[4].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[4].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[4].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[4].size=0 +Observation.DataProducts.Output_Beamformed_[4].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[4].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[5].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[5].FlysEyeBeam[0].TAB=5 Observation.DataProducts.Output_Beamformed_[5].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -762,6 +832,8 @@ Observation.DataProducts.Output_Beamformed_[5].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[5].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[5].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[5].size=0 +Observation.DataProducts.Output_Beamformed_[5].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[5].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[6].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[6].FlysEyeBeam[0].TAB=6 Observation.DataProducts.Output_Beamformed_[6].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -783,6 +855,8 @@ Observation.DataProducts.Output_Beamformed_[6].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[6].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[6].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[6].size=0 +Observation.DataProducts.Output_Beamformed_[6].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[6].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[7].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[7].FlysEyeBeam[0].TAB=7 Observation.DataProducts.Output_Beamformed_[7].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -804,6 +878,8 @@ Observation.DataProducts.Output_Beamformed_[7].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[7].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[7].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[7].size=0 +Observation.DataProducts.Output_Beamformed_[7].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[7].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[8].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[8].FlysEyeBeam[0].TAB=8 Observation.DataProducts.Output_Beamformed_[8].FlysEyeBeam[0].antennaFieldName=HBA0 @@ -825,6 +901,8 @@ Observation.DataProducts.Output_Beamformed_[8].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[8].nrOfIncoherentStokesBeams=0 Observation.DataProducts.Output_Beamformed_[8].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[8].size=0 +Observation.DataProducts.Output_Beamformed_[8].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[8].storageWriterVersion=UNKNOWN Observation.DataProducts.Output_Beamformed_[9].FlysEyeBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[9].FlysEyeBeam[0].TAB=9 Observation.DataProducts.Output_Beamformed_[9].FlysEyeBeam[0].antennaFieldName=HBA1 @@ -846,6 +924,8 @@ Observation.DataProducts.Output_Beamformed_[9].nrOfFlysEyeBeams=1 Observation.DataProducts.Output_Beamformed_[9].nrOfIncoherentStokesBeams=0 
Observation.DataProducts.Output_Beamformed_[9].percentageWritten=0 Observation.DataProducts.Output_Beamformed_[9].size=0 +Observation.DataProducts.Output_Beamformed_[9].storageWriter=HDF5DEFAULT +Observation.DataProducts.Output_Beamformed_[9].storageWriterVersion=UNKNOWN Observation.DataProducts.nrOfOutput_Beamformed_=40 Observation.DataProducts.nrOfOutput_Correlated_=0 Observation.FlysEye.channelWidth=12207.03125 @@ -855,3 +935,4 @@ Observation.FlysEye.samplingTime=0.01048576 Observation.FlysEye.stokes=I Observation.FlysEye.timeDownsamplingFactor=128 _isCobalt=T +feedback_version=03.01.00 diff --git a/RTCP/Cobalt/GPUProc/doc/pipeline-buffers.txt b/RTCP/Cobalt/GPUProc/doc/pipeline-buffers.txt index 782355349a1a9068e85ac128d714eefadcd034b3..3f6e6283fa0767349eeeb11f8453881f2f4eee25 100644 --- a/RTCP/Cobalt/GPUProc/doc/pipeline-buffers.txt +++ b/RTCP/Cobalt/GPUProc/doc/pipeline-buffers.txt @@ -46,7 +46,10 @@ NB: Numbers are for 80 antenna fields. FFT (if >1ch) {out-of-place} | [station][pol][sample][channel] [80][2][3072][64] = 240 MiB Nch: E V -Delay compensation + Band pass + Transpose {I/O: delays} +Zeroing (in-place) + | [station][pol][sample][channel] [80][2][3072][64] = 240 MiB Nch: E + V +Delay compensation (channel) + Band pass + Transpose {I/O: delays} | [station][channel][sample][pol] [80][64][3072][2] = 240 MiB B V Correlator @@ -54,6 +57,8 @@ Correlator V (output) +If 1ch, IntToFloat (A->E) is used instead of FIR+FFT. 
+ (BF) Preprocessing [A -> B, trashes A] ----------------------------------- NB: The key Cobalt.BeamFormer.stationList can be used to select a subset of antenna fields in the observation @@ -67,15 +72,12 @@ IntToFloat + Transpose + FFT-shift FFT-64 {inplace} | [station][pol][sample][channel] [48][2][3072][64] = 144 MiB B V -Delay compensation + Transpose (implicit, DO_TRANSPOSE not defined) {I/O: delays} - | [station][pol][channel][sample] [48][2][64][3072] = 144 MiB A +Zeroing (in-place) + | [station][pol][sample][channel] [48][2][3072][64] = 144 MiB B V -FFT-shift {inplace} +Delay compensation + Transpose (implicit, DO_TRANSPOSE not defined) {I/O: delays} | [station][pol][channel][sample] [48][2][64][3072] = 144 MiB A V -FFT-64 {inplace} - | [station][pol][chan1][sample][chan2] [48][2][64][48][64] = 144 MiB A - V BandPass + Transpose {I/O: weights} | [station][chan1][chan2][sample][pol] [48][64][64][48][2] = 144 MiB B V = [station][channel][sample][pol] @@ -93,7 +95,7 @@ Transpose | [tab][pol][sample][channel] [tab][2][48][4096] = 3 MiB/TAB C | V -iFFT-4k {inplace} +iFFT-64 {inplace} | [tab][pol][sample] [tab][2][196608] = 3 MiB/TAB C | V @@ -123,7 +125,7 @@ Incoherent Stokes: [B -> B, trashes A] Transpose + Copy | [station][pol][sample][channel] [48][2][48][4096] = 144 MiB A V -iFFT-4k {inplace} +iFFT-64 {inplace} | [station][pol][sample] [48][2][196608] = 144 MiB A V FFT-shift {inplace} diff --git a/RTCP/Cobalt/GPUProc/share/gpu/kernels/DelayAndBandPass.cu b/RTCP/Cobalt/GPUProc/share/gpu/kernels/DelayAndBandPass.cu index 62ef0e89e40a45fbd729e955005375e73a1d8121..3ae289a1258685063d07fcc77493c790ae049ee9 100644 --- a/RTCP/Cobalt/GPUProc/share/gpu/kernels/DelayAndBandPass.cu +++ b/RTCP/Cobalt/GPUProc/share/gpu/kernels/DelayAndBandPass.cu @@ -65,31 +65,7 @@ typedef fcomplex(*OutputDataType)[NR_STATIONS][NR_CHANNELS][NR_SAMPLES_PER_CHAN typedef fcomplex(*OutputDataType)[NR_STATIONS][NR_POLARIZATIONS][NR_CHANNELS][NR_SAMPLES_PER_CHANNEL]; #endif -//# TODO: Unify 
#dims in input type to 4: [NR_SAMPLES_PER_SUBBAND] -> [NR_SAMPLES_PER_CHANNEL][NR_CHANNELS] (see kernel test) -//# It is technically incorrect, but different dims for the same input type is a real pain to use/supply. -//# Also unify order of #chn, #sampl to [NR_SAMPLES_PER_CHANNEL][NR_CHANNELS] -#ifdef INPUT_IS_STATIONDATA -# if NR_BITS_PER_SAMPLE == 16 -typedef short_complex rawSampleType; -typedef short_complex(*InputDataType)[NR_STATIONS][NR_SAMPLES_PER_SUBBAND][NR_POLARIZATIONS]; -#define REAL(sample) sample.x -#define IMAG(sample) sample.y -# elif NR_BITS_PER_SAMPLE == 8 -typedef char_complex rawSampleType; -typedef char_complex(*InputDataType)[NR_STATIONS][NR_SAMPLES_PER_SUBBAND][NR_POLARIZATIONS]; -#define REAL(sample) sample.x -#define IMAG(sample) sample.y -# elif NR_BITS_PER_SAMPLE == 4 -typedef signed char rawSampleType; -typedef signed char (*InputDataType)[NR_STATIONS][NR_SAMPLES_PER_SUBBAND][NR_POLARIZATIONS]; -#define REAL(sample) extractRI(sample, false) -#define IMAG(sample) extractRI(sample, true) -# else -# error unsupported NR_BITS_PER_SAMPLE -# endif -#else typedef fcomplex(*InputDataType)[NR_STATIONS][NR_POLARIZATIONS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS]; -#endif typedef const double(*DelaysType)[NR_SAPS][NR_DELAYS][NR_POLARIZATIONS]; // 2 Polarizations; in seconds typedef const double2(*Phase0sType)[NR_STATIONS]; // 2 Polarizations; in radians typedef const float(*BandPassFactorsType)[NR_CHANNELS]; @@ -124,9 +100,7 @@ inline __device__ fcomplex sincos_d2f(double phi) * of ::complex (2 complex polarizations) * @param[in] filteredDataPtr pointer to input data; this can either be a * 4D array [station][polarization][sample][channel][complex] -* of ::fcomplex, or a 2D array [station][subband][complex] -* of ::short_complex2 or ::char_complex2, -* depending on the value of @c NR_CHANNELS +* of ::fcomplex * @param[in] subbandFrequency center freqency of the subband * @param[in] beam index number of the beam * @param[in] delaysAtBeginPtr pointer to 
delay data of ::DelaysType, @@ -251,17 +225,8 @@ extern "C" { for (unsigned time = timeStart; time < NR_SAMPLES_PER_CHANNEL; time += timeInc) { -#ifdef INPUT_IS_STATIONDATA - const rawSampleType sampleXraw = (*inputData)[station][time][0]; - fcomplex sampleX = make_float2(convertIntToFloat(REAL(sampleXraw)), - convertIntToFloat(IMAG(sampleXraw))); - const rawSampleType sampleYraw = (*inputData)[station][time][1]; - fcomplex sampleY = make_float2(convertIntToFloat(REAL(sampleYraw)), - convertIntToFloat(IMAG(sampleYraw))); -#else fcomplex sampleX = (*inputData)[station][0][time][channel]; fcomplex sampleY = (*inputData)[station][1][time][channel]; -#endif #if defined DELAY_COMPENSATION // Offset of this sample between begin and end. diff --git a/RTCP/Cobalt/GPUProc/share/gpu/kernels/IntToFloat.cu b/RTCP/Cobalt/GPUProc/share/gpu/kernels/IntToFloat.cu index b8e002d8ee5299e1a90a58e72e1fc22b82be0cb8..f0a8fb035c35727cad3470df3cdd702eb4dd2af4 100644 --- a/RTCP/Cobalt/GPUProc/share/gpu/kernels/IntToFloat.cu +++ b/RTCP/Cobalt/GPUProc/share/gpu/kernels/IntToFloat.cu @@ -67,6 +67,8 @@ typedef float2 (*ConvertedDataType)[NR_OUTPUT_STATIONS][NR_POLARIZATIONS][NR * * Optional preprocessor symbols: * - DO_FFTSHIFT, if an fft-shift is to be performed as well + * - DO_STATIONSUBSET, if the stationIndices input array is to be used to select a subset + * of stations. * * Execution configuration: * - Use a 1D thread block. No restrictions. 
@@ -82,8 +84,13 @@ __global__ void intToFloat(void *convertedDataPtr, ConvertedDataType convertedData = (ConvertedDataType)convertedDataPtr; SampledDataType sampledData = (SampledDataType) sampledDataPtr; +#ifdef DO_STATIONSUBSET uint station_in = stationIndices[blockIdx.y]; uint station_out = blockIdx.y; +#else + uint station_in = blockIdx.y; + uint station_out = blockIdx.y; +#endif #ifdef DO_FFTSHIFT // Multiplication factor: 1 for even samples, -1 for odd samples diff --git a/RTCP/Cobalt/GPUProc/share/gpu/kernels/Zeroing.cu b/RTCP/Cobalt/GPUProc/share/gpu/kernels/Zeroing.cu new file mode 100644 index 0000000000000000000000000000000000000000..1da7cecb61f54367cbbebfb3da227f83d39f36ad --- /dev/null +++ b/RTCP/Cobalt/GPUProc/share/gpu/kernels/Zeroing.cu @@ -0,0 +1,53 @@ +//# Zeroing.cu: zero ranges of samples +//# Copyright (C) 2012-2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id$ + +#include "gpu_math.cuh" +#include <stdio.h> + +typedef float2 FilteredDataType[NR_STABS][NR_POLARIZATIONS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS]; + +typedef char MaskType[NR_STABS][NR_SAMPLES_PER_CHANNEL]; + +/** + * Zero samples that have been flagged. 
Clears samples for all channels for + * ranged specified per station. + * + * @param[data] a multi-dimensional array with time samples of type complex + * float in the last dimension. + * @param[mask] an 2D array of bytes, each representing a sample of a station. + * A value of 0 means ignore this sample, a value of 1 means zero this sample. + */ + +extern "C" +{ + __global__ void Zeroing(FilteredDataType data, + MaskType mask) + { + int sample = blockIdx.x * blockDim.x + threadIdx.x; + int channel = (blockIdx.y * blockDim.y + threadIdx.y); + int station = (blockIdx.z * blockDim.z + threadIdx.z) / 2; + int pol = (blockIdx.z * blockDim.z + threadIdx.z) % 2; + + if (mask[station][sample]) { + // Clear our sample + data[station][pol][sample][channel] = make_float2(0.0f, 0.0f); + } + } +} diff --git a/RTCP/Cobalt/GPUProc/src/CMakeLists.txt b/RTCP/Cobalt/GPUProc/src/CMakeLists.txt index fbc4e762035d4ad5f105eecbfe579d76eba16874..2592aa280ea31d17bfb36a72634d897379f82a25 100644 --- a/RTCP/Cobalt/GPUProc/src/CMakeLists.txt +++ b/RTCP/Cobalt/GPUProc/src/CMakeLists.txt @@ -13,6 +13,7 @@ set(_gpuproc_sources CommandThread.cc cpu_utils.cc FilterBank.cc + Flagger.cc global_defines.cc MPIReceiver.cc Package__Version.cc @@ -49,6 +50,7 @@ if(USE_CUDA) cuda/Kernels/IncoherentStokesTransposeKernel.cc cuda/Kernels/IntToFloatKernel.cc cuda/Kernels/FFTShiftKernel.cc + cuda/Kernels/ZeroingKernel.cc #cuda/Kernels/UHEP_BeamFormerKernel.cc #cuda/Kernels/UHEP_InvFFT_Kernel.cc #cuda/Kernels/UHEP_InvFIR_Kernel.cc diff --git a/RTCP/Cobalt/GPUProc/src/Flagger.cc b/RTCP/Cobalt/GPUProc/src/Flagger.cc new file mode 100644 index 0000000000000000000000000000000000000000..57272e2e3800f9d55dd055e8b1083a34ed4fc012 --- /dev/null +++ b/RTCP/Cobalt/GPUProc/src/Flagger.cc @@ -0,0 +1,105 @@ +//# Flagger.cc +//# Copyright (C) 2012-2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. 
+//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id$ + +#include <lofar_config.h> + +#include <GPUProc/Flagger.h> +#include <Common/LofarLogger.h> + +namespace LOFAR +{ + namespace Cobalt + { + void Flagger::convertFlagsToChannelFlags( + MultiDimArray<LOFAR::SparseSet<unsigned>, 1>const &inputFlags, + MultiDimArray<SparseSet<unsigned>, 1>& flagsPerChannel, + const unsigned nrSamples, + const unsigned nrChannels, + const ssize_t nrPrefixedSamples) + { + ASSERT(inputFlags.num_elements() == flagsPerChannel.num_elements()); + + // If nrChannels == 1, we do not expect nrPrefixedSamples + ASSERT(nrChannels > 1 || nrPrefixedSamples == 0); + + unsigned nrSamplesPerChannel = nrSamples / nrChannels; + unsigned log2NrChannels = log2(nrChannels); + + // Convert the flags per sample to flags per channel + for (unsigned station = 0; station < inputFlags.num_elements(); station ++) + { + // reset the channel flags for this station + flagsPerChannel[station].reset(); + + // get the flag ranges + const SparseSet<unsigned>::Ranges &ranges = inputFlags[station].getRanges(); + for (SparseSet<unsigned>::const_iterator it = ranges.begin(); + it != ranges.end(); it ++) + { + unsigned begin_idx; + unsigned end_idx; + if (nrChannels == 1) + { + // do nothing, just take the ranges as supplied + begin_idx = it->begin; + end_idx = 
std::min(nrSamplesPerChannel, it->end); + } + else + { + // Never flag before the start of the time range + // use bitshift to divide to the number of channels. + // + // In case of nrPrefixedSamples, there are FIR Filter + // samples in front of those who we split the flags for. + // In that case, nrPrefixedSamples == NR_TAPS - 1. + // + // NR_TAPS is the width of the filter: they are + // absorbed by the FIR and thus should be excluded + // from the original flag set. + // + // The original flag set can span up to + // [0, nrSamplesPerBlock + nrChannels * (NR_TAPS - 1)) + // of which the FIRST (NR_TAPS - 1) samples belong to + // the previous block, and are used to initialise the + // FIR filter. Every sample i of the current block is thus + // actually at index (i + nrChannels * (NR_TAPS - 1)), + // or, after converting to channels, at index (i' + NR_TAPS - 1). + // + // At the same time, every sample is affected by + // the NR_TAPS-1 samples before it. So, any flagged + // sample in the input flags NR_TAPS samples in + // the channel. + begin_idx = std::max(0L, + (signed) (it->begin >> log2NrChannels) - nrPrefixedSamples); + + // The min is needed, because flagging the last input + // samples would cause NR_TAPS subsequent samples to + // be flagged, which aren't necessarily part of this block. + end_idx = std::min(nrSamplesPerChannel, + ((it->end - 1) >> log2NrChannels) + 1); + } + + // Now copy the transformed ranges to the channelflags + flagsPerChannel[station].include(begin_idx, end_idx); + } + } + } + } +} diff --git a/RTCP/Cobalt/GPUProc/src/Flagger.h b/RTCP/Cobalt/GPUProc/src/Flagger.h new file mode 100644 index 0000000000000000000000000000000000000000..b56b629bf423c4030b55243752f8b988096392d1 --- /dev/null +++ b/RTCP/Cobalt/GPUProc/src/Flagger.h @@ -0,0 +1,48 @@ +//# CorrelatorStep.h +//# Copyright (C) 2012-2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. 
Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id$ + +#ifndef LOFAR_GPUPROC_FLAGGER_H +#define LOFAR_GPUPROC_FLAGGER_H + +#include <CoInterface/MultiDimArray.h> +#include <CoInterface/SparseSet.h> + +namespace LOFAR +{ + namespace Cobalt + { + // Collection of functions to tranfer the input flags to the output. + class Flagger + { + public: + // Convert the flags from one channel to multiple channels, per station. + // If nrChannels > 1, nrPrefixedSamples are assumed to be already + // prepended to the input flags as a result of the FIR-filter history. 
+ static void convertFlagsToChannelFlags( + MultiDimArray<SparseSet<unsigned>, 1>const &inputFlags, + MultiDimArray<SparseSet<unsigned>, 1>& flagsPerChannel, + const unsigned nrSamplesPerChannel, + const unsigned nrChannels, + const ssize_t nrPrefixedSamples); + }; + } +} + +#endif diff --git a/RTCP/Cobalt/GPUProc/src/Kernels/ZeroingKernel.h b/RTCP/Cobalt/GPUProc/src/Kernels/ZeroingKernel.h new file mode 100644 index 0000000000000000000000000000000000000000..b22aa3ae5535e4c7c857305b9bfacbeadf6aafbf --- /dev/null +++ b/RTCP/Cobalt/GPUProc/src/Kernels/ZeroingKernel.h @@ -0,0 +1,41 @@ +//# ZeroingKernel.h +//# +//# Copyright (C) 2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id$ + +// \file +// Include the right GPU API include with our options. 
+ +#ifndef LOFAR_GPUPROC_ZEROING_KERNEL_H +#define LOFAR_GPUPROC_ZEROING_KERNEL_H + +#if defined (USE_CUDA) && defined (USE_OPENCL) +# error "Either CUDA or OpenCL must be enabled, not both" +#endif + +#if defined (USE_CUDA) +# include <GPUProc/cuda/Kernels/ZeroingKernel.h> +#elif defined (USE_OPENCL) +# include <GPUProc/opencl/Kernels/ZeroingKernel.h> +#else +# error "Either CUDA or OpenCL must be enabled, not neither" +#endif + +#endif + diff --git a/RTCP/Cobalt/GPUProc/src/cuda/KernelFactory.h b/RTCP/Cobalt/GPUProc/src/cuda/KernelFactory.h index 5efc4d02212d4a0a6e7d5eae8ec049a431c19912..9fc75474dd12c2915322dc8f775ed1e15a27a0f6 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/KernelFactory.h +++ b/RTCP/Cobalt/GPUProc/src/cuda/KernelFactory.h @@ -99,8 +99,12 @@ namespace LOFAR { // Since we use overlapping input/output buffers, their size // could be larger than we need. - ASSERT(buffers.input.size() >= bufferSize(T::INPUT_DATA)); - ASSERT(buffers.output.size() >= bufferSize(T::OUTPUT_DATA)); + ASSERTSTR(buffers.input.size() >= bufferSize(T::INPUT_DATA), + "Require " << bufferSize(T::INPUT_DATA) << " bytes for input, " + "but buffer is only " << buffers.input.size() << " bytes."); + ASSERTSTR(buffers.output.size() >= bufferSize(T::OUTPUT_DATA), + "Require " << bufferSize(T::OUTPUT_DATA) << " bytes for output, " + "but buffer is only " << buffers.output.size() << " bytes."); return new T( stream, createModule(stream.getContext(), diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BandPassCorrectionKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BandPassCorrectionKernel.cc index 77ed74bc8fcb9a7026733ce61f40813b0933dd8e..32bf4da8a3350d84144d21954e43871b399dec2d 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BandPassCorrectionKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BandPassCorrectionKernel.cc @@ -49,7 +49,7 @@ namespace LOFAR nrStations(ps.settings.beamFormer.antennaFieldNames.size()), 
nrDelayCompensationChannels(ps.settings.beamFormer.nrDelayCompensationChannels), - nrHighResolutionChannels(ps.settings.beamFormer.nrHighResolutionChannels), + nrHighResolutionChannels(ps.settings.beamFormer.nrDelayCompensationChannels), nrSamplesPerChannel(ps.settings.blockSize / nrHighResolutionChannels), correctBandPass(ps.settings.corrections.bandPass) diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerKernel.cc index 410fc4171d95d43b21223aed886386b9783f2e52..ef83105acd97cc22dac1070ddc017ea776869fdd 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerKernel.cc @@ -52,7 +52,7 @@ namespace LOFAR delayIndices(ObservationSettings::AntennaFieldName::indices(ps.settings.beamFormer.antennaFieldNames, ps.settings.antennaFieldNames)), nrDelays(ps.settings.antennaFieldNames.size()), - nrChannels(ps.settings.beamFormer.nrHighResolutionChannels), + nrChannels(ps.settings.beamFormer.nrDelayCompensationChannels), nrSamplesPerChannel(ps.settings.blockSize / nrChannels), nrSAPs(ps.settings.beamFormer.SAPs.size()), diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerTransposeKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerTransposeKernel.cc index fff5f02513c288ab0579cb035949dea99c498c68..994c4eff04219de9c230ea906ff6d37367d6f20a 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerTransposeKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/BeamFormerTransposeKernel.cc @@ -46,7 +46,7 @@ namespace LOFAR BeamFormerTransposeKernel::Parameters::Parameters(const Parset& ps) : Kernel::Parameters("beamFormerTranspose"), - nrChannels(ps.settings.beamFormer.nrHighResolutionChannels), + nrChannels(ps.settings.beamFormer.nrDelayCompensationChannels), nrSamplesPerChannel(ps.settings.blockSize / nrChannels), nrTABs(ps.settings.beamFormer.maxNrCoherentTABsPerSAP()) { diff --git 
a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/CoherentStokesTransposeKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/CoherentStokesTransposeKernel.cc index fa5b80092f7ae07c297983ac29d5a2f0037c2e53..c06839adbf82e7560cf2366d864eaa72ddafece4 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/CoherentStokesTransposeKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/CoherentStokesTransposeKernel.cc @@ -46,7 +46,7 @@ namespace LOFAR CoherentStokesTransposeKernel::Parameters::Parameters(const Parset& ps) : Kernel::Parameters("coherentStokesTranspose"), - nrChannels(ps.settings.beamFormer.nrHighResolutionChannels), + nrChannels(ps.settings.beamFormer.nrDelayCompensationChannels), nrSamplesPerChannel(ps.settings.blockSize / nrChannels), nrTABs(ps.settings.beamFormer.maxNrCoherentTABsPerSAP()) diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.cc index 05cbe311d80d9568bb0046f8bee28c3eed442eb8..6205b45ad6c2e935adaf8f71e52e622bacac7103 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.cc @@ -50,10 +50,6 @@ namespace LOFAR delayIndices(nrStations), nrDelays(ps.settings.antennaFieldNames.size()), nrBitsPerSample(ps.settings.nrBitsPerSample), - inputIsStationData(correlator && ps.settings.correlator.nrChannels == 1 - ? true - : false), - nrChannels(correlator ? ps.settings.correlator.nrChannels : ps.settings.beamFormer.nrDelayCompensationChannels), nrSamplesPerChannel(ps.settings.blockSize / nrChannels), @@ -94,9 +90,7 @@ namespace LOFAR unsigned DelayAndBandPassKernel::Parameters::nrBytesPerComplexSample() const { - return inputIsStationData - ? 
2 * nrBitsPerSample / 8 - : sizeof(std::complex<float>); + return sizeof(std::complex<float>); } @@ -213,9 +207,6 @@ namespace LOFAR defs["NR_BITS_PER_SAMPLE"] = lexical_cast<string>(itsParameters.nrBitsPerSample); - if (itsParameters.inputIsStationData) - defs["INPUT_IS_STATIONDATA"] = "1"; - defs["NR_CHANNELS"] = lexical_cast<string>(itsParameters.nrChannels); defs["NR_SAMPLES_PER_CHANNEL"] = lexical_cast<string>(itsParameters.nrSamplesPerChannel); diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.h b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.h index d5bcd7bb673b5fa8eb789edab75c6abf9b9a1957..a2f051c6f0aef458ac27e3d96d3b58e9d50fb314 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.h +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/DelayAndBandPassKernel.h @@ -60,7 +60,6 @@ namespace LOFAR std::vector<unsigned> delayIndices; unsigned nrDelays; unsigned nrBitsPerSample; - bool inputIsStationData; unsigned nrChannels; unsigned nrSamplesPerChannel; diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IncoherentStokesTransposeKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IncoherentStokesTransposeKernel.cc index 184cb207f90c8f624a7322692bda2cb3ea3deeb8..7ab683197f763edf7f3822d66291be86d4de8223 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IncoherentStokesTransposeKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IncoherentStokesTransposeKernel.cc @@ -44,7 +44,7 @@ namespace LOFAR IncoherentStokesTransposeKernel::Parameters::Parameters(const Parset& ps) : Kernel::Parameters("incoherentStokesTranspose"), nrStations(ps.settings.beamFormer.antennaFieldNames.size()), - nrChannels(ps.settings.beamFormer.nrHighResolutionChannels), + nrChannels(ps.settings.beamFormer.nrDelayCompensationChannels), nrSamplesPerChannel(ps.settings.blockSize / nrChannels), tileSize(16) diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.cc index 
e29f6af6e5cf7bd86832eb27c1d59fa40bf72b80..57717698695df1aa0c066546f7b9f642ee10204b 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.cc @@ -45,17 +45,16 @@ namespace LOFAR string IntToFloatKernel::theirSourceFile = "IntToFloat.cu"; string IntToFloatKernel::theirFunction = "intToFloat"; - IntToFloatKernel::Parameters::Parameters(const Parset& ps) : + IntToFloatKernel::Parameters::Parameters(const Parset& ps, bool fftShift, bool beamFormerStationSubset) : Kernel::Parameters("intToFloat"), nrInputStations(ps.settings.antennaFields.size()), - stationIndices(ObservationSettings::AntennaFieldName::indices(ps.settings.beamFormer.antennaFieldNames, ps.settings.antennaFieldNames)), + stationIndices(beamFormerStationSubset ? ObservationSettings::AntennaFieldName::indices(ps.settings.beamFormer.antennaFieldNames, ps.settings.antennaFieldNames) : vector<unsigned>()), nrBitsPerSample(ps.settings.nrBitsPerSample), nrSamplesPerSubband(ps.settings.blockSize), - fftShift(ps.settings.beamFormer.nrDelayCompensationChannels > 1) + fftShift(fftShift), + doStationSubset(beamFormerStationSubset) { - ASSERTSTR(ps.settings.beamFormer.enabled, "IntToFloatKernel::Parameters assumes it will be used in the beamFormer, but the beamFormer is not enabled."); - dumpBuffers = ps.getBool("Cobalt.Kernels.IntToFloatKernel.dumpOutput", false); dumpFilePattern = @@ -133,6 +132,8 @@ namespace LOFAR if (itsParameters.fftShift) defs["DO_FFTSHIFT"] = "1"; + if (itsParameters.doStationSubset) + defs["DO_STATIONSUBSET"] = "1"; return defs; } diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.h b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.h index 3c212b974f6475565936841e9d0f127afef0aad7..434d6f31b097ee17b94aa685120b47f61e76e5a6 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.h +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/IntToFloatKernel.h @@ -51,7 +51,7 @@ namespace LOFAR // 
IntToFloatKernel class. struct Parameters : Kernel::Parameters { - Parameters(const Parset& ps); + Parameters(const Parset& ps, bool fftShift, bool beamFormerStationSubset); unsigned nrInputStations; std::vector<unsigned> stationIndices; // input station nr for ewch output station @@ -61,10 +61,11 @@ namespace LOFAR unsigned nrSamplesPerSubband; bool fftShift; + bool doStationSubset; size_t bufferSize(BufferType bufferType) const; - unsigned nrOutputStations() const { return stationIndices.size(); } + unsigned nrOutputStations() const { return doStationSubset ? stationIndices.size() : nrInputStations; } }; IntToFloatKernel(const gpu::Stream &stream, diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/ZeroingKernel.cc b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/ZeroingKernel.cc new file mode 100644 index 0000000000000000000000000000000000000000..cb828e4eb085a3aa533e2715ed22362fff90adf2 --- /dev/null +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/ZeroingKernel.cc @@ -0,0 +1,139 @@ +//# ZeroingKernel.cc +//# Copyright (C) 2012-2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+//# +//# $Id$ + +#include <lofar_config.h> + +#include "ZeroingKernel.h" + +#include <GPUProc/gpu_utils.h> +#include <CoInterface/BlockID.h> +#include <CoInterface/Config.h> +#include <CoInterface/SubbandMetaData.h> +#include <Common/lofar_complex.h> +#include <Common/Timer.h> + +#include <boost/lexical_cast.hpp> +#include <boost/format.hpp> + +#include <fstream> +#include <algorithm> + +using boost::lexical_cast; +using boost::format; + +namespace LOFAR +{ + namespace Cobalt + { + string ZeroingKernel::theirSourceFile = "Zeroing.cu"; + string ZeroingKernel::theirFunction = "Zeroing"; + + ZeroingKernel::Parameters::Parameters(const Parset& ps, unsigned nrSTABs, unsigned nrChannels, const std::string &name): + Kernel::Parameters(name), + nrSTABs(nrSTABs), + + nrChannels(nrChannels), + nrSamplesPerChannel(ps.settings.blockSize / nrChannels) + { + dumpBuffers = + ps.getBool("Cobalt.Kernels.ZeroingKernel.dumpOutput", false); + dumpFilePattern = + str(format("L%d_SB%%03d_BL%%03d_ZeroingKernel.dat") % + ps.settings.observationID); + } + + + size_t ZeroingKernel::Parameters::bufferSize(BufferType bufferType) const + { + switch (bufferType) { + case ZeroingKernel::INPUT_DATA: + case ZeroingKernel::OUTPUT_DATA: // fall thru + return (size_t)nrSTABs * NR_POLARIZATIONS * + nrChannels * nrSamplesPerChannel * + sizeof(std::complex<float>); + + case ZeroingKernel::MASK: + return (size_t)nrSTABs * nrSamplesPerChannel; + + default: + THROW(GPUProcException, "Invalid bufferType (" << bufferType << ")"); + } + } + + ZeroingKernel::ZeroingKernel(const gpu::Stream& stream, + const gpu::Module& module, + const Buffers& buffers, + const Parameters& params) : + CompiledKernel(stream, gpu::Function(module, theirFunction), buffers, params), + nrSTABs(params.nrSTABs), + nrSamplesPerChannel(params.nrSamplesPerChannel), + gpuMask(stream.getContext(), params.bufferSize(MASK)), + hostMask(stream.getContext(), params.bufferSize(MASK)), + computeMaskTimer("ZeroingKernel: compute mask", true, 
true) + { + setArg(0, buffers.input); + setArg(1, gpuMask); + + // Number of samples per channel must be even + ASSERT(params.nrSamplesPerChannel % 2 == 0); + + // We definitely want the lowest data dimensions in the same warp. + // The size of the x dimension was tuned manually on a K10, using + // tZeroingKernel | grep mean + setEnqueueWorkSizes( + gpu::Grid(params.nrSamplesPerChannel, params.nrChannels, params.nrSTABs * NR_POLARIZATIONS), + gpu::Block(std::max(1U, 64U / params.nrChannels), params.nrChannels, NR_POLARIZATIONS)); + } + + + void ZeroingKernel::enqueue(const BlockID &blockId, const MultiDimArray<SparseSet<unsigned>, 1> &channelFlags) + { + // marshall flags to GPU host buffer + computeMaskTimer.start(); + for(unsigned station = 0; station < nrSTABs; ++station) { + LOG_DEBUG_STR("Flags for block " << blockId << ", station " << station << ": " << channelFlags[station]); + channelFlags[station].toByteset(hostMask.get<char>() + station * nrSamplesPerChannel, nrSamplesPerChannel); + } + computeMaskTimer.stop(); + + // Copy host buffer to GPU + itsStream.writeBuffer(gpuMask, hostMask, false); + + Kernel::enqueue(blockId); + } + + //-------- Template specializations for KernelFactory --------// + + template<> CompileDefinitions + KernelFactory<ZeroingKernel>::compileDefinitions() const + { + CompileDefinitions defs = + KernelFactoryBase::compileDefinitions(itsParameters); + + defs["NR_STABS"] = lexical_cast<string>(itsParameters.nrSTABs); + defs["NR_CHANNELS"] = lexical_cast<string>(itsParameters.nrChannels); + defs["NR_SAMPLES_PER_CHANNEL"] = + lexical_cast<string>(itsParameters.nrSamplesPerChannel); + + return defs; + } + + } +} diff --git a/RTCP/Cobalt/GPUProc/src/cuda/Kernels/ZeroingKernel.h b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/ZeroingKernel.h new file mode 100644 index 0000000000000000000000000000000000000000..edf360f86f1e4d1cd0d6f83eba65a15b4d770bb4 --- /dev/null +++ b/RTCP/Cobalt/GPUProc/src/cuda/Kernels/ZeroingKernel.h @@ -0,0 +1,98 @@ +//# 
ZeroingKernel.h +//# Copyright (C) 2012-2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. +//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id$ + +#ifndef LOFAR_GPUPROC_CUDA_ZEROING_KERNEL_H +#define LOFAR_GPUPROC_CUDA_ZEROING_KERNEL_H + +#include <Common/Timer.h> + +#include <CoInterface/Parset.h> +#include <CoInterface/MultiDimArray.h> +#include <CoInterface/SparseSet.h> + +#include <GPUProc/Kernels/Kernel.h> +#include <GPUProc/KernelFactory.h> +#include <GPUProc/gpu_wrapper.h> + +namespace LOFAR +{ + namespace Cobalt + { + class ZeroingKernel : public CompiledKernel + { + public: + static std::string theirSourceFile; + static std::string theirFunction; + + enum BufferType + { + INPUT_DATA, + OUTPUT_DATA, + MASK + }; + + // Parameters that must be passed to the constructor of the + // IntToFloatKernel class. + struct Parameters : Kernel::Parameters + { + Parameters(const Parset& ps, unsigned nrSTABs, unsigned nrChannels, const std::string &name = "Zeroing"); + unsigned nrSTABs; + + unsigned nrChannels; + unsigned nrSamplesPerChannel; + + size_t bufferSize(BufferType bufferType) const; + }; + + // Construct a Zeroing kernel. + // \pre The number of samples per channel must be even. 
+ // \pre The product of the number of stations, the number of + // polarizations, the number of channels per subband, and the number of + // samples per channel must be divisible by the maximum number of threads + // per block (typically 1024). + ZeroingKernel(const gpu::Stream &stream, + const gpu::Module &module, + const Buffers &buffers, + const Parameters ¶m); + + // Run the kernel. "channelFlags" is the set of flags, one for each station/tab + void enqueue(const BlockID &blockId, const MultiDimArray<SparseSet<unsigned>, 1> &channelFlags); + + private: + const unsigned nrSTABs; + const unsigned nrSamplesPerChannel; + + // The flag mask per sample (1=flagged, 0=ok) + gpu::DeviceMemory gpuMask; + gpu::HostMemory hostMask; + + NSTimer computeMaskTimer; + }; + + //# -------- Template specializations for KernelFactory -------- #// + + template<> CompileDefinitions + KernelFactory<ZeroingKernel>::compileDefinitions() const; + } + +} + +#endif + diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerCoherentStep.cc b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerCoherentStep.cc index 9b02246211f9db52cb69e6d7d6185f26e4942703..237409c371f26bd2616288855599083a4f051f34 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerCoherentStep.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerCoherentStep.cc @@ -46,13 +46,13 @@ namespace LOFAR coherentTranspose(CoherentStokesTransposeKernel::Parameters(ps)), coherentInverseFFT(FFT_Kernel::Parameters( - ps.settings.beamFormer.nrHighResolutionChannels, + ps.settings.beamFormer.nrDelayCompensationChannels, ps.settings.beamFormer.maxNrCoherentTABsPerSAP() * NR_POLARIZATIONS * ps.settings.blockSize, false, "FFT (coherent, inverse)")), coherentInverseFFTShift(FFTShiftKernel::Parameters(ps, ps.settings.beamFormer.maxNrCoherentTABsPerSAP(), - ps.settings.beamFormer.nrHighResolutionChannels, + ps.settings.beamFormer.nrDelayCompensationChannels, "FFT-shift (coherent, inverse)")), coherentFirFilter( 
diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerIncoherentStep.cc b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerIncoherentStep.cc index 3cd289e16eeb703d751148a6976a713a718ef50c..f370ffe5833d6e8904a5bd07a16441bf34961ebf 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerIncoherentStep.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerIncoherentStep.cc @@ -45,12 +45,12 @@ namespace LOFAR incoherentStokesTranspose(IncoherentStokesTransposeKernel::Parameters(ps)), incoherentInverseFFT(FFT_Kernel::Parameters( - ps.settings.beamFormer.nrHighResolutionChannels, + ps.settings.beamFormer.nrDelayCompensationChannels, ps.settings.beamFormer.antennaFieldNames.size() * NR_POLARIZATIONS * ps.settings.blockSize, false, "FFT (incoherent, inverse)")), incoherentInverseFFTShift(FFTShiftKernel::Parameters(ps, ps.settings.beamFormer.antennaFieldNames.size(), - ps.settings.beamFormer.nrHighResolutionChannels, + ps.settings.beamFormer.nrDelayCompensationChannels, "FFT-shift (incoherent, inverse)")), incoherentFirFilter( diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.cc b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.cc index b2f4ca265050ecb272c54a4a8886d65402e3a399..a9aac05e5e1c927a5f538b3b92381ff5498701fc 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.cc @@ -24,6 +24,7 @@ #include <GPUProc/global_defines.h> #include <GPUProc/gpu_wrapper.h> +#include <GPUProc/Flagger.h> #include <CoInterface/Parset.h> #include <ApplCommon/PosixTime.h> @@ -36,7 +37,10 @@ namespace LOFAR namespace Cobalt { BeamFormerPreprocessingStep::Factories::Factories(const Parset &ps) : - intToFloat(ps), + intToFloat(IntToFloatKernel::Parameters( + ps, + ps.settings.beamFormer.nrDelayCompensationChannels > 1, + true)), firstFFT(FFT_Kernel::Parameters( 
ps.settings.beamFormer.nrDelayCompensationChannels, @@ -48,14 +52,12 @@ namespace LOFAR ps.settings.beamFormer.nrDelayCompensationChannels, "FFT-shift (beamformer)")), - delayCompensation(DelayAndBandPassKernel::Parameters(ps, false)), - - secondFFT(FFT_Kernel::Parameters( - ps.settings.beamFormer.nrHighResolutionChannels / + zeroing(ZeroingKernel::Parameters(ps, + ps.settings.beamFormer.antennaFieldNames.size(), ps.settings.beamFormer.nrDelayCompensationChannels, - ps.settings.beamFormer.antennaFieldNames.size() * NR_POLARIZATIONS * ps.settings.blockSize, - true, - "FFT (beamformer, 2nd)")), + "Zeroing (beamformer)")), + + delayCompensation(DelayAndBandPassKernel::Parameters(ps, false)), bandPassCorrection(BandPassCorrectionKernel::Parameters(ps)) { @@ -69,16 +71,13 @@ namespace LOFAR boost::shared_ptr<gpu::DeviceMemory> i_devA, boost::shared_ptr<gpu::DeviceMemory> i_devB) : - ProcessStep(parset, i_queue) + ProcessStep(parset, i_queue), + flagsPerChannel(boost::extents[parset.settings.antennaFields.size()]) { devA=i_devA; devB=i_devB; (void)context; - doSecondFFT = - (ps.settings.beamFormer.nrHighResolutionChannels / - ps.settings.beamFormer.nrDelayCompensationChannels) > 1; - // intToFloat + FFTShift: A -> B intToFloatKernel = std::auto_ptr<IntToFloatKernel>( factories.intToFloat.create(queue, *devA, *devB)); @@ -87,26 +86,17 @@ namespace LOFAR firstFFT = std::auto_ptr<FFT_Kernel>( factories.firstFFT.create(queue, *devB, *devB)); + // zeroing: B -> B + zeroingKernel = std::auto_ptr<ZeroingKernel>( + factories.zeroing.create(queue, *devB, *devB)); + // delayComp: B -> A delayCompensationKernel = std::auto_ptr<DelayAndBandPassKernel>( factories.delayCompensation.create(queue, *devB, *devA)); - // Only perform second FFTshift and FFT if we have to. 
- if (doSecondFFT) { - - // FFTShift: A -> A - secondFFTShiftKernel = std::auto_ptr<FFTShiftKernel>( - factories.fftShift.create(queue, *devA, *devA)); - - // FFT: A -> A - secondFFT = std::auto_ptr<FFT_Kernel>( - factories.secondFFT.create(queue, *devA, *devA)); - } - // bandPass: A -> B bandPassCorrectionKernel = std::auto_ptr<BandPassCorrectionKernel>( factories.bandPassCorrection.create(queue, *devA, *devB)); - } void BeamFormerPreprocessingStep::writeInput(const SubbandProcInputData &input) @@ -133,18 +123,24 @@ namespace LOFAR firstFFT->enqueue(input.blockID); + // Convert input flags to channel flags + Flagger::convertFlagsToChannelFlags( + input.inputFlags, + flagsPerChannel, + ps.settings.blockSize, + ps.settings.beamFormer.nrDelayCompensationChannels, + 0); + + zeroingKernel->enqueue( + input.blockID, + flagsPerChannel); + // The centralFrequency and SAP immediate kernel args must outlive kernel runs. delayCompensationKernel->enqueue( input.blockID, ps.settings.subbands[input.blockID.globalSubbandIdx].centralFrequency, ps.settings.subbands[input.blockID.globalSubbandIdx].SAP); - if (doSecondFFT) { - secondFFTShiftKernel->enqueue(input.blockID); - - secondFFT->enqueue(input.blockID); - } - bandPassCorrectionKernel->enqueue( input.blockID); } diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.h b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.h index 3492a46b5f98e1a960365ab70403959a61a794bd..cf560c5d9cab0d24d67bd94d6da494fcbee8c42c 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.h +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/BeamFormerPreprocessingStep.h @@ -31,6 +31,8 @@ #include <GPUProc/MultiDimArrayHostBuffer.h> #include <CoInterface/BlockID.h> +#include <CoInterface/SparseSet.h> +#include <CoInterface/MultiDimArray.h> #include "SubbandProcInputData.h" #include "SubbandProcOutputData.h" @@ -42,7 +44,7 @@ #include <GPUProc/Kernels/FFTShiftKernel.h> #include 
<GPUProc/Kernels/FFT_Kernel.h> #include <GPUProc/Kernels/IntToFloatKernel.h> - +#include <GPUProc/Kernels/ZeroingKernel.h> namespace LOFAR { @@ -59,9 +61,9 @@ namespace LOFAR KernelFactory<FFT_Kernel> firstFFT; KernelFactory<FFTShiftKernel> fftShift; - KernelFactory<DelayAndBandPassKernel> delayCompensation; + KernelFactory<ZeroingKernel> zeroing; - KernelFactory<FFT_Kernel> secondFFT; + KernelFactory<DelayAndBandPassKernel> delayCompensation; KernelFactory<BandPassCorrectionKernel> bandPassCorrection; }; @@ -78,6 +80,8 @@ namespace LOFAR void process(const SubbandProcInputData &input); private: + // Preallocated flags for FFT-ed data -- used locally + MultiDimArray<SparseSet<unsigned>, 1> flagsPerChannel; //Data members boost::shared_ptr<gpu::DeviceMemory> devA; @@ -89,20 +93,14 @@ namespace LOFAR // First (64 points) FFT std::auto_ptr<FFT_Kernel> firstFFT; + // Zeroing flagged samples + std::auto_ptr<ZeroingKernel> zeroingKernel; + // Delay compensation std::auto_ptr<DelayAndBandPassKernel> delayCompensationKernel; - // Second FFT-shift - std::auto_ptr<FFTShiftKernel> secondFFTShiftKernel; - - // Second (64 points) FFT - std::auto_ptr<FFT_Kernel> secondFFT; - // Bandpass correction and tranpose std::auto_ptr<BandPassCorrectionKernel> bandPassCorrectionKernel; - - // Flag that indicates if we need to perform a second FFT - bool doSecondFFT; }; } } diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.cc b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.cc index 4807054c07e53cf932a32c452cda909550bbe233..53139adc08151e6f4300013b67edb08621daa396 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.cc @@ -62,91 +62,21 @@ namespace LOFAR "FIR (correlator)")) : NULL), + intToFloat(ps.settings.correlator.nrChannels == 1 + ? 
new KernelFactory<IntToFloatKernel>(IntToFloatKernel::Parameters(ps, false, false)) + : NULL), + + zeroing(ZeroingKernel::Parameters(ps, + ps.settings.antennaFields.size(), + ps.settings.correlator.nrChannels, + "Zeroing (correlator)")), + delayAndBandPass(DelayAndBandPassKernel::Parameters(ps, true)), correlator(ps) { } - void CorrelatorStep::Flagger::convertFlagsToChannelFlags(Parset const &ps, - MultiDimArray<LOFAR::SparseSet<unsigned>, 1>const &inputFlags, - MultiDimArray<SparseSet<unsigned>, 1>& flagsPerChannel) - { - unsigned numberOfChannels = ps.settings.correlator.nrChannels; - unsigned log2NrChannels = log2(numberOfChannels); - //Convert the flags per sample to flags per channel - for (unsigned station = 0; station < ps.settings.correlator.stations.size(); station ++) - { - // get the flag ranges - const SparseSet<unsigned>::Ranges &ranges = inputFlags[station].getRanges(); - for (SparseSet<unsigned>::const_iterator it = ranges.begin(); - it != ranges.end(); it ++) - { - unsigned begin_idx; - unsigned end_idx; - if (numberOfChannels == 1) - { - // do nothing, just take the ranges as supplied - begin_idx = it->begin; - end_idx = std::min(static_cast<unsigned>(ps.settings.correlator.nrSamplesPerBlock), it->end); - } - else - { - // Never flag before the start of the time range - // use bitshift to divide to the number of channels. - // - // NR_TAPS is the width of the filter: they are - // absorbed by the FIR and thus should be excluded - // from the original flag set. - // - // The original flag set can span up to - // [0, nrSamplesPerBlock + nrChannels * (NR_TAPS - 1)) - // of which the FIRST (NR_TAPS - 1) samples belong to - // the previous block, and are used to initialise the - // FIR filter. Every sample i of the current block is thus - // actually at index (i + nrChannels * (NR_TAPS - 1)), - // or, after converting to channels, at index (i' + NR_TAPS - 1). - // - // At the same time, every sample is affected by - // the NR_TAPS-1 samples before it. 
So, any flagged - // sample in the input flags NR_TAPS samples in - // the channel. - begin_idx = std::max(0, - (signed) (it->begin >> log2NrChannels) - NR_TAPS + 1); - - // The min is needed, because flagging the last input - // samples would cause NR_TAPS subsequent samples to - // be flagged, which aren't necessarily part of this block. - end_idx = std::min(static_cast<unsigned>(ps.settings.correlator.nrSamplesPerBlock), - ((it->end - 1) >> log2NrChannels) + 1); - } - - // Now copy the transformed ranges to the channelflags - flagsPerChannel[station].include(begin_idx, end_idx); - } - } - } - - - void CorrelatorStep::Flagger::propagateFlags( - Parset const &parset, - MultiDimArray<LOFAR::SparseSet<unsigned>, 1>const &inputFlags, - SubbandProcOutputData::CorrelatedData &output) - { - // Object for storing transformed flags - MultiDimArray<SparseSet<unsigned>, 1> flagsPerChannel( - boost::extents[parset.settings.antennaFields.size()]); - - // First transform the flags to channel flags: taking in account - // reduced resolution in time and the size of the filter - convertFlagsToChannelFlags(parset, inputFlags, flagsPerChannel); - - // Calculate the number of flags per baseline and assign to - // output object. - calcNrValidSamples(parset, flagsPerChannel, output); - } - - namespace { // Return the baseline number for a pair of stations unsigned baseline(unsigned major, unsigned minor) @@ -305,10 +235,10 @@ namespace LOFAR : ProcessStep(parset, i_queue), correlatorPPF(ps.settings.correlator.nrChannels > 1), - devE(context, correlatorPPF - ? 
std::max(factories.correlator.bufferSize(CorrelatorKernel::INPUT_DATA), - factories.correlator.bufferSize(CorrelatorKernel::OUTPUT_DATA)) - : factories.correlator.bufferSize(CorrelatorKernel::OUTPUT_DATA)), + flagsWithHistorySamples(boost::extents[parset.settings.antennaFields.size()]), + flagsPerChannel(boost::extents[parset.settings.antennaFields.size()]), + devE(context, std::max(factories.correlator.bufferSize(CorrelatorKernel::INPUT_DATA), + factories.correlator.bufferSize(CorrelatorKernel::OUTPUT_DATA))), outputCounter(context, "output (correlator)"), integratedData(nrSubbandsPerSubbandProc) { @@ -321,11 +251,16 @@ namespace LOFAR // FFT: B -> E fftKernel = factories.fft->create(queue, *devB, devE); + } else { + // intToFloat: A -> E + intToFloatKernel = factories.intToFloat->create(queue, *devA, devE); } - // Delay and Bandpass: A/E -> B - delayAndBandPassKernel = std::auto_ptr<DelayAndBandPassKernel>(factories.delayAndBandPass.create(queue, - correlatorPPF ? devE : *devA, *devB)); + // Zeroing: E -> E + zeroingKernel = factories.zeroing.create(queue, devE, devE); + + // Delay and Bandpass: E -> B + delayAndBandPassKernel = std::auto_ptr<DelayAndBandPassKernel>(factories.delayAndBandPass.create(queue, devE, *devB)); // Correlator: B -> E correlatorKernel = std::auto_ptr<CorrelatorKernel>(factories.correlator.create(queue, @@ -360,6 +295,27 @@ namespace LOFAR firFilterKernel->enqueue(input.blockID, input.blockID.subbandProcSubbandIdx); fftKernel->enqueue(input.blockID); + + // Process flags enough to determine which data to zero + flagsWithHistorySamples = input.inputFlags; + + firFilterKernel->prefixHistoryFlags( + flagsWithHistorySamples, input.blockID.subbandProcSubbandIdx); + + Cobalt::Flagger::convertFlagsToChannelFlags( + flagsWithHistorySamples, + flagsPerChannel, + ps.settings.blockSize, + ps.settings.correlator.nrChannels, + NR_TAPS - 1); + + // Zero the output of each FFT that had flagged input samples + zeroingKernel->enqueue(input.blockID, 
flagsPerChannel); + } else { + intToFloatKernel->enqueue(input.blockID); + + // Zero the flagged samples + zeroingKernel->enqueue(input.blockID, input.inputFlags); } // Even if we skip delay compensation and bandpass correction (rare), run @@ -385,16 +341,31 @@ namespace LOFAR void CorrelatorStep::processCPU(const SubbandProcInputData &input, SubbandProcOutputData &output) { // Propagate the flags. - MultiDimArray<LOFAR::SparseSet<unsigned>, 1> flags = input.inputFlags; if (correlatorPPF) { + flagsWithHistorySamples = input.inputFlags; + // Put the history flags in front of the sample flags, - // because Flagger::propagateFlags expects it that way. firFilterKernel->prefixHistoryFlags( - flags, input.blockID.subbandProcSubbandIdx); + flagsWithHistorySamples, input.blockID.subbandProcSubbandIdx); + + // Transform the flags to channel flags: taking in account + // reduced resolution in time and the size of the filter + Cobalt::Flagger::convertFlagsToChannelFlags( + flagsWithHistorySamples, + flagsPerChannel, + ps.settings.blockSize, + ps.settings.correlator.nrChannels, + NR_TAPS - 1); + + // Calculate the number of flags per baseline and assign to + // output object. + Flagger::calcNrValidSamples(ps, flagsPerChannel, output.correlatedData); + } else { + // Calculate the number of flags per baseline and assign to + // output object. 
+ Flagger::calcNrValidSamples(ps, input.inputFlags, output.correlatedData); } - - Flagger::propagateFlags(ps, flags, output.correlatedData); } diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.h b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.h index 23be8fe062ee1eac057bfc370c65cfe849b529ab..4e4c0daba15cfa85d138a8e5948d52b04ee1c34a 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.h +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/CorrelatorStep.h @@ -40,10 +40,13 @@ #include "ProcessStep.h" #include <GPUProc/PerformanceCounter.h> +#include <GPUProc/Flagger.h> #include <GPUProc/KernelFactory.h> #include <GPUProc/Kernels/DelayAndBandPassKernel.h> #include <GPUProc/Kernels/FIR_FilterKernel.h> #include <GPUProc/Kernels/FFT_Kernel.h> +#include <GPUProc/Kernels/IntToFloatKernel.h> +#include <GPUProc/Kernels/ZeroingKernel.h> #include <GPUProc/Kernels/CorrelatorKernel.h> @@ -59,6 +62,9 @@ namespace LOFAR SmartPtr< KernelFactory<FFT_Kernel> > fft; SmartPtr< KernelFactory<FIR_FilterKernel> > firFilter; + SmartPtr< KernelFactory<IntToFloatKernel> > intToFloat; + + KernelFactory<ZeroingKernel> zeroing; KernelFactory<DelayAndBandPassKernel> delayAndBandPass; @@ -86,20 +92,12 @@ namespace LOFAR // \c propagateFlags can be called parallel to the kernels. // After the data is copied from the the shared buffer // \c applyNrValidSamples can be used to weight the visibilities - class Flagger + class Flagger: public Cobalt::Flagger { public: - // 1. 
Convert input flags to channel flags, calculate the amount flagged - // samples and save this in output - static void propagateFlags(Parset const & parset, - MultiDimArray<LOFAR::SparseSet<unsigned>, 1>const &inputFlags, - SubbandProcOutputData::CorrelatedData &output); - // 1.1 Convert the flags per station to channel flags, change time scale // if nchannel > 1 - static void convertFlagsToChannelFlags(Parset const &ps, - MultiDimArray<SparseSet<unsigned>, 1> const &inputFlags, - MultiDimArray<SparseSet<unsigned>, 1> &flagsPerChannel); + // (Uses convertFlagsToChannelFlags) // 2. Calculate the weight based on the number of flags and apply this // weighting to all output values @@ -132,6 +130,12 @@ namespace LOFAR private: const bool correlatorPPF; + // Preallocated flags for input data, to prefix with FIR history -- used locally + MultiDimArray<SparseSet<unsigned>, 1> flagsWithHistorySamples; + + // Preallocated flags for FFT-ed data -- used locally + MultiDimArray<SparseSet<unsigned>, 1> flagsPerChannel; + //Data members boost::shared_ptr<gpu::DeviceMemory> devA; boost::shared_ptr<gpu::DeviceMemory> devB; @@ -147,6 +151,12 @@ namespace LOFAR // FFT SmartPtr<FFT_Kernel> fftKernel; + // IntToFloat (in case of no FFT) + SmartPtr<IntToFloatKernel> intToFloatKernel; + + // Zeroing + SmartPtr<ZeroingKernel> zeroingKernel; + // Delay and Bandpass std::auto_ptr<DelayAndBandPassKernel> delayAndBandPassKernel; diff --git a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/SubbandProcInputData.cc b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/SubbandProcInputData.cc index fa4e3c752050e3c59c80392b68b914ce385a1f03..4a01bdca66162ed159787dfa18903a2d01bb8bb4 100644 --- a/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/SubbandProcInputData.cc +++ b/RTCP/Cobalt/GPUProc/src/cuda/SubbandProcs/SubbandProcInputData.cc @@ -77,7 +77,8 @@ namespace LOFAR // extract and apply the flags inputFlags[station] = metaData.flags; - flagInputSamples(station, metaData); + // NOTE: We do not zero flagged samples 
here anymore, + // as we are using the ZeroingKernel to do so. // extract and assign the delays for the station beams @@ -115,29 +116,6 @@ namespace LOFAR tabDelays[SAP][station][tab] = 0.0; } } - - - // flag the input samples. - void SubbandProcInputData::flagInputSamples(unsigned station, - const SubbandMetaData& metaData) - { - - // Get the size of a sample in bytes. - size_t sizeof_sample = sizeof *inputSamples.origin(); - - // Calculate the number elements to skip when striding over the second - // dimension of inputSamples. - size_t stride = inputSamples[station][0].num_elements(); - - // Zero the bytes in the input data for the flagged ranges. - for(SparseSet<unsigned>::const_iterator it = metaData.flags.getRanges().begin(); - it != metaData.flags.getRanges().end(); ++it) - { - void *offset = inputSamples[station][it->begin].origin(); - size_t size = stride * (it->end - it->begin) * sizeof_sample; - memset(offset, 0, size); - } - } } } diff --git a/RTCP/Cobalt/GPUProc/src/gpu_load.cc b/RTCP/Cobalt/GPUProc/src/gpu_load.cc index b1721d8e0f19aa77a39ffb64ea236bef1fc97f79..bd77798a6e9d50bae733cf4675f578c73c785ebe 100644 --- a/RTCP/Cobalt/GPUProc/src/gpu_load.cc +++ b/RTCP/Cobalt/GPUProc/src/gpu_load.cc @@ -113,18 +113,21 @@ int main(int argc, char **argv) { // Initialize subbands partitioning administration (struct BlockID). We only // do the 1st block of whatever. 
- in.blockID.block = 0; - in.blockID.globalSubbandIdx = 0; - in.blockID.localSubbandIdx = 0; - in.blockID.subbandProcSubbandIdx = 0; - - size_t block(0); - LOG_INFO("Processing ..."); - for (block = 0; block < nrBlocksPerIntegration && !out.emit_correlatedData; block++) { - LOG_DEBUG_STR("Processing block #" << block); - cwq.processSubband(in, out); - cwq.postprocessSubband(out); + for(size_t iteration = 0; iteration < 10; iteration++) { + in.blockID.block = 0; + in.blockID.globalSubbandIdx = 0; + in.blockID.localSubbandIdx = 0; + in.blockID.subbandProcSubbandIdx = 0; + out.emit_correlatedData = false; + + size_t block(0); + + for (block = 0; block < nrBlocksPerIntegration && !out.emit_correlatedData; block++) { + LOG_DEBUG_STR("Processing block #" << block); + cwq.processSubband(in, out); + cwq.postprocessSubband(out); + } } return 0; diff --git a/RTCP/Cobalt/GPUProc/src/scripts/cobalt_functions.sh b/RTCP/Cobalt/GPUProc/src/scripts/cobalt_functions.sh index f570a5b3d816fe8cca7a4ec4b82f72fd62eb6cdd..bc4981a6738521f09b6931e988b39a787f06f72a 100755 --- a/RTCP/Cobalt/GPUProc/src/scripts/cobalt_functions.sh +++ b/RTCP/Cobalt/GPUProc/src/scripts/cobalt_functions.sh @@ -54,29 +54,57 @@ function read_cluster_model { HEADNODE=head.cep4.control.lofar SLURM_PARTITION=cpu SLURM_RESERVATION=cobalt - # Get the nodes in the cobalt reservation. The reservation must be active. - RESVNODES=$(ssh $HEADNODE scontrol show res -o $SLURM_RESERVATION | perl -n -e 'm/Nodes=(.*?) .*State=ACTIVE/ ? print STDOUT $1 : die "No active cobalt reservation found"') - if [ $? 
-eq 0 ]; then - echo "Active reservation '$SLURM_RESERVATION' found, get online nodes in the reservation" - SINFO_FLAGS="--responding --states=idle,mixed,alloc,reserved -n $RESVNODES" - else - echo "WARNING: No reservation '$SLURM_RESERVATION' found, defaulting to all online nodes in partition '$SLURM_PARTITION'" - SINFO_FLAGS="--responding --states=idle,mixed,alloc --partition=$SLURM_PARTITION" + RESVCACHE=$LOFARROOT/var/run/slurmresv.cache + COMPCACHE=$LOFARROOT/var/run/compnodes.cache + + # Get the reserved CEP4 nodes for output writing. Try three methods in order of precedence: + # 1. Get nodes from the cobalt slurm reservation (must have state active) + # 2. Read a cache file with the node list + # 3. Default to a particular set of nodes + echo "Reading the slurm '$SLURM_RESERVATION' reservation.." + RESVNODES=$(ssh $HEADNODE scontrol show res -o $SLURM_RESERVATION | \ + perl -n -e 'm/Nodes=(.*?) .*State=ACTIVE/ ? print STDOUT $1 : die "WARNING: No active reservation found\n"') + if [ -n "$RESVNODES" ]; then + # save in cache + cat <<-CAT > $RESVCACHE + echo "Cache created at $(date)" + RESVNODES="$RESVNODES" + CAT + elif [ -s $RESVCACHE ]; then + echo "Reading the cache file '$RESVCACHE'" + source $RESVCACHE + else + echo "WARNING: No reserved nodes and no cache file found, using defaults" + RESVNODES="cpu[40-44]" fi + echo "Reserved nodes: $RESVNODES" + + # Checking online status: try three methods in order of precedence: + # 1. Check slurm for the node status (sinfo) + # 2. Read a cache file with the node list + # 3. 
Default to a particular set of nodes + echo "Checking online status" + SINFO_FLAGS="--responding --states=idle,mixed,alloc,reserved -n $RESVNODES" COMPUTENODES="$(ssh $HEADNODE sinfo --format=%n.cep4.infiniband.lofar,%T --noheader --sort=N $SINFO_FLAGS | fgrep -v ,draining | cut -f1 -d,)" - # OLD COMPUTENODES="`ssh $HEADNODE sinfo --responding --states=idle,mixed,alloc --format=%n.cep4.infiniband.lofar,%T --noheader --partition=$SLURM_PARTITION --sort=N | fgrep -v ,draining | cut -f1 -d,`" - if [ -z "$COMPUTENODES" ]; then - echo "ERROR: Could not obtain list of available CEP4 nodes. Defaulting to all." - COMPUTENODES="`seq -f "cpu%02.0f.cep4.infiniband.lofar" 1 47`" + if [ -n "$COMPUTENODES" ]; then + # save in cache + cat <<-CAT > $COMPCACHE + echo "Cache created at $(date)" + COMPUTENODES="$COMPUTENODES" + CAT + elif [ -s $COMPCACHE ]; then + echo "Reading the cache file '$COMPCACHE'" + source $COMPCACHE + else + echo "WARNING: No active nodes and no cache file found, using defaults" + COMPUTENODES="`seq -f "cpu%02.0f.cep4.infiniband.lofar" 40 44`" fi + echo -e "Nodes used for output writing:\n${COMPUTENODES}" - GLOBALFS_DIR=/data - - #SLURM=true - SLURM=false # Don't use SLURM for now, let's get it working without it first GLOBALFS=true - DOCKER=false # disabled as outputproc is too slow on docker 1.9.1 (#9522) - + GLOBALFS_DIR=/data + SLURM=false # Don't use SLURM for now, let's get it working without it first + DOCKER=false # disabled as outputproc is too slow on docker 1.9.1 (#9522) OUTPUTPROC_ROOT="`echo '/opt/outputproc-${LOFAR_TAG}' | docker-template`" ;; DRAGNET) diff --git a/RTCP/Cobalt/GPUProc/test/Kernels/CMakeLists.txt b/RTCP/Cobalt/GPUProc/test/Kernels/CMakeLists.txt index f26a7e0014098581dbf51ca0b889cdcdec031388..b57f2cf655f81ab8357305828dc74b88a7b0b568 100644 --- a/RTCP/Cobalt/GPUProc/test/Kernels/CMakeLists.txt +++ b/RTCP/Cobalt/GPUProc/test/Kernels/CMakeLists.txt @@ -32,6 +32,7 @@ if(UNITTEST++_FOUND AND BUILD_TESTING) lofar_add_test(tFFT_Kernel 
tFFT_Kernel.cc) lofar_add_test(tFFTShiftKernel tFFTShiftKernel.cc) lofar_add_test(tFIR_FilterKernel tFIR_FilterKernel.cc) + lofar_add_test(tZeroingKernel tZeroingKernel.cc) lofar_add_test(tKernelFunctions tKernelFunctions.cc) lofar_add_test(tCoherentStokesKernel tCoherentStokesKernel.cc KernelTestHelpers.cc) @@ -44,6 +45,7 @@ if(UNITTEST++_FOUND AND BUILD_TESTING) tFFTShiftKernel tKernelFunctions tCoherentStokesKernel + tZeroingKernel PROPERTIES ENVIRONMENT "LOFARROOT=${PACKAGE_SOURCE_DIR}" ) # This test uses quite a lot of memory, so force it to run serially. diff --git a/RTCP/Cobalt/GPUProc/test/Kernels/tIntToFloatKernel.cc b/RTCP/Cobalt/GPUProc/test/Kernels/tIntToFloatKernel.cc index 1736f9c0aee34b7547d224421ed7239b46a285eb..b7f6e4a8fa45f127f191c7055428d617a2fb4e5c 100644 --- a/RTCP/Cobalt/GPUProc/test/Kernels/tIntToFloatKernel.cc +++ b/RTCP/Cobalt/GPUProc/test/Kernels/tIntToFloatKernel.cc @@ -49,7 +49,7 @@ int main() { gpu::Stream stream(ctx); Parset ps("tIntToFloatKernel.in_parset"); - KernelFactory<IntToFloatKernel> factory(ps); + KernelFactory<IntToFloatKernel> factory(IntToFloatKernel::Parameters(ps, true, true)); size_t nSampledData = factory.bufferSize(IntToFloatKernel::INPUT_DATA) / sizeof(char); size_t sizeSampledData = nSampledData * sizeof(char); diff --git a/RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.cc b/RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.cc new file mode 100644 index 0000000000000000000000000000000000000000..7179ddf2da920930d81881462e8689f549ab69fb --- /dev/null +++ b/RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.cc @@ -0,0 +1,293 @@ +//# tZeroingKernel.cc: test ZeroingKernel class +//# +//# Copyright (C) 2013 ASTRON (Netherlands Institute for Radio Astronomy) +//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +//# +//# This file is part of the LOFAR software suite. 
+//# The LOFAR software suite is free software: you can redistribute it and/or +//# modify it under the terms of the GNU General Public License as published +//# by the Free Software Foundation, either version 3 of the License, or +//# (at your option) any later version. +//# +//# The LOFAR software suite is distributed in the hope that it will be useful, +//# but WITHOUT ANY WARRANTY; without even the implied warranty of +//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//# GNU General Public License for more details. +//# +//# You should have received a copy of the GNU General Public License along +//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +//# +//# $Id$ + +#include <lofar_config.h> + + +#include <lofar_config.h> + +#include <GPUProc/Kernels/ZeroingKernel.h> +#include <GPUProc/MultiDimArrayHostBuffer.h> +#include <CoInterface/BlockID.h> +#include <CoInterface/SubbandMetaData.h> +#include <CoInterface/Config.h> +#include <CoInterface/Parset.h> +#include <Common/LofarLogger.h> + +#include <UnitTest++.h> +#include <boost/format.hpp> +#include <boost/lexical_cast.hpp> +#include <boost/scoped_ptr.hpp> +#include <iostream> +#include <iomanip> +#include <vector> + +using namespace std; +using namespace boost; +using namespace LOFAR; +using namespace LOFAR::Cobalt; + +typedef complex<float> fcomplex; + +// Fixture for testing correct translation of parset values +struct ParsetSUT +{ + size_t nrChannels, nrStations, nrSamples; + + Parset parset; + + ParsetSUT(size_t nrStations, size_t nrChannels, + size_t nrSamples) + : + nrChannels(nrChannels), + nrStations(nrStations), + nrSamples(nrSamples) + { + // 4 for number of stokes + parset.add("Observation.DataProducts.Output_Correlated.enabled", "true"); + parset.add("Cobalt.Correlator.nrChannelsPerSubband", lexical_cast<string>(nrChannels)); + parset.add("Observation.VirtualInstrument.stationList", + str(format("[%d*RS000]") % nrStations)); + 
parset.add("Observation.antennaSet", "LBA_INNER"); + parset.add("Observation.rspBoardList", "[0]"); + parset.add("Observation.rspSlotList", "[0]"); + parset.add("Cobalt.blockSize", + lexical_cast<string>(nrSamples)); + parset.add("Observation.nrBeams", "1"); + parset.add("Observation.Beam[0].subbandList", "[0]"); + parset.add("Observation.DataProducts.Output_Correlated.filenames", "[dummy.raw]"); + parset.add("Observation.DataProducts.Output_Correlated.locations", "[:.]"); + //parset.add(""); //ps.settings.beamFormer.nrDelayCompensationChannels + parset.updateSettings(); + + } +}; + + +struct SUTWrapper : ParsetSUT +{ + gpu::Device device; + gpu::Context context; + gpu::Stream stream; + size_t nrSTABs; + KernelFactory<ZeroingKernel> factory; + MultiDimArrayHostBuffer<fcomplex, 4> hData; + MultiDimArrayHostBuffer<fcomplex, 4> hRefOutput; + gpu::DeviceMemory deviceMemory; + scoped_ptr<ZeroingKernel> kernel; + + SUTWrapper(size_t nrStations, size_t nrChannels, size_t nrSamples) : + ParsetSUT(nrStations, nrChannels, nrSamples), + device(gpu::Platform().devices()[0]), + context(device), + stream(context), + nrSTABs(parset.settings.antennaFields.size()), + factory(ZeroingKernel::Parameters(parset, nrStations, nrChannels)), + hData( + boost::extents[nrStations][NR_POLARIZATIONS][nrSamples / nrChannels][nrChannels], + context), + hRefOutput( + boost::extents[nrStations][NR_POLARIZATIONS][nrSamples / nrChannels][nrChannels], + context), + deviceMemory(context, factory.bufferSize(ZeroingKernel::INPUT_DATA)), + kernel(factory.create(stream, deviceMemory, deviceMemory)) + { + initializeHostBuffers(); + } + + + // Initialize all the elements of the input host buffer to (1, 2) + void initializeHostBuffers() + { + cout << "Kernel buffersize set to: " << factory.bufferSize( + ZeroingKernel::INPUT_DATA) << endl; + cout << "\nInitializing host buffers..." 
<< endl + << " buffers.input.size() = " << setw(7) << deviceMemory.size() << endl + << " hData.size() = " << setw(7) << hData.size() << endl + << " buffers.output.size() = " << setw(7) << deviceMemory.size() + << endl; + CHECK_EQUAL(deviceMemory.size(), hData.size()); + fill(hData.data(), hData.data() + hData.num_elements(), + fcomplex(1.0f, 2.0f)); + fill(hRefOutput.data(), hRefOutput.data() + hRefOutput.num_elements(), + fcomplex(1.0f, 2.0f)); + } + + void runKernel(const MultiDimArray<SparseSet<unsigned>, 1> &channelFlags) + { + // Dummy BlockID + BlockID blockId; + // Copy input data from host- to device buffer synchronously + stream.writeBuffer(deviceMemory, hData, true); + // Launch the kernel + kernel->enqueue(blockId, channelFlags); + // Copy output data from device- to host buffer synchronously + stream.readBuffer(hData, deviceMemory, true); + } + +}; + +// Test if we can succesfully create all necessary classes and run the kernel +TEST(BasicRun) +{ + cout << "running test: BasicRun" << endl; + SUTWrapper sut(2, 1, 4096); + MultiDimArray<SparseSet<unsigned>, 1> channelFlags(boost::extents[sut.nrSTABs]); + + sut.runKernel(channelFlags); +} + +// If we flag nothing, nothing should change +TEST(NothingFlaggedTest) +{ + cout << "running test: NothingFlaggedTest" << endl; + SUTWrapper sut(5, 64, 1024); + MultiDimArray<SparseSet<unsigned>, 1> channelFlags(boost::extents[sut.nrSTABs]); + + // run kernel + cout << "Running kernel" << endl; + sut.runKernel(channelFlags); + + // compare output + cout << "Comparing output" << endl; + CHECK_ARRAY_EQUAL(sut.hRefOutput.data(), + sut.hData.data(), + sut.hData.num_elements()); + cout << "Comparing output: done" << endl; +} + +// If we flag one sample, it should zero in the output +TEST(SingleFlagTest) +{ + cout << "running test: SingleFlagTest" << endl; + + SUTWrapper sut(1, 64, 1024); + MultiDimArray<SparseSet<unsigned>, 1> channelFlags(boost::extents[sut.nrSTABs]); + + // flag a sample + cout << "Flagging 1 sample" << 
endl; + channelFlags[0].include(13); + + // also zero reference output for flagged sample + for (unsigned pol = 0; pol < NR_POLARIZATIONS; pol++) + for (unsigned c = 0; c < sut.nrChannels; c++) + sut.hRefOutput[0][pol][13][c] = 0.0f; + + // run kernel + cout << "Running kernel" << endl; + sut.runKernel(channelFlags); + + // compare output + cout << "Comparing output" << endl; + CHECK_ARRAY_EQUAL(sut.hRefOutput.data(), + sut.hData.data(), + sut.hData.num_elements()); + cout << "Comparing output: done" << endl; +} + +// Flag patterns of input and check if the kernel zeroes the correct samples +TEST(PatternsTest) +{ + cout << "running test: PatternsTest" << endl; + + size_t nrStations[] = { 12, 53, 66, 77, 80 }; + size_t nrSamples = 16384; + size_t nrChannels[] = { 1, 16, 64, 256 }; + + for (unsigned st = 0; st < sizeof nrStations / sizeof nrStations[0]; ++st) + for (unsigned ch = 0; ch < sizeof nrChannels / sizeof nrChannels[0]; ++ch) + { + cout << "******* pattern testing stations: " << nrStations[st] + << " channels: " << nrChannels[ch] << endl; + + SUTWrapper sut(nrStations[st], nrChannels[ch], nrSamples); + MultiDimArray<SparseSet<unsigned>, 1> channelFlags(boost::extents[sut.nrSTABs]); + + // flag samples (with different patterns per station) + cout << "Flagging samples" << endl; + for (unsigned st_z = 0; st_z < nrStations[st]; st_z++) + for (unsigned sample_z = st_z; sample_z < nrSamples/nrChannels[ch]; sample_z += st_z + 1) { + channelFlags[st_z].include(sample_z); + + // also zero reference output + for (unsigned pol = 0; pol < NR_POLARIZATIONS; pol++) + for (unsigned c = 0; c < sut.nrChannels; c++) + sut.hRefOutput[st_z][pol][sample_z][c] = 0.0f; + } + + // run kernel + cout << "Running kernel" << endl; + sut.runKernel(channelFlags); + + // compare output + cout << "Comparing output" << endl; + CHECK_ARRAY_EQUAL(sut.hRefOutput.data(), + sut.hData.data(), + sut.hData.num_elements()); + cout << "Comparing output: done" << endl; + } +} + +// Flag all samples, 
and test GPU performance +TEST(PerformanceTest) +{ + cout << "running test: PerformanceTest" << endl; + + size_t nrStations = 80; + size_t nrSamples = 196608; + size_t nrChannels[] = { 1, 16, 64, 256 }; + + for (unsigned ch = 0; ch < sizeof nrChannels / sizeof nrChannels[0] ; ++ch) + { + cout << "******* performance testing stations: " << nrStations + << " channels: " << nrChannels[ch] << endl; + + SUTWrapper sut(nrStations, nrChannels[ch], nrSamples); + MultiDimArray<SparseSet<unsigned>, 1> channelFlags(boost::extents[sut.nrSTABs]); + + // flag all samples (worst case) + cout << "Flagging samples" << endl; + for (unsigned st_z = 0; st_z < nrStations; st_z++) + channelFlags[st_z].include(0, nrSamples / nrChannels[ch]); + + // run kernel + cout << "Running kernel" << endl; + for (int n = 0; n < 10; ++n) + sut.runKernel(channelFlags); + } +} + + +int main() +{ + INIT_LOGGER("tZeroingKernel"); + + try { + gpu::Platform pf; + } + catch (gpu::GPUException&) { + cerr << "No GPU device(s) found. Skipping tests." << endl; + return 3; + } + return UnitTest::RunAllTests() == 0 ? 
0 : 1; + +} + diff --git a/RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.sh b/RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.sh new file mode 100755 index 0000000000000000000000000000000000000000..1b06659b6bad332519888659b0a76bffb26a9a7b --- /dev/null +++ b/RTCP/Cobalt/GPUProc/test/Kernels/tZeroingKernel.sh @@ -0,0 +1,3 @@ +#!/bin/bash +./runctest.sh tZeroingKernel + diff --git a/RTCP/Cobalt/GPUProc/test/Storage/tStorageProcesses.queue b/RTCP/Cobalt/GPUProc/test/Storage/tStorageProcesses.queue index 9b34b8552cfa076fc3f6bbba1df4442d72b4376f..3059e070fc62184337122657931702d02099c60c 100644 --- a/RTCP/Cobalt/GPUProc/test/Storage/tStorageProcesses.queue +++ b/RTCP/Cobalt/GPUProc/test/Storage/tStorageProcesses.queue @@ -32,6 +32,8 @@ Observation.DataProducts.Output_Correlated_[0].percentageWritten=0 Observation.DataProducts.Output_Correlated_[0].size=0 Observation.DataProducts.Output_Correlated_[0].startTime=2015-01-01 00:00:00 Observation.DataProducts.Output_Correlated_[0].stationSubband=0 +Observation.DataProducts.Output_Correlated_[0].storageWriter=LOFAR +Observation.DataProducts.Output_Correlated_[0].storageWriterVersion=3 Observation.DataProducts.Output_Correlated_[0].subband=0 </payload> </message> diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/CMakeLists.txt b/RTCP/Cobalt/GPUProc/test/SubbandProcs/CMakeLists.txt index c906aeb43a7312c84ad702ba7c63d033cc73a09c..4a693506fbf7af9f111e17e629077a78c26d2de3 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/CMakeLists.txt +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/CMakeLists.txt @@ -5,14 +5,13 @@ include(LofarCTest) if(UNITTEST++_FOUND) lofar_add_test(tCorrelatorSubbandProc tCorrelatorSubbandProc.cc) lofar_add_test(tCorrelatorStep tCorrelatorStep.cc) + lofar_add_test(tCorrelatorSubbandProcProcessSb tCorrelatorSubbandProcProcessSb.cc) endif() # This test is instable. 
Added to issue tracker: https://support.astron.nl/lofar_issuetracker/issues/5807 # Understand the proble lofar_add_test(tBeamFormerSubbandProcProcessSb tBeamFormerSubbandProcProcessSb.cc) -lofar_add_test(tCorrelatorSubbandProcProcessSb - tCorrelatorSubbandProcProcessSb.cc) lofar_add_test(tCoherentStokesBeamFormerSubbandProcProcessSb tCoherentStokesBeamFormerSubbandProcProcessSb.cc ../Kernels/KernelTestHelpers.cc) lofar_add_test(tFlysEyeBeamFormerSubbandProcProcessSb diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tBeamFormerSubbandProcProcessSb.cc b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tBeamFormerSubbandProcProcessSb.cc index 51bafeb59c04f534db6cc0c8d4193602493c4f8c..b3df8475e33705d1a39f405574af16ae8f47897c 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tBeamFormerSubbandProcProcessSb.cc +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tBeamFormerSubbandProcProcessSb.cc @@ -86,8 +86,7 @@ int main() { const size_t nrBitsPerSample = ps.settings.nrBitsPerSample; const size_t nrBytesPerComplexSample = ps.nrBytesPerComplexSample(); - const unsigned fft1Size = ps.settings.beamFormer.nrDelayCompensationChannels; - const unsigned fft2Size = ps.settings.beamFormer.nrHighResolutionChannels / fft1Size; + const unsigned fftSize = ps.settings.beamFormer.nrDelayCompensationChannels; // We only support 8-bit or 16-bit input samples ASSERT(nrBitsPerSample == 8 || nrBitsPerSample == 16); @@ -105,8 +104,7 @@ int main() { "\n nrSamplesPerSubband = " << nrSamplesPerSubband << "\n nrBitsPerSample = " << nrBitsPerSample << "\n nrBytesPerComplexSample = " << nrBytesPerComplexSample << - "\n fft1Size = " << fft1Size << - "\n fft2Size = " << fft2Size); + "\n fftSize = " << fftSize); // Create very simple kernel programs, with predictable output. Skip as much // as possible. Nr of channels/sb from the parset is 1, so the PPF will not @@ -186,7 +184,7 @@ int main() { // Coherent Stokes takes the stokes of the sums of all fields (stokes(sum(x))). 
// We can calculate the expected output values, since we're supplying a // complex sine/cosine input signal. We only have Stokes-I, so the output - // should be: nrStations * (amp * scaleFactor * fft1Size * fft2Size) ** 2 + // should be: nrStations * (amp * scaleFactor * fftSize) ** 2 // - amp is set to the maximum possible value for the bit-mode: // i.e. 127 for 8-bit and 32767 for 16-bit mode // - scaleFactor is the scaleFactor applied by the IntToFloat kernel. @@ -195,7 +193,7 @@ int main() { // - for 16-bit input: (2 * 32767 * 1 * 64 * 64)^2 = 72053196058525696 // - for 8-bit input: (2 * 127 * 16 * 64 * 64)^2 = 277094110068736 - float coh_outVal = sqr(nrBFStations * amplitude * scaleFactor * fft1Size * fft2Size); + float coh_outVal = sqr(nrBFStations * amplitude * scaleFactor * fftSize); cout << "coherent outVal = " << coh_outVal << endl; for (size_t t = 0; t < ps.settings.beamFormer.coherentSettings.nrSamples; t++) @@ -210,7 +208,7 @@ int main() { // Incoherent Stokes sums the stokes of each field (sum(stokes(x))). // We can calculate the expected output values, since we're supplying a // complex sine/cosine input signal. We only have Stokes-I, so the output - // should be: nrStation * (amp * scaleFactor * fft1Size * fft2Size)^2 + // should be: nrStation * (amp * scaleFactor * fftSize)^2 // - amp is set to the maximum possible value for the bit-mode: // i.e. 127 for 8-bit and 32767 for 16-bit mode // - scaleFactor is the scaleFactor applied by the IntToFloat kernel. 
@@ -219,7 +217,7 @@ int main() { // - for 16-bit input: 2 * (32767 * 1 * 64 * 64)^2 = 36026598029262848 // - for 8-bit input: 2 * (127 * 16 * 64 * 64)^2 = 138547055034368 - float incoh_outVal = nrBFStations * sqr(amplitude * scaleFactor * fft1Size * fft2Size); + float incoh_outVal = nrBFStations * sqr(amplitude * scaleFactor * fftSize); cout << "incoherent outVal = " << incoh_outVal << endl; for (size_t t = 0; t < ps.settings.beamFormer.incoherentSettings.nrSamples; t++) diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCoherentStokesBeamFormerSubbandProcProcessSb.cc b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCoherentStokesBeamFormerSubbandProcProcessSb.cc index 377400a75b973c35a71f8bbd317b4f2388c02473..3220febe8e078307ae10cb9d5038da035d42ad8f 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCoherentStokesBeamFormerSubbandProcProcessSb.cc +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCoherentStokesBeamFormerSubbandProcProcessSb.cc @@ -99,8 +99,7 @@ int main(/*int argc, char *argv[]*/) { const size_t nrBitsPerSample = ps.settings.nrBitsPerSample; const size_t nrBytesPerComplexSample = ps.nrBytesPerComplexSample(); - const unsigned fft1Size = ps.settings.beamFormer.nrDelayCompensationChannels; - const unsigned fft2Size = ps.settings.beamFormer.nrHighResolutionChannels / fft1Size; + const unsigned fftSize = ps.settings.beamFormer.nrDelayCompensationChannels; // We only support 8-bit or 16-bit input samples ASSERT(nrBitsPerSample == 8 || nrBitsPerSample == 16); @@ -118,8 +117,7 @@ int main(/*int argc, char *argv[]*/) { "\n nrSamplesPerSubband = " << nrSamplesPerSubband << "\n nrBitsPerSample = " << nrBitsPerSample << "\n nrBytesPerComplexSample = " << nrBytesPerComplexSample << - "\n fft1Size = " << fft1Size << - "\n fft2Size = " << fft2Size); + "\n fftSize = " << fftSize); // Output array sizes const size_t nrStokes = ps.settings.beamFormer.coherentSettings.nrStokes; @@ -210,7 +208,7 @@ int main(/*int argc, char *argv[]*/) { // We can calculate the expected output 
values, since we're supplying a // complex sine/cosine input signal. We only have Stokes-I, so the output - // should be: (nrStations * amp * scaleFactor * fft1Size * fft2Size) ** 2 + // should be: (nrStations * amp * scaleFactor * fftSize) ** 2 // - amp is set to the maximum possible value for the bit-mode: // i.e. 127 for 8-bit and 32767 for 16-bit mode // - scaleFactor is the scaleFactor applied by the IntToFloat kernel. @@ -220,8 +218,8 @@ int main(/*int argc, char *argv[]*/) { // - for 8-bit input: (5 * 127 * 16 * 64 * 64) ** 2 = 1731838187929600 float outVal = - (nrStations * amplitude * scaleFactor * fft1Size * fft2Size) * - (nrStations * amplitude * scaleFactor * fft1Size * fft2Size); + (nrStations * amplitude * scaleFactor * fftSize) * + (nrStations * amplitude * scaleFactor * fftSize); cout << "outVal = " << setprecision(12) << outVal << endl; // Skip output validation when started with commandline parsed parameters! diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorStep.cc b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorStep.cc index 9cd96a11db0d31c4b1c5a11526a49b8cb11be868..9f435aab1e240a3bf4070a9f9d9621571f3ae2a0 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorStep.cc +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorStep.cc @@ -76,7 +76,13 @@ TEST(convertFlagsToChannelFlags) boost::extents[parset.settings.antennaFields.size()]); // ****** perform the translation - CorrelatorStep::Flagger::convertFlagsToChannelFlags(parset, inputFlags, flagsPerChannel); + CorrelatorStep::Flagger::convertFlagsToChannelFlags( + inputFlags, + flagsPerChannel, + parset.settings.blockSize, + parset.settings.correlator.nrChannels, + NR_TAPS - 1 + ); // ****** //validate the corner cases @@ -136,8 +142,24 @@ TEST(propagateFlags) BudgetTimer processCPUTimer("processCPU", parset.settings.blockDuration() / nrSubbandsPerSubbandProc, true, true); + //propageFlags: exercise the functionality + MultiDimArray<LOFAR::SparseSet<unsigned>, 1> 
flagsPerChannel(boost::extents[parset.settings.antennaFields.size()]); + processCPUTimer.start(); - CorrelatorStep::Flagger::propagateFlags(parset, flags, correlatedData); + + // Compress input flags to flags per channel + Flagger::convertFlagsToChannelFlags( + flags, + flagsPerChannel, + parset.settings.blockSize, + parset.settings.correlator.nrChannels, + + // we don't have a FIR history to prepend + 0); + + // calculate nr of valid samples per baseline + CorrelatorStep::Flagger::calcNrValidSamples(parset, flagsPerChannel, correlatedData); + processCPUTimer.stop(); } diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProc.cc b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProc.cc index 5c137cc629788d5d566151d241fb71602e7663ea..6b13abf34283a269158a4134ca684a166ef6f258 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProc.cc +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProc.cc @@ -87,7 +87,20 @@ TEST(propagateFlags) // ********************************************************************************************* //propageFlags: exercise the functionality - CorrelatorStep::Flagger::propagateFlags(parset, inputFlags, output); + MultiDimArray<SparseSet<unsigned>, 1> flagsPerChannel(boost::extents[parset.settings.antennaFields.size()]); + + // Compress input flags to flags per channel + Cobalt::Flagger::convertFlagsToChannelFlags( + inputFlags, + flagsPerChannel, + parset.settings.blockSize, + parset.settings.correlator.nrChannels, + + // We don't prepend FIR history, but we need it to obtain the right weights + NR_TAPS - 1); + + // calculate nr of valid samples per baseline + CorrelatorStep::Flagger::calcNrValidSamples(parset, flagsPerChannel, output); // now perform weighting of the data based on the number of valid samples CorrelatorStep::Flagger::applyNrValidSamples(parset, *output.subblocks[0]); diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.cc 
b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.cc index bd14f401529520869f2da3a006c2cef9266c9ed9..6244d056365b1a0e5e47135dc73973ac2e16ff4b 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.cc +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.cc @@ -29,148 +29,293 @@ #include <GPUProc/SubbandProcs/KernelFactories.h> #include <GPUProc/SubbandProcs/SubbandProc.h> +#include <UnitTest++.h> + using namespace std; using namespace LOFAR; using namespace LOFAR::Cobalt; +struct SubbandProcWrapper { + // Create very simple kernel programs, with predictable output. Skip as much + // as possible. Nr of channels/sb from the parset is 1, so the PPF will not + // even run. Parset also has turned of delay compensation and bandpass + // correction (but that kernel will run to convert int to float and to + // transform the data order). + + gpu::Device device; + gpu::Context ctx; + + Parset ps; + KernelFactories factories; + SubbandProc cwq; + SubbandProcInputData in; + SubbandProcOutputData out; + + fcomplex inputValue; + + SubbandProcWrapper(Parset &ps): + device(0), + ctx(device), + ps(ps), + factories(ps, 1), + cwq(ps, ctx, factories), + in(ps.settings.SAPs.size(), + ps.settings.antennaFields.size(), + ps.settings.nrPolarisations, + ps.settings.beamFormer.maxNrTABsPerSAP(), + ps.settings.blockSize, + ps.nrBytesPerComplexSample(), + ctx), + out(ps, ctx), + + inputValue(1,1) + { + // Input info + const size_t nrBeams = ps.settings.SAPs.size(); + const size_t nrStations = ps.settings.antennaFields.size(); + const size_t nrPolarisations = ps.settings.nrPolarisations; + const size_t maxNrTABsPerSAP = ps.settings.beamFormer.maxNrTABsPerSAP(); + const size_t nrSamplesPerIntegration = ps.settings.correlator.nrSamplesPerIntegration(); + const size_t nrSamplesPerSubband = ps.settings.blockSize; + const size_t nrBitsPerSample = ps.settings.nrBitsPerSample; + const size_t nrBytesPerComplexSample = 
ps.nrBytesPerComplexSample(); + + // We only support 8-bit or 16-bit input samples + ASSERT(nrBitsPerSample == 8 || nrBitsPerSample == 16); + + // Output info + const size_t nrBaselines = ps.nrBaselines(); + const size_t nrBlocksPerIntegration = + ps.settings.correlator.nrBlocksPerIntegration; + const size_t nrChannelsPerSubband = ps.settings.correlator.nrChannels; + const size_t integrationSteps = ps.settings.correlator.nrSamplesPerIntegration(); + + LOG_INFO_STR( + "\nInput info:" << + "\n nrBeams = " << nrBeams << + "\n nrStations = " << nrStations << + "\n nrPolarisations = " << nrPolarisations << + "\n maxNrTABsPerSAP = " << maxNrTABsPerSAP << + "\n nrSamplesPerIntegration = " << nrSamplesPerIntegration << + "\n nrSamplesPerSubband = " << nrSamplesPerSubband << + "\n nrBitsPerSample = " << nrBitsPerSample << + "\n nrBytesPerComplexSample = " << nrBytesPerComplexSample << + "\n ----------------------------" << + "\n Total bytes = " << in.inputSamples.size()); + + LOG_INFO_STR( + "\nOutput info:" << + "\n nrBaselines = " << nrBaselines << + "\n nrBlockPerIntegration = " << nrBlocksPerIntegration << + "\n nrChannelsPerSubband = " << nrChannelsPerSubband << + "\n integrationSteps = " << integrationSteps << + "\n ----------------------------" << + "\n Total bytes = " << out.correlatedData.subblocks[0]->visibilities.size()); + + // Initialize synthetic input to all (1, 1). + for (size_t st = 0; st < nrStations; st++) + for (size_t i = 0; i < nrSamplesPerSubband; i++) + for (size_t pol = 0; pol < nrPolarisations; pol++) + setInputValue(st, i, pol, inputValue); + + // Initialize subbands partitioning administration (struct BlockID). We only + // do the 1st block of whatever. + in.blockID.block = 0; + in.blockID.globalSubbandIdx = 0; + in.blockID.localSubbandIdx = 0; + in.blockID.subbandProcSubbandIdx = 0; + + // Initialize delays. We skip delay compensation, but init anyway, + // so we won't copy uninitialized data to the device. 
+ for (size_t i = 0; i < in.delaysAtBegin.size(); i++) + in.delaysAtBegin.get<float>()[i] = 0.0f; + for (size_t i = 0; i < in.delaysAfterEnd.size(); i++) + in.delaysAfterEnd.get<float>()[i] = 0.0f; + for (size_t i = 0; i < in.phase0s.size(); i++) + in.phase0s.get<float>()[i] = 0.0f; + } + + void setInputValue(unsigned station, unsigned t, unsigned pol, fcomplex value) { + const size_t nrBitsPerSample = ps.settings.nrBitsPerSample; + + switch(nrBitsPerSample) { + case 8: + reinterpret_cast<i8complex&>(in.inputSamples[station][t][pol][0]) = + i8complex(value); + break; + case 16: + reinterpret_cast<i16complex&>(in.inputSamples[station][t][pol][0]) = + i16complex(value); + break; + } + } + + void process() { + const ssize_t nrBlocksPerIntegration = + ps.settings.correlator.nrBlocksPerIntegration; + + ssize_t block(0); + + LOG_INFO("Processing ..."); + for (block = -1; block < nrBlocksPerIntegration; block++) { + LOG_DEBUG_STR("Processing block #" << block); + + in.blockID.block = block; + cwq.processSubband(in, out); + if (block >= 0) + cwq.postprocessSubband(out); + } + ASSERT(block == nrBlocksPerIntegration); + ASSERT(out.emit_correlatedData); + } + + fcomplex outputValue() const { + // The output is the correlation-product of two inputs (with identical + // `inputValue`). + + const size_t scaleFactor = ps.settings.nrBitsPerSample == 16 ? 
1 : 16; + + return norm(inputValue) * scaleFactor * scaleFactor; + } + + void verifyOutput(const fcomplex expectedValue) const { + // we don't process the FFT in our reference calculations yet + ASSERT(ps.settings.correlator.nrChannels == 1); + + LOG_INFO("Verifying output ..."); + for (size_t b = 0; b < ps.nrBaselines(); b++) + for (size_t c = 0; c < ps.settings.correlator.nrChannels; c++) + for (size_t pol0 = 0; pol0 < ps.settings.nrPolarisations; pol0++) + for (size_t pol1 = 0; pol1 < ps.settings.nrPolarisations; pol1++) + ASSERTSTR(fpEquals(out.correlatedData.subblocks[0]->visibilities[b][c][pol0][pol1], expectedValue), + "out[" << b << "][" << c << "][" << pol0 << + "][" << pol1 << "] = " << out.correlatedData.subblocks[0]->visibilities[b][c][pol0][pol1] << + "; expectedValue = " << expectedValue); + } +}; + +// Test the output on clean data -- should produce normal output +TEST(output_noflags_uniform) { + Parset ps("tCorrelatorSubbandProcProcessSb.parset"); + SubbandProcWrapper wrapper(ps); + + wrapper.process(); + wrapper.verifyOutput(wrapper.outputValue()); +} + +// Test the output on clean data with a peak -- should produce elevated output +TEST(output_noflags_peak) { + Parset ps("tCorrelatorSubbandProcProcessSb.parset"); + SubbandProcWrapper wrapper(ps); + + // Replace one value with an extreme value to know whether it's actually skipped + for(size_t st = 0; st < ps.settings.antennaFields.size(); st++) { + wrapper.setInputValue(st, 13, 0, fcomplex(100,0)); + wrapper.setInputValue(st, 13, 1, fcomplex(100,0)); + } + + // Each unmodified input value of 1+1i produces a correlation of (1+1i)(1-1i) = 2 + // Each modified input value of 100 produces a correlation of 100 * 100 = 1e4 + // All correlations are averaged and scaled up. 
+ + wrapper.process(); + wrapper.verifyOutput(3011.5); // = ((blockSize-1) * 2 + 1e4) / blockSize * scaleFactor * scaleFactor +} + +// Test the output on clean data with a flaged peak -- should produce normal output +TEST(output_flags) { + Parset ps("tCorrelatorSubbandProcProcessSb.parset"); + SubbandProcWrapper wrapper(ps); + + // Flag one sample + wrapper.in.inputFlags[0].include(13); + // Replace it with an extreme value to know whether it's actually skipped + wrapper.setInputValue(0, 13, 0, fcomplex(100,100)); + wrapper.setInputValue(0, 13, 1, fcomplex(100,100)); + + // process + wrapper.process(); + wrapper.verifyOutput(wrapper.outputValue()); +} + +// Test the final weights if there is flagged data +TEST(weights_flags_1ch) { + // Override nr channels to 1 + Parset ps("tCorrelatorSubbandProcProcessSb.parset"); + ps.replace("Cobalt.Correlator.nrChannelsPerSubband", "1"); + ps.updateSettings(); + SubbandProcWrapper wrapper(ps); + + // Flag one sample + wrapper.in.inputFlags[0].include(13); + + // process + wrapper.process(); + + // We reuse the flagged sample in all of the blocks, so + // we actually lose "nrBlocks" samples. + const unsigned nrBlocks = ps.settings.correlator.nrBlocksPerIntegration; + const unsigned nrValidSamples = wrapper.ps.settings.correlator.nrSamplesPerIntegration(); + const unsigned nrValidSamplesFlagged = + nrValidSamples + // we lose 1 sample per block for each flagged sample + - nrBlocks * 1; + + LOG_INFO("Verifying output weights ..."); + for (size_t b = 0; b < wrapper.ps.nrBaselines(); b++) { + // baseline 0 and 1 contain station 0 with a flagged sample. + unsigned expected = b < 2 ? 
nrValidSamplesFlagged : nrValidSamples; + ASSERTSTR(wrapper.out.correlatedData.subblocks[0]->getNrValidSamples(b, 0) == expected, + "nrValidSamples[" << b << "][0] = " + << wrapper.out.correlatedData.subblocks[0]->getNrValidSamples(b, 0) + << "; expected " << expected); + } +} + +// Test the final weights after FFT if there is flagged data +TEST(weights_flags_64ch) { + // Override nr channels to 64 + Parset ps("tCorrelatorSubbandProcProcessSb.parset"); + ps.replace("Cobalt.Correlator.nrChannelsPerSubband", "64"); + ps.updateSettings(); + SubbandProcWrapper wrapper(ps); + + // Flag one sample + wrapper.in.inputFlags[0].include(13); + + // process + wrapper.process(); + + // We reuse the flagged sample in all of the blocks, so + // we actually lose "nrBlocks" samples. + const unsigned nrBlocks = ps.settings.correlator.nrBlocksPerIntegration; + const unsigned nrValidSamples = wrapper.ps.settings.correlator.nrSamplesPerIntegration(); + const unsigned nrValidSamplesFlagged = + nrValidSamples + // we lose "NR_TAPS" samples per block for each flagged sample + - nrBlocks * NR_TAPS; + + LOG_INFO("Verifying output weights ..."); + for (size_t b = 0; b < wrapper.ps.nrBaselines(); b++) + for (size_t c = 1; c < wrapper.ps.settings.correlator.nrChannels; c++) { + // baseline 0 and 1 contain station 0 with a flagged sample. + unsigned expected = b < 2 ? 
nrValidSamplesFlagged : nrValidSamples; + ASSERTSTR(wrapper.out.correlatedData.subblocks[0]->getNrValidSamples(b, c) == expected, + "nrValidSamples[" << b << "][" << c << "] = " + << wrapper.out.correlatedData.subblocks[0]->getNrValidSamples(b, c) + << "; expected " << expected); + } +} + int main() { INIT_LOGGER("tCorrelatorSubbandProcProcessSb"); try { gpu::Platform pf; - cout << "Detected " << pf.size() << " CUDA devices" << endl; + return UnitTest::RunAllTests() > 0; } catch (gpu::CUDAException& e) { cerr << e.what() << endl; return 3; } - - gpu::Device device(0); - vector<gpu::Device> devices(1, device); - gpu::Context ctx(device); - - Parset ps("tCorrelatorSubbandProcProcessSb.parset"); - - // Input info - const size_t nrBeams = ps.settings.SAPs.size(); - const size_t nrStations = ps.settings.antennaFields.size(); - const size_t nrPolarisations = ps.settings.nrPolarisations; - const size_t maxNrTABsPerSAP = ps.settings.beamFormer.maxNrTABsPerSAP(); - const size_t nrSamplesPerChannel = ps.settings.correlator.nrSamplesPerIntegration(); - const size_t nrSamplesPerSubband = ps.settings.blockSize; - const size_t nrBitsPerSample = ps.settings.nrBitsPerSample; - const size_t nrBytesPerComplexSample = ps.nrBytesPerComplexSample(); - const fcomplex inputValue(1,1); - - // We only support 8-bit or 16-bit input samples - ASSERT(nrBitsPerSample == 8 || nrBitsPerSample == 16); - - // Output info - const size_t nrBaselines = nrStations * (nrStations + 1) / 2; - const size_t nrBlocksPerIntegration = - ps.settings.correlator.nrBlocksPerIntegration; - const size_t nrChannelsPerSubband = ps.settings.correlator.nrChannels; - const size_t integrationSteps = ps.settings.correlator.nrSamplesPerIntegration(); - const size_t scaleFactor = nrBitsPerSample == 16 ? 1 : 16; - - // The output is the correlation-product of two inputs (with identical - // `inputValue`). 
- const fcomplex outputValue = - norm(inputValue) * scaleFactor * scaleFactor; - - // Create very simple kernel programs, with predictable output. Skip as much - // as possible. Nr of channels/sb from the parset is 1, so the PPF will not - // even run. Parset also has turned of delay compensation and bandpass - // correction (but that kernel will run to convert int to float and to - // transform the data order). - - KernelFactories factories(ps, 1); - SubbandProc cwq(ps, ctx, factories); - - SubbandProcInputData in( - nrBeams, nrStations, nrPolarisations, maxNrTABsPerSAP, - nrSamplesPerSubband, nrBytesPerComplexSample, ctx); - - SubbandProcOutputData out(ps, ctx); - - LOG_INFO_STR( - "\nInput info:" << - "\n nrBeams = " << nrBeams << - "\n nrStations = " << nrStations << - "\n nrPolarisations = " << nrPolarisations << - "\n maxNrTABsPerSAP = " << maxNrTABsPerSAP << - "\n nrSamplesPerChannel = " << nrSamplesPerChannel << - "\n nrSamplesPerSubband = " << nrSamplesPerSubband << - "\n nrBitsPerSample = " << nrBitsPerSample << - "\n nrBytesPerComplexSample = " << nrBytesPerComplexSample << - "\n inputValue = " << inputValue << - "\n ----------------------------" << - "\n Total bytes = " << in.inputSamples.size()); - - LOG_INFO_STR( - "\nOutput info:" << - "\n nrBaselines = " << nrBaselines << - "\n nrBlockPerIntegration = " << nrBlocksPerIntegration << - "\n nrChannelsPerSubband = " << nrChannelsPerSubband << - "\n integrationSteps = " << integrationSteps << - "\n scaleFactor = " << scaleFactor << - "\n outputValue = " << outputValue << - "\n ----------------------------" << - "\n Total bytes = " << out.correlatedData.subblocks[0]->visibilities.size()); - - // Initialize synthetic input to all (1, 1). 
- for (size_t st = 0; st < nrStations; st++) - for (size_t i = 0; i < nrSamplesPerSubband; i++) - for (size_t pol = 0; pol < nrPolarisations; pol++) - { - switch(nrBitsPerSample) { - case 8: - reinterpret_cast<i8complex&>(in.inputSamples[st][i][pol][0]) = - i8complex(inputValue); - break; - case 16: - reinterpret_cast<i16complex&>(in.inputSamples[st][i][pol][0]) = - i16complex(inputValue); - break; - } - } - - // Initialize subbands partitioning administration (struct BlockID). We only - // do the 1st block of whatever. - in.blockID.block = 0; - in.blockID.globalSubbandIdx = 0; - in.blockID.localSubbandIdx = 0; - in.blockID.subbandProcSubbandIdx = 0; - - // Initialize delays. We skip delay compensation, but init anyway, - // so we won't copy uninitialized data to the device. - for (size_t i = 0; i < in.delaysAtBegin.size(); i++) - in.delaysAtBegin.get<float>()[i] = 0.0f; - for (size_t i = 0; i < in.delaysAfterEnd.size(); i++) - in.delaysAfterEnd.get<float>()[i] = 0.0f; - for (size_t i = 0; i < in.phase0s.size(); i++) - in.phase0s.get<float>()[i] = 0.0f; - - size_t block(0); - - LOG_INFO("Processing ..."); - for (block = 0; block < nrBlocksPerIntegration && !out.emit_correlatedData; block++) { - LOG_DEBUG_STR("Processing block #" << block); - cwq.processSubband(in, out); - cwq.postprocessSubband(out); - } - ASSERT(block == nrBlocksPerIntegration); - - LOG_INFO("Verifying output ..."); - for (size_t b = 0; b < nrBaselines; b++) - for (size_t c = 0; c < nrChannelsPerSubband; c++) - for (size_t pol0 = 0; pol0 < nrPolarisations; pol0++) - for (size_t pol1 = 0; pol1 < nrPolarisations; pol1++) - ASSERTSTR(fpEquals(out.correlatedData.subblocks[0]->visibilities[b][c][pol0][pol1], outputValue), - "out[" << b << "][" << c << "][" << pol0 << - "][" << pol1 << "] = " << out.correlatedData.subblocks[0]->visibilities[b][c][pol0][pol1] << - "; outputValue = " << outputValue); - - LOG_INFO("Test OK"); - return 0; } diff --git 
a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.parset b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.parset index a56e0c6d1b99db919b58eace8db85ae6a790230c..38ac7f70550552f8cd6de0d38d3720123caa9275 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.parset +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tCorrelatorSubbandProcProcessSb.parset @@ -1,8 +1,6 @@ -OLAP.CNProc.integrationSteps = 256 Cobalt.Correlator.nrBlocksPerIntegration = 4 Cobalt.correctBandPass = F Observation.nrBitsPerSample = 8 -OLAP.CNProc.nrPPFTaps = 16 Observation.VirtualInstrument.stationList = [CS002] Observation.antennaSet = HBA_DUAL Observation.nrBeams = 1 @@ -17,6 +15,7 @@ Cobalt.delayCompensation = F Observation.nrPolarisations = 2 Cobalt.Correlator.nrChannelsPerSubband = 1 Observation.sampleClock = 200 +Cobalt.blockSize = 1024 diff --git a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tFlysEyeBeamFormerSubbandProcProcessSb.cc b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tFlysEyeBeamFormerSubbandProcProcessSb.cc index be24b2dfdcd36507d323234930c44a306dc128f7..642398919d064b0be7225d5f291f161044ff12c8 100644 --- a/RTCP/Cobalt/GPUProc/test/SubbandProcs/tFlysEyeBeamFormerSubbandProcProcessSb.cc +++ b/RTCP/Cobalt/GPUProc/test/SubbandProcs/tFlysEyeBeamFormerSubbandProcProcessSb.cc @@ -82,8 +82,7 @@ int main() { const size_t nrBitsPerSample = ps.settings.nrBitsPerSample; const size_t nrBytesPerComplexSample = ps.nrBytesPerComplexSample(); - const unsigned fft1Size = ps.settings.beamFormer.nrDelayCompensationChannels; - const unsigned fft2Size = ps.settings.beamFormer.nrHighResolutionChannels / fft1Size; + const unsigned fftSize = ps.settings.beamFormer.nrDelayCompensationChannels; // We only support 8-bit or 16-bit input samples ASSERT(nrBitsPerSample == 8 || nrBitsPerSample == 16); @@ -101,8 +100,7 @@ int main() { "\n nrSamplesPerSubband = " << nrSamplesPerSubband << "\n nrBitsPerSample = " << nrBitsPerSample << "\n 
nrBytesPerComplexSample = " << nrBytesPerComplexSample << - "\n fft1Size = " << fft1Size << - "\n fft2Size = " << fft2Size); + "\n fftSize = " << fftSize); // Because this is fly's eye mode! ASSERT(nrStations == maxNrTABsPerSAP); @@ -199,7 +197,7 @@ int main() { // We can calculate the expected output values, since we're supplying a // complex sine/cosine input signal. We only have Stokes-I, so the output - // should be: (amp * scaleFactor * fft1Size * fft2Size) ** 2 + // should be: (amp * scaleFactor * fftSize) ** 2 // - amp is set to the maximum possible value for the bit-mode: // i.e. 127 for 8-bit and 32767 for 16-bit mode // - scaleFactor is the scaleFactor applied by the IntToFloat kernel. @@ -209,8 +207,8 @@ int main() { // - for 8-bit input: (127 * 16 * 64 * 64) ** 2 = 69273527517184 float outVal = - amplitude * scaleFactor * fft1Size * fft2Size * - amplitude * scaleFactor * fft1Size * fft2Size; + amplitude * scaleFactor * fftSize * + amplitude * scaleFactor * fftSize; cout << "outVal = " << setprecision(12) << outVal << endl; for (size_t tab = 0; tab < nrTABs; tab++) diff --git a/RTCP/Cobalt/GPUProc/test/cuda/tDelayAndBandPass.cc b/RTCP/Cobalt/GPUProc/test/cuda/tDelayAndBandPass.cc index 2272df30b5d09e962de11fa975ca7ff0bd2a3a2e..f78dab5af26fdbd43884b80b0dadefefbaa0b571 100644 --- a/RTCP/Cobalt/GPUProc/test/cuda/tDelayAndBandPass.cc +++ b/RTCP/Cobalt/GPUProc/test/cuda/tDelayAndBandPass.cc @@ -59,7 +59,6 @@ const unsigned NR_POLARIZATIONS = 2; const unsigned NR_SAPS = 8; const double SUBBAND_BANDWIDTH = 0.0 * NR_CHANNELS; -const bool INPUT_IS_STATIONDATA = false; const bool BANDPASS_CORRECTION = true; const bool DELAY_COMPENSATION = false; const bool DO_TRANSPOSE = true; @@ -150,8 +149,6 @@ CompileDefinitions getDefaultCompileDefinitions() boost::lexical_cast<string>(NR_STATIONS); defs["NR_DELAYS"] = boost::lexical_cast<string>(NR_DELAYS); - if (INPUT_IS_STATIONDATA) - defs["INPUT_IS_STATIONDATA"] = "1"; defs["NR_CHANNELS"] = 
boost::lexical_cast<string>(NR_CHANNELS); @@ -178,9 +175,7 @@ CompileDefinitions getDefaultCompileDefinitions() return defs; } -// T is an LCS i*complex type, or complex<float> when #chnl > 1. // It is the value type of the data input array. -template <typename T> vector<fcomplex> runTest(const CompileDefinitions& compileDefs, double subbandFrequency, unsigned beam, @@ -192,15 +187,7 @@ vector<fcomplex> runTest(const CompileDefinitions& compileDefs, gpu::Context ctx(stream->getContext()); boost::scoped_ptr<MultiDimArrayHostBuffer<fcomplex, 4> > outputData; - boost::scoped_ptr<MultiDimArrayHostBuffer<T, 4> > inputData; - - if (compileDefs.find("INPUT_IS_STATIONDATA") == compileDefs.end()) { - // If input does not come from station, we'll read fcomplex. - ASSERT(sizeof(T) == sizeof(fcomplex)); - } else { - // If input does NOT come from station, NR_BITS_PER_SAMPLE needs to match T. - ASSERT(boost::lexical_cast<unsigned>(compileDefs.at("NR_BITS_PER_SAMPLE")) == 8 * sizeof(T) / 2); - } + boost::scoped_ptr<MultiDimArrayHostBuffer<fcomplex, 4> > inputData; if(compileDefs.find("DO_TRANSPOSE") != compileDefs.end()) outputData.reset( @@ -225,7 +212,7 @@ vector<fcomplex> runTest(const CompileDefinitions& compileDefs, unsigned nchnl = boost::lexical_cast<unsigned>(cit->second); if (nchnl == 1) // integer input data (FIR+FFT skipped) inputData.reset( - new MultiDimArrayHostBuffer<T, 4>(boost::extents + new MultiDimArrayHostBuffer<fcomplex, 4>(boost::extents [NR_STATIONS] [NR_SAMPLES_PER_CHANNEL] [NR_CHANNELS] @@ -317,7 +304,7 @@ TEST(BandPass) // The input samples are all ones. After correction, multiply with 2. // The first and the last complex values are retrieved. 
They should be scaled // with the bandPassFactor == 2 - vector<fcomplex> results(runTest<fcomplex>( + vector<fcomplex> results(runTest( defs, 0.0, // sb freq 0U, // beam @@ -342,7 +329,7 @@ TEST(Phase0s) defs["DELAY_COMPENSATION"] = "1"; defs["SUBBAND_BANDWIDTH"] = "1.0"; - vector<fcomplex> results(runTest<fcomplex>( + vector<fcomplex> results(runTest( defs, 1.0, // sb freq 0U, // beam @@ -371,7 +358,7 @@ SUITE(DelayCompensation) defs["DELAY_COMPENSATION"] = "1"; defs["SUBBAND_BANDWIDTH"] = "1.0"; - vector<fcomplex> results(runTest<fcomplex>( + vector<fcomplex> results(runTest( defs, 1.0, // sb freq 0U, // beam @@ -468,7 +455,7 @@ SUITE(DelayCompensation) defs["DELAY_COMPENSATION"] = "1"; defs["SUBBAND_BANDWIDTH"] = "1.0"; - vector<fcomplex> results(runTest<fcomplex>( + vector<fcomplex> results(runTest( defs, 1.0, // sb freq 0U, // beam @@ -549,7 +536,7 @@ TEST(AllAtOnce) defs["DELAY_COMPENSATION"] = "1"; defs["SUBBAND_BANDWIDTH"] = "1.0"; - vector<fcomplex> results(runTest<fcomplex>( + vector<fcomplex> results(runTest( defs, 1.0, // sb freq 0U, // beam diff --git a/RTCP/Cobalt/OutputProc/src/MeasurementSetFormat.h b/RTCP/Cobalt/OutputProc/src/MeasurementSetFormat.h index d4176d548f94378b3c4138815b23a86fdfad0e43..75cf01eece3feb638a4b6317d234a2952271b420 100644 --- a/RTCP/Cobalt/OutputProc/src/MeasurementSetFormat.h +++ b/RTCP/Cobalt/OutputProc/src/MeasurementSetFormat.h @@ -29,6 +29,7 @@ #include <MSLofar/MSLofar.h> #include <CoInterface/Parset.h> #include <CoInterface/SmartPtr.h> +#include <CoInterface/OutputTypes.h> //for LofarStManVersion #include <casa/aips.h> #include <casa/Utilities/DataType.h> @@ -50,8 +51,6 @@ * http://www.lofar.org/operations/lib/exe/fetch.php?media=public:documents:ms2_description_for_lofar_2.08.00.pdf */ -static const unsigned LofarStManVersion = 3; - //# Forward Declarations namespace casa { diff --git a/RTCP/Cobalt/OutputProc/test/tFastFileStream.cc b/RTCP/Cobalt/OutputProc/test/tFastFileStream.cc index 
ca909939ab6da6711e6192769c412ae7959c00f6..44f5408b20e46fb59210e39d3981437392cd520b 100644 --- a/RTCP/Cobalt/OutputProc/test/tFastFileStream.cc +++ b/RTCP/Cobalt/OutputProc/test/tFastFileStream.cc @@ -39,10 +39,10 @@ using namespace LOFAR::Cobalt; class TempFile { public: - TempFile( const string &dirname = "/tmp/") + TempFile( const string &dirname = ".") /* note: /tmp could be using tmpfs, which does not support O_DIRECT */ { char templ[1024]; - snprintf(templ, sizeof templ, "%stFastFileStreamXXXXXX", dirname.c_str()); + snprintf(templ, sizeof templ, "%stFastFileStream_tmpXXXXXX", dirname.c_str()); /* ./$test_tmp* is cleaned up by assay */ fd = mkstemp(templ); diff --git a/RTCP/Cobalt/Tools/plot_cobalt_flagging.py b/RTCP/Cobalt/Tools/plot_cobalt_flagging.py new file mode 100755 index 0000000000000000000000000000000000000000..e9c207f35513c0751d794cc50306a1706a48c0d3 --- /dev/null +++ b/RTCP/Cobalt/Tools/plot_cobalt_flagging.py @@ -0,0 +1,264 @@ +#! /usr/bin/env python + +import sys +import argparse +import time +import datetime +import re +import numpy + +import matplotlib +# On Mac OSX set this to whatever. :-) +matplotlib.use("qt4agg") + +import pylab +# Set some sane defaults for pylab/matplotlib. +pylab.rcParams["axes.formatter.limits"] = -18, 18 +pylab.rcParams["figure.autolayout"] = True +#pylab.rcParams["figure.dpi"] = 300 +pylab.rcParams["image.interpolation"] = "bicubic" +pylab.rcParams["pdf.fonttype"] = 42 +pylab.rcParams["pdf.use14corefonts"] = False +pylab.rcParams["ps.fonttype"] = 42 +pylab.rcParams["ps.papersize"] = "a4" +pylab.rcParams["ps.useafm"] = False +pylab.rcParams["ps.usedistiller"] = False +pylab.rcParams["savefig.dpi"] = 600 +pylab.rcParams["savefig.orientation"] = "landscape" +pylab.rcParams["text.usetex"] = True +pylab.rcParams["timezone"] = "UTC" + +import matplotlib.pyplot + + +def _setup_command_line_arguments(): + """ + Setup the command line argument parser and return it. 
+ :return: the parser for the command line arguments + :rtype: argparse.ArgumentParser + """ + parser = argparse.ArgumentParser(description = "") + parser.add_argument( + "cobalt_log_file", help = "The Cobalt log file which contains the flagging information.") + parser.add_argument( + "--station_list", help = "Run the tool at least once. Then choose from the station list - which is printed during the execution of the script - the ones you want to be in the plot.", nargs = "+") + parser.add_argument( + "--ignore_zero_values", help = "Ignore data points when stations logged a 0.0%% value for flagged data.", action = "store_true") + return parser + + +def read_file(input_file): + ''' + Open and read a file in. + :param input_file: The file that will be read and then split into lines. + :return: The file neatly split up into a list of lines. + ''' + with open(input_file, "r") as file_stream: + file_lines = file_stream.read().splitlines() + return file_lines + + +def build_regexp_format(): + ''' + Create a regular expression that triggers on Cobalt log lines which contain + flagging information. + The relevant log lines look like this: + rtcp:07@cbt004 2018-10-09 13:30:06.676 WARN RTCP.Cobalt.GPUProc - [block 57] Flagging: CS004LBA: 0.0%, CS006LBA: 0.0%, CS021LBA: 0.0%, CS030LBA: 0.0%, CS501LBA: 0.5%, [Pipeline.cc:449] + :return: A regular expression strnig that allows to filter out non-matching + log lines. 
+ ''' + # Time stamp = YYYY-MM-DD HH:mm:SS.mmm + time_stamp_regex = "\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}.\d{3}" + # block # = n + block_regex = "\d+" + # Staion flagging = CS004LBA: 0.0%, CS006LBA: 0.0%, + stations_flagging_regex = "\w+:\s+\d+\.\d+%,\s*" + format = r"^.*" \ + "(?P<time_stamp>{time_stamp_regex})" \ + "\s+WARN\s+.*\[block\s+" \ + "(?P<block>{block_regex})" \ + "\]\s+Flagging:\s+" \ + "(?P<stations_flagging>(?:{stations_flagging_regex})+)".format( + time_stamp_regex=time_stamp_regex, + block_regex=block_regex, + stations_flagging_regex=stations_flagging_regex) + return format + + +def split_log_line(regexp_format, log_line): + """ + Identify and split Cobalt log lines that contain flagging information. Then + gather that information in a dict that has this structure: + {'time_stamp': datetime.datetime(2018, 10, 9, 13, 29, 9, 590000), 'block': 0, 'stations_flagging': [{'CS004LBA': 0.0}, {'CS006LBA': 0.0}]} + :param match: A regexp match the matches the time stamp, the block number and the stations with their flagging percentage value. + :param log_line: A Cobalt log line. Example: + rtcp:07@cbt004 2018-10-09 13:30:06.676 WARN RTCP.Cobalt.GPUProc - [block 57] Flagging: CS004LBA: 0.0%, CS006LBA: 0.0%, CS021LBA: 0.0%, CS030LBA: 0.0%, CS501LBA: 0.5%, [Pipeline.cc:449] + :return: a dict of dicts that contain the time stamp and the + the end_date, the rcu_mode, and the beam_switch_delay. + :rtype: dict + """ + match = re.match(regexp_format, log_line) + + stations_flagging = dict() + if match is not None: + stations_flagging = match.groupdict() + # Massage the content in the dict returned by match a bit. + stations_flagging_string = stations_flagging["stations_flagging"] + # I need to extract the name of each of the stations. + stations_flagging_list = stations_flagging_string.rstrip().rstrip("%,").split("%, ") + beautified_stations_flagging_list = list() + for station in stations_flagging_list: + # Split the station name off from its flagging value. 
+ station_name, flagging = station.split(": ") + # Store both. + beautified_stations_flagging_list.append( + {station_name: float(flagging)}) + # Keep everything nice and tidy. Store the data in a new dict. + stations_flagging["stations_flagging"] = beautified_stations_flagging_list + stations_flagging["block"] = int(stations_flagging["block"]) + stations_flagging["time_stamp"] = datetime.datetime.strptime( + stations_flagging["time_stamp"], "%Y-%m-%d %H:%M:%S.%f") + + return dict(stations_flagging) + + +def identify_cobalt_flagging_lines(log_lines): + ''' + Apply a regexp to the Cobalt log lines and filter out the lines that do not + contain flagging information. The return the remaining log lines for + further processing. + :param log_lines: The log lines of the Cobalt log file in a list. + :return: A list of Cobalt log lines that contains only flagging information. + ''' + regexp_format = build_regexp_format() + flagging_lines = list() + for log_line in log_lines: + flagging_line = split_log_line(regexp_format, log_line) + if flagging_line is not None and isinstance(flagging_line, dict) and len(flagging_line) > 0: + flagging_lines.append(flagging_line) + return flagging_lines + + +def reorder_flagging_information(flagging_dict, ignore_zero_values): + ''' + Reshuffle the data into some numpy arrays that are compatible with matplot. + :param flagging_dict: a dict that contains the flagging data. 
Example: + {'time_stamp': datetime.datetime(2018, 10, 9, 13, 29, 9, 590000), 'block': 0, 'stations_flagging': [{'CS004LBA': 0.0}, {'CS006LBA': 0.0}]} + :return: dict[station name] + ["time_stamps"]: time stamps when data was given + ["block"]: block # + ["flagging_percentage"]: percentage of flagged data + ''' + time_stamps = list() + blocks = list() + stations = list() + station_flagging_dict = dict() + + for item in flagging_dict: + stations_flagging = item["stations_flagging"] + time_stamp = item["time_stamp"] + block = item["block"] + + for single_station in stations_flagging: + (station, flagging_percentage) = single_station.popitem() + if flagging_percentage > 0.0 or ignore_zero_values is False: + if station in station_flagging_dict: + station_flagging_dict[station]["time_stamps"].append(time_stamp) + station_flagging_dict[station]["blocks"].append(block) + station_flagging_dict[station]["flagging_percentage"].append(flagging_percentage) + else: + station_flagging_dict[station] = dict() + station_flagging_dict[station]["time_stamps"] = list() + station_flagging_dict[station]["time_stamps"].append(time_stamp) + station_flagging_dict[station]["blocks"] = list() + station_flagging_dict[station]["blocks"].append(block) + station_flagging_dict[station]["flagging_percentage"] = list() + station_flagging_dict[station]["flagging_percentage"].append(flagging_percentage) + return station_flagging_dict + + +def closeEvent(event): + ''' + When the close window button is clicked this function gets called. It + simply exits. + ''' + sys.exit(0) + + +def keyPressEvent(event): + ''' + Handle key-press events. + Save the plot to disk if "S" or "s" is pressed. + Leave the plotting hell if ESC, "Q" or "q" is pressed. + ''' + if event.key == "escape" or event.key.lower() == "q": + sys.exit(0) + elif event.key.lower() == "s": + # Save the current plot. 
+ fileName = time.strftime("%Y-%m-%dT%H.%M.%S-Cobalt_data_flagging.png", time.gmtime()) + matplotlib.pyplot.savefig(fileName, dpi = 600, orientation = "landscape", papertype = "a4", format = "png") + + + +def main(): + cla_parser = _setup_command_line_arguments() + arguments = cla_parser.parse_args() + print("Reading the Cobalt log file \"%s\"..." % (arguments.cobalt_log_file)) + cobalt_log_lines = read_file(arguments.cobalt_log_file) + print("Identifying the flagging log lines...") + cobalt_flags = identify_cobalt_flagging_lines(cobalt_log_lines) + # Convert the data to numpy arrays for plotting. + print("Preparing the data for plotting...") + stations_dict = reorder_flagging_information(cobalt_flags, arguments.ignore_zero_values) + + station_list = list(stations_dict.keys()) + print("\nThe following stations flagged data:\n%s\n" % (" ".join(station_list))) + + # Set-up of the matplotlib stuff. + print("Set up the matplotlib canvas...") + figure, axes = matplotlib.pyplot.subplots() + # Erase everything. + axes.cla() + # Set up the x-axis to display time and date. +# axes.xaxis.set_major_locator(matplotlib.dates.MinuteLocator()) + axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter('%Y-%m-%d %H:%M')) +# axes.xaxis.set_minor_locator(matplotlib.dates.SecondLocator()) + axes.xaxis.set_minor_formatter(matplotlib.dates.DateFormatter('%H:%M:%S')) + axes.fmt_xdata = matplotlib.dates.DateFormatter('%Y-%m-%d %H:%M:%S') + figure.autofmt_xdate() + # Display a grid. + matplotlib.pyplot.grid(True) + # Set the plot title. + matplotlib.pyplot.title("Cobalt station flagging") + # Label the axes. 
+ matplotlib.pyplot.xlabel("Time (YYYY-MM-DD HH:MM:SS)") + matplotlib.pyplot.ylabel("Flagged data (\%)") + + plots = list() + print("Plot everything...") + if arguments.station_list is not None: + station_list = arguments.station_list + print("The following stations will be plotted:\n%s\n" % (arguments.station_list)) + for station in station_list: + station_flagging = stations_dict[station] + print("Adding station %s to the plot..." % (station)) + plots.append(axes.plot(station_flagging["time_stamps"], station_flagging["flagging_percentage"], marker = "+", label = station, alpha = 0.2)) + # Update the figure and add a legend, + matplotlib.pyplot.legend() + figure.canvas.draw() + + # Create an event handler for the close_event. + figure.canvas.mpl_connect("close_event", closeEvent) + + # Create an event handler for the key_press_event. + figure.canvas.mpl_connect("key_press_event", keyPressEvent) + + # Start everything. This call blocks until the window close button is clicked. + matplotlib.pyplot.show() + # That's it. Thanks and goodbye! 
+ matplotlib.pyplot.close(figure) + print("Goodbye!") + + +if __name__ == "__main__": + main() diff --git a/SAS/DataManagement/DataManagementCommon/path.py b/SAS/DataManagement/DataManagementCommon/path.py index fa801d79c7fc0dcf3406360836de40eaeff05bdf..8c721c964b8ca50e7d864d5351d8024978629241 100644 --- a/SAS/DataManagement/DataManagementCommon/path.py +++ b/SAS/DataManagement/DataManagementCommon/path.py @@ -167,8 +167,8 @@ class PathResolver: # get the subdirectories of the given path cmd = ['lfs', 'find', '--type', 'd', '--maxdepth', '1', path.rstrip('/')] hostname = socket.gethostname() - if not 'mgmt0' in hostname: - cmd = ['ssh', 'lofarsys@mgmt01.cep4.control.lofar'] + cmd + if not 'head' in hostname: + cmd = ['ssh', 'lofarsys@head.cep4.control.lofar'] + cmd logger.debug(' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate() @@ -189,8 +189,8 @@ class PathResolver: def pathExists(self, path): cmd = ['lfs', 'ls', path] hostname = socket.gethostname() - if not 'mgmt0' in hostname: - cmd = ['ssh', 'lofarsys@mgmt01.cep4.control.lofar'] + cmd + if not 'head' in hostname: + cmd = ['ssh', 'lofarsys@head.cep4.control.lofar'] + cmd logger.debug(' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate() diff --git a/SAS/DataManagement/StorageQueryService/cache.py b/SAS/DataManagement/StorageQueryService/cache.py index 43bd5fdbd411eafc6887afdf040dd5d82abf2ab5..7892a26440c7f705368c7e275c7a07459b6f8f41 100644 --- a/SAS/DataManagement/StorageQueryService/cache.py +++ b/SAS/DataManagement/StorageQueryService/cache.py @@ -16,6 +16,7 @@ from lofar.messaging import EventMessage, ToBus from lofar.common.util import humanreadablesize from lofar.common.datetimeutils import format_timedelta from lofar.sas.datamanagement.storagequery.diskusage import getDiskUsageForPath as du_getDiskUsageForPath +from lofar.sas.datamanagement.storagequery.diskusage 
import getOTDBIdFromPath from lofar.sas.datamanagement.storagequery.diskusage import DiskUsage from lofar.sas.datamanagement.common.datamanagementbuslistener import DataManagementBusListener from lofar.sas.otdb.OTDBBusListener import OTDBBusListener @@ -29,7 +30,7 @@ from lofar.mom.momqueryservice.config import DEFAULT_MOMQUERY_BUSNAME, DEFAULT_M logger = logging.getLogger(__name__) -MAX_CACHE_ENTRY_AGE = datetime.timedelta(hours=8) +MAX_CACHE_ENTRY_AGE = datetime.timedelta(hours=3*24) class CacheManager: def __init__(self, @@ -50,7 +51,7 @@ class CacheManager: self.otdb_listener = OTDBBusListener(busname=otdb_notification_busname, subject=otdb_notification_subject, broker=broker, - numthreads=2) + numthreads=1) self.otdb_listener.onObservationAborted = self.onObservationAborted self.otdb_listener.onObservationFinished = self.onObservationFinished @@ -58,7 +59,7 @@ class CacheManager: self.dm_listener = DataManagementBusListener(busname=dm_notification_busname, subjects=dm_notification_prefix + '*', broker=broker, - numthreads=2) + numthreads=1) self.dm_listener.onTaskDeleted = self.onTaskDeleted @@ -99,10 +100,15 @@ class CacheManager: try: if os.path.exists(self._cache_path): with open(self._cache_path, 'r') as file: + cache_from_disk = eval(file.read().strip()) #slow! with self._cacheLock: - self._cache = eval(file.read().strip()) + self._cache = cache_from_disk if not isinstance(self._cache, dict): self._cache = {'path_du_results': {}, 'otdb_id2path': {} } + if 'path_du_results' not in self._cache: + self._cache['path_du_results'] = {} + if 'otdb_id2path' not in self._cache: + self._cache['otdb_id2path'] = {} except Exception as e: logger.error("Error while reading in du cache: %s", e) with self._cacheLock: @@ -111,11 +117,20 @@ class CacheManager: def _writeCacheToDisk(self): try: + # only persist (a subset of) the cache to disk every once in a while. 
if datetime.datetime.utcnow() - self._last_cache_write_timestamp > datetime.timedelta(minutes=5): tmp_path = '/tmp/tmp_storagequery_cache.py' cache_str = '' with self._cacheLock: - cache_str = str(self._cache) + # Take a subset of the entire cache + # only the path_du_results for paths at project level (like /data/projects, /data/projects/LC9_001) + # Do not store path_du_results for deeper levels on disk, because that makes the disk read/write too slow, + # and the deeper levels can be obtained via rhb-du calls quite fast anyway. + # Furthermore, once a deeper level du results is stored in the memory cache, then it is also available for fast lookup. + # We just don't store these deep levels on disk. + sub_cache = { path:du_result for path,du_result in self._cache['path_du_results'].items() + if self.getDepthToProjectsDir(path) <= 1 and du_result.get('found') } + cache_str = str(sub_cache) with open(tmp_path, 'w') as file: file.write(cache_str) @@ -140,10 +155,9 @@ class CacheManager: if path in path_cache: otdb_id = du_result.get('otdb_id') - if not du_result['found']: - #make sure disk_usage is set when not found - du_result['disk_usage'] = 0 - du_result['disk_usage_readable'] = '0B' + # if still None, try to get the id from the path + if otdb_id is None: + otdb_id = getOTDBIdFromPath(path) if not path in path_cache or path_cache[path]['disk_usage'] != du_result['disk_usage']: # update the cache entry, even when no du result found, @@ -151,13 +165,20 @@ class CacheManager: logger.info('updating cache entry: %s', du_result) path_cache[path] = du_result - path_cache[path]['cache_timestamp'] = datetime.datetime.utcnow() - path_cache[path]['needs_update'] = False - if otdb_id != None: otdb_id2path_cache[otdb_id] = path - self._writeCacheToDisk() + if not du_result['found']: + # even when the du for the path is not found, + # keep a copy in the cache for fast lookup by clients + # Make sure the size is 0 + du_result['disk_usage'] = 0 + du_result['disk_usage_readable'] 
= humanreadablesize(0) + + path_cache[path]['cache_timestamp'] = datetime.datetime.utcnow() + path_cache[path]['needs_update'] = False + + self._writeCacheToDisk() self._sendDiskUsageChangedNotification(path, du_result['disk_usage'], otdb_id) @@ -179,28 +200,41 @@ class CacheManager: return result + def getDepthToProjectsDir(self, path): + return len(path.replace(self.disk_usage.path_resolver.projects_path, '').strip('/').split('/')) + def _scanProjectsTree(self): try: def addSubDirectoriesToCache(directory): - depth = len(directory.replace(self.disk_usage.path_resolver.projects_path, '').strip('/').split('/')) - if depth > 3: + depth = self.getDepthToProjectsDir(directory) + MAX_SCAN_DEPTH=2 + #depth=0 : projects + #depth=1 : projects/<project> + #depth=2 : projects/<project>/<obs> + #depth=3 : projects/<project>/<obs>/<sub_dir> + if depth > MAX_SCAN_DEPTH: return + add_empty_du_result_to_cache = False with self._cacheLock: path_cache = self._cache['path_du_results'] - if not directory in path_cache: - logger.info('tree scan: adding \'%s\' with empty disk_usage to cache which will be du\'ed later', directory) - empty_du_result = {'found': True, 'disk_usage': None, 'path': directory, 'name': directory.split('/')[-1]} - self._updateCache(empty_du_result) + add_empty_du_result_to_cache = not directory in path_cache + + if add_empty_du_result_to_cache: + logger.info('tree scan: adding \'%s\' with empty disk_usage to cache which will be du\'ed later', directory) + empty_du_result = {'found': True, 'disk_usage': None, 'path': directory, 'name': directory.split('/')[-1]} + self._updateCache(empty_du_result) - if directory in path_cache: - # mark cache entry for directory to be updated - path_cache[directory]['needs_update'] = True + with self._cacheLock: + path_cache = self._cache['path_du_results'] + if directory in path_cache: + # mark cache entry for directory to be updated + path_cache[directory]['needs_update'] = True if not self._cacheThreadsRunning: return - if 
depth < 3: + if depth < MAX_SCAN_DEPTH: logger.info('tree scan: scanning \'%s\'', directory) sd_result = self.disk_usage.path_resolver.getSubDirectories(directory) @@ -231,12 +265,12 @@ class CacheManager: updateable_entries = old_entries + needs_update_entries - if updateable_entries: - logger.info('%s old cache entries need to be updated, #age:%s #needs_update:%s', - len(updateable_entries), - len(old_entries), - len(needs_update_entries)) + logger.info('%s old cache entries need to be updated, #age:%s #needs_update:%s', + len(updateable_entries), + len(old_entries), + len(needs_update_entries)) + if updateable_entries: # sort them oldest to newest, 'needs_update' paths first def compareFunc(entry1, entry2): if entry1.get('needs_update') and not entry2.get('needs_update'): @@ -244,6 +278,13 @@ class CacheManager: if not entry1.get('needs_update') and entry2.get('needs_update'): return 1 + depth1 = self.getDepthToProjectsDir(entry1['path']) + depth2 = self.getDepthToProjectsDir(entry2['path']) + + if depth1 != depth2: + # lower level dirs are sorted in front of higher level dirs + return depth2 - depth1 + if entry1['cache_timestamp'] < entry2['cache_timestamp']: return -1 if entry1['cache_timestamp'] > entry2['cache_timestamp']: @@ -254,34 +295,45 @@ class CacheManager: cacheUpdateStart = datetime.datetime.utcnow() - for i, cache_entry in enumerate(updateable_entries): + #do a quick update of each entry by applying the sum of the subdirs to the path's du result... + #this make a best guess immediately available... + for cache_entry in updateable_entries: try: path = cache_entry.get('path') if path: - logger.info('_updateOldEntriesInCache: examining entry %s/%s. 
timestamp:%s age:%s needs_update:%s path: \'%s\'', - i, - len(updateable_entries), - cache_entry['cache_timestamp'], - format_timedelta(now - cache_entry['cache_timestamp']), - cache_entry.get('needs_update', False), - path) - - #do a quick update of the entry sy applying the sum of the subdirs to the path's du result... - #this make a best guess immedeiately available... self._updatePathCacheEntryToSubDirTotal(path, False) + except Exception as e: + logger.error(str(e)) - #...and in the mean time, du a full update from disk, which might be (really) slow. - result = du_getDiskUsageForPath(path) - logger.debug('trying to update old entry in cache: %s', result) - self._updateCache(result) + for i, cache_entry in enumerate(updateable_entries): + try: + # it might be that the cache_entry was already updated via another way + # so only update it if still to old or needs_update + now = datetime.datetime.utcnow() + if now - cache_entry['cache_timestamp'] > MAX_CACHE_ENTRY_AGE or cache_entry.get('needs_update', False): + path = cache_entry.get('path') + if path: + logger.info('_updateOldEntriesInCache: examining entry %s/%s. timestamp:%s age:%s needs_update:%s path: \'%s\'', + i, + len(updateable_entries), + cache_entry['cache_timestamp'], + format_timedelta(now - cache_entry['cache_timestamp']), + cache_entry.get('needs_update', False), + path) + + #du a full update from disk, which might be (really) slow. 
+ result = du_getDiskUsageForPath(path) + logger.debug('trying to update old entry in cache: %s', result) + self._updateCache(result) except Exception as e: logger.error(str(e)) if not self._cacheThreadsRunning: + logger.info('exiting _updateCacheThread') return if datetime.datetime.utcnow() - cacheUpdateStart > datetime.timedelta(minutes=10): - # break out of cache update loop if full update takes more than 10min + # break out of cache update loop if full update takes more than 1min # next loop we'll start with the oldest cache entries again logger.info('skipping remaining %s old cache entries updates, they will be updated next time', len(updateable_entries)-i) break @@ -289,31 +341,35 @@ class CacheManager: #update the CEP4 capacities in the RADB once in a while... self._updateCEP4CapacitiesInRADB() - #sleep for a minute, (or stop if requested) - for i in range(60): + #sleep for a while, (or stop if requested) + for i in range(10): sleep(1) if not self._cacheThreadsRunning: + logger.info('exiting _updateCacheThread') return except Exception as e: - logger.error(str(e)) + logger.exception(str(e)) def _updatePathCacheEntryToSubDirTotal(self, path, force_update=False): - sd_result = self.disk_usage.path_resolver.getSubDirectories(path) + with self._cacheLock: + path_cache_result = self._cache['path_du_results'].get(path) - if sd_result['found']: - subdir_paths = [os.path.join(path, sd) for sd in sd_result['sub_directories']] + if path_cache_result: + path_depth = path.count('/') + all_dirs = self._cache['path_du_results'].keys() + subdir_paths = [sdp for sdp in all_dirs + if sdp.startswith(path) and sdp.count('/') == path_depth+1] - subdir_du_results = [self.getDiskUsageForPath(sd, force_update=force_update) for sd in subdir_paths] - sum_du = sum([du['disk_usage'] for du in subdir_du_results]) + subdir_du_results = [self.getDiskUsageForPath(sd, force_update=force_update) for sd in subdir_paths] + valid_subdir_du_results = [du for du in subdir_du_results if 
du.get('disk_usage')] + sum_du = sum([du['disk_usage'] for du in valid_subdir_du_results]) - with self._cacheLock: - if path in self._cache['path_du_results']: - path_result = self._cache['path_du_results'][path] - path_result['disk_usage'] = sum_du - path_result['disk_usage_readable'] = humanreadablesize(sum_du) - path_result['needs_update'] = True - self._updateCache(path_result) + if sum_du > 0: + logger.info('_updatePathCacheEntryToSubDirTotal: setting disk usage for %s to sum of %s known cached subdirs of %s', + path, len(valid_subdir_du_results), humanreadablesize(sum_du)) + path_cache_result['disk_usage'] = sum_du + path_cache_result['disk_usage_readable'] = humanreadablesize(sum_du) def _updateCEP4CapacitiesInRADB(self): try: @@ -444,10 +500,8 @@ class CacheManager: scratch_path_du_result = self.getDiskUsageForPath(scratch_path, force_update=force_update) path_du_result['scratch_paths'][scratch_path] = scratch_path_du_result - self._updateCache(path_du_result) return path_du_result - self._updateCache(path_result) return {'found': False, 'path': path_result['path']} def getDiskUsageForTasks(self, radb_ids=None, mom_ids=None, otdb_ids=None, include_scratch_paths=True, force_update=False): @@ -471,8 +525,9 @@ class CacheManager: def getDiskUsageForPath(self, path, force_update=False): logger.info("cache.getDiskUsageForPath('%s', force_update=%s)", path, force_update) needs_cache_update = False - with self._cacheLock: - needs_cache_update |= path not in self._cache['path_du_results'] + if not force_update: + with self._cacheLock: + needs_cache_update |= path not in self._cache['path_du_results'] if needs_cache_update or force_update: logger.info("cache update needed for %s", path) diff --git a/SAS/DataManagement/StorageQueryService/diskusage.py b/SAS/DataManagement/StorageQueryService/diskusage.py index 67a96da340ce7dc21ccf8ff24d68f5cbf7526e49..cf8f15d507bf2b6f4e9a0d1d82b6f1e1f0ea6f7b 100644 --- a/SAS/DataManagement/StorageQueryService/diskusage.py +++ 
b/SAS/DataManagement/StorageQueryService/diskusage.py @@ -17,14 +17,26 @@ from lofar.mom.momqueryservice.config import DEFAULT_MOMQUERY_BUSNAME, DEFAULT_M logger = logging.getLogger(__name__) def getDiskUsageForPath(path): - logger.info('getDiskUsageForPath(\'%s\')', path) + # 20180829: until lustre has been updated and robinhood has been switched back on (in october) use normal du + return getDiskUsageForPath_du(path) + + result = getDiskUsageForPath_rbh_du(path) + + if not result.get('found') or result.get('nr_of_files', None) is None: + logger.info('getDiskUsageForPath(\'%s\') could not obtain valid robinhood result, trying normal du.', path) + result = getDiskUsageForPath_du(path) + + return result + +def getDiskUsageForPath_rbh_du(path): + logger.info('getDiskUsageForPath_rbh_du(\'%s\')', path) result = {'found': False, 'path': path, 'disk_usage': None, 'name': path.split('/')[-1] } cmd = ['rbh-du', '-bd', path] hostname = socket.gethostname() - if not 'mgmt0' in hostname: - cmd = ['ssh', 'lofarsys@mgmt01.cep4.control.lofar'] + cmd + if not 'head' in hostname: + cmd = ['ssh', 'lofarsys@head.cep4.control.lofar'] + cmd logger.info(' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -56,24 +68,71 @@ def getDiskUsageForPath(path): if dir_lines: result['found'] = True result['disk_usage'] = 0 - result['nr_of_files'] = 0 - - try: - path_items = path.rstrip('/').split('/') - if len(path_items) >=3 and path_items[-1].startswith('L') and path_items[-1][1:].isdigit() and 'projects' in path_items[-3]: - logger.info('found path for otdb_id %s %s', path_items[-1][1:], path) - result['otdb_id'] = int(path_items[-1][1:]) - except Exception as e: - logger.error('Could not parse otdb_id from path %s %s', path, e) + result['nr_of_files'] = None else: logger.error(out + err) result['message'] = out result['disk_usage_readable'] = humanreadablesize(result['disk_usage']) - logger.info('getDiskUsageForPath(\'%s\') returning: %s', path, 
result) + otdb_id = getOTDBIdFromPath(path) + if otdb_id: + result['otdb_id'] = otdb_id + + logger.info('getDiskUsageForPath_rbh_du(\'%s\') returning: %s', path, result) return result +def getDiskUsageForPath_du(path): + logger.info('getDiskUsageForPath_du(\'%s\')', path) + + result = {'found': False, 'path': path, 'disk_usage': None, 'name': path.split('/')[-1] } + + cmd = ['du', '-bcs', path] + hostname = socket.gethostname() + if not 'head' in hostname: + cmd = ['ssh', 'lofarsys@head.cep4.control.lofar'] + cmd + logger.info(' '.join(cmd)) + + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = proc.communicate() + + if proc.returncode == 0: + # example of out + # 7025510839 /data/projects/HOLOG_WINDMILL_TESTS/L662734/uv/ + # 7025510839 total + + #parse out + lines = [l.strip() for l in out.split('\n')] + total_lines = [l for l in lines if 'total' in l] + if total_lines: + parts = [p.strip() for p in total_lines[0].split()] + if len(parts) == 2: + result['found'] = True + result['disk_usage'] = int(parts[0]) + result['nr_of_files'] = None + else: + logger.error(out + err) + result['message'] = out + + result['disk_usage_readable'] = humanreadablesize(result['disk_usage']) + + otdb_id = getOTDBIdFromPath(path) + if otdb_id: + result['otdb_id'] = otdb_id + + logger.info('getDiskUsageForPath_du(\'%s\') returning: %s', path, result) + return result + +def getOTDBIdFromPath(path): + try: + path_items = path.rstrip('/').split('/') + if len(path_items) >=3 and path_items[-1].startswith('L') and path_items[-1][1:].isdigit() and 'projects' in path_items[-3]: + logger.info('found path for otdb_id %s %s', path_items[-1][1:], path) + return int(path_items[-1][1:]) + except Exception as e: + logger.error('Could not parse otdb_id from path %s %s', path, e) + return None + def getDiskFreeSpaceForMountpoint(mountpoint=CEP4_DATA_MOUNTPOINT): logger.info('getDiskFreeSpaceForMountpoint(\'%s\')', mountpoint) @@ -81,8 +140,8 @@ def 
getDiskFreeSpaceForMountpoint(mountpoint=CEP4_DATA_MOUNTPOINT): cmd = ['df', mountpoint] hostname = socket.gethostname() - if not 'mgmt0' in hostname: - cmd = ['ssh', 'lofarsys@mgmt01.cep4.control.lofar'] + cmd + if not 'head' in hostname: + cmd = ['ssh', 'lofarsys@head.cep4.control.lofar'] + cmd logger.info(' '.join(cmd) + ' ...waiting for result...') proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/SAS/DataManagement/StorageQueryService/rpc.py b/SAS/DataManagement/StorageQueryService/rpc.py index 7192032deae37ba58db65d1d46a9bbf7f079d97e..e79c158746b2c2f64bea9d258c14e2c89249784e 100644 --- a/SAS/DataManagement/StorageQueryService/rpc.py +++ b/SAS/DataManagement/StorageQueryService/rpc.py @@ -12,8 +12,9 @@ logger = logging.getLogger(__name__) class StorageQueryRPC(RPCWrapper): def __init__(self, busname=DEFAULT_BUSNAME, servicename=DEFAULT_SERVICENAME, + timeout=18000, broker=None): - super(StorageQueryRPC, self).__init__(busname, servicename, broker, timeout=18000) + super(StorageQueryRPC, self).__init__(busname, servicename, broker, timeout=timeout) def _convertTimestamps(self, result): if isinstance(result, dict): diff --git a/SAS/DataManagement/StorageQueryService/service.py b/SAS/DataManagement/StorageQueryService/service.py index e027fec29e2a9ac92b22c044a4824b66dad57ee3..10f83a442c6a872d4e751c06ff47ac7217ee46de 100644 --- a/SAS/DataManagement/StorageQueryService/service.py +++ b/SAS/DataManagement/StorageQueryService/service.py @@ -67,7 +67,7 @@ def createService(busname=DEFAULT_BUSNAME, servicename=DEFAULT_SERVICENAME, brok busname=busname, broker=broker, use_service_methods=True, - numthreads=8, + numthreads=4, verbose=verbose, handler_args={'mountpoint': mountpoint, 'radb_busname':RADB_BUSNAME, diff --git a/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py b/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py index 6389a25a34a10fa396aa28cc434802b19a5867f6..ceba0dd06f83de2ce6bbeb61bec3dca3bfdaedc9 
100644 --- a/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py +++ b/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py @@ -335,6 +335,15 @@ class MoMQueryRPC(RPCWrapper): time_restrictions = self.rpc('GetTriggerTimeRestrictions', mom_id=mom_id) return time_restrictions + def get_storagemanager(self, mom_id): + """ + Returns the storagemanager for given mom id. + :param mom_id: int + :return: string + """ + logger.info("Calling GetStoragemanager for mom id "+str(mom_id)) + storagemanager = self.rpc('GetStoragemanager', mom_id=mom_id) + return storagemanager def main(): # Check the invocation arguments diff --git a/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py b/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py index 57ba49cdf9fbb11ea4ae3841542c5b7d1de39f4e..41a4e63eee3e7f9d821adee221f90f96c3fba722 100755 --- a/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py +++ b/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py @@ -1219,6 +1219,26 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = " return station_selection + + def get_storagemanager(self, mom_id): + """ returns storagemanager if mom_id has that in the misc field or else raise KeyError. 
Raises ValueError if + no entry is found for given mom_id + :param mom_id: + :return: string or None + """ + logger.info("get_storagemanager for mom_id: %s", mom_id) + + misc = self._get_misc_contents(mom_id) + if misc is None: + raise ValueError("mom_id (%s) not found in MoM database" % mom_id) + if 'storagemanager' not in misc: + raise KeyError("misc field for mom_id (%s) does not contain storagemanager" % mom_id) + storagemanager = misc['storagemanager'] + + logger.info("get_storagemanager for mom_id (%s): %s", mom_id, storagemanager) + return storagemanager + + class ProjectDetailsQueryHandler(MessageHandlerInterface): """ handler class for details query in mom db @@ -1257,7 +1277,8 @@ class ProjectDetailsQueryHandler(MessageHandlerInterface): 'GetStationSelection': self.get_station_selection, 'GetTriggerQuota': self.get_trigger_quota, 'UpdateTriggerQuota': self.update_trigger_quota, - 'GetTriggerTimeRestrictions': self.get_trigger_time_restrictions + 'GetTriggerTimeRestrictions': self.get_trigger_time_restrictions, + 'GetStoragemanager': self.get_storagemanager } def prepare_loop(self): @@ -1374,6 +1395,9 @@ class ProjectDetailsQueryHandler(MessageHandlerInterface): def get_station_selection(self, mom_id): return self.momdb.get_station_selection(mom_id) + def get_storagemanager(self, mom_id): + return self.momdb.get_storagemanager(mom_id) + def createService(busname=DEFAULT_MOMQUERY_BUSNAME, servicename=DEFAULT_MOMQUERY_SERVICENAME, diff --git a/SAS/MoM/MoMQueryService/test/t_momqueryservice.py b/SAS/MoM/MoMQueryService/test/t_momqueryservice.py index 400562c9a38002a711a71e75abbb08439605b57f..5d5dbb107e1f6268dd63aa445727500327a043cb 100755 --- a/SAS/MoM/MoMQueryService/test/t_momqueryservice.py +++ b/SAS/MoM/MoMQueryService/test/t_momqueryservice.py @@ -674,6 +674,14 @@ class TestProjectDetailsQueryHandler(unittest.TestCase): self.assertEqual(result["used_triggers"], used) self.assertEqual(result["allocated_triggers"], max) + def 
test_get_storagemanager_returns_what_the_mom_wrapper_returns(self): + + return_value = "d.y.s.c.o." + self.mom_database_wrapper_mock().get_storagemanager.return_value = return_value + result = self.project_details_query_handler.get_storagemanager(1234) + + self.assertEqual(return_value, result) + class TestMomQueryRPC(unittest.TestCase): test_id = 1234 trigger_id = 12345 @@ -1951,6 +1959,28 @@ class TestMoMDatabaseWrapper(unittest.TestCase): self.mysql_mock.connect().cursor().rowcount = 1 self.mom_database_wrapper.update_trigger_quota('myproject') + def test_get_storagemanager_returns_value_from_query_result(self): + value = "d.y.s.c.o." + self.mysql_mock.connect().cursor().fetchall.return_value = \ + [{u'misc': '{"storagemanager": \"' + value + '\"}'}] + result = self.mom_database_wrapper.get_storagemanager(self.mom_id) + self.assertEqual(result, value) + + + def test_get_storagemanager_throws_ValueError_on_empty_query_result(self): + self.mysql_mock.connect().cursor().fetchall.return_value = [] + with self.assertRaises(ValueError): + self.mom_database_wrapper.get_storagemanager(1234) + + def test_get_storagemanager_throws_ValueError_if_station_selection_not_present_in_misc(self): + details_result = [{u"misc": json.dumps({u"timeWindow": {u'minDuration': 300, u'maxDuration': 300}})}] + self.mysql_mock.connect().cursor().fetchall.return_value = details_result + + with self.assertRaises(ValueError): + self.mom_database_wrapper.get_storagemanager(1234) + + + @unittest.skip("Skipping integration test") class IntegrationTestMoMDatabaseWrapper(unittest.TestCase): database_credentials = Credentials() @@ -2562,6 +2592,22 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase): used_t, max_t = self.mom_database_wrapper.get_trigger_quota(self.project_name) self.assertEqual(used_t, 2) + def test_get_storagemanager_returns_correct_value_from_db(self): + value = "d.y.s.c.o." 
+ self.execute( + "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', " + "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name}) + # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing + self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0," + " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)") + # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc + self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', " + "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, " + "NULL, NULL, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, " + "16, '{\"storagemanager\":\"%s\"')" % value) + + 
result = self.mom_database_wrapper.get_storagemanager(2) + self.assertEqual(result, value) if __name__ == "__main__": logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) diff --git a/SAS/OTDB/test/CMakeLists.txt b/SAS/OTDB/test/CMakeLists.txt index b308bfd5c8e431685b78cde28c0859fe81a299b8..956188ab070120150d48052313bbfb11c52dd1de 100644 --- a/SAS/OTDB/test/CMakeLists.txt +++ b/SAS/OTDB/test/CMakeLists.txt @@ -3,6 +3,7 @@ include(LofarCTest) lofar_add_test(t_getTreeGroup) +lofar_add_executable(test_db_consistency test_db_consistency.cc) lofar_add_test(tCampaign tCampaign.cc) lofar_add_test(tPICtree tPICtree.cc) lofar_add_test(tPICvalue tPICvalue.cc) diff --git a/SAS/OTDB/test/test_db_consistency.cc b/SAS/OTDB/test/test_db_consistency.cc new file mode 100644 index 0000000000000000000000000000000000000000..9c3834420baa38d74e09eeb9b82465d75f560f92 --- /dev/null +++ b/SAS/OTDB/test/test_db_consistency.cc @@ -0,0 +1,173 @@ +/** + * Copyright (C) 2018 + * ASTRON (Netherlands Foundation for Research in Astronomy) + * P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, softwaresupport@astron.nl + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + * + * $Id$ + */ + + +//# Always #include <lofar_config.h> first! 
+#include <lofar_config.h> + +#include <iostream> +#include <unistd.h> +#include <cstring> +#include <boost/date_time.hpp> +#include <Common/StringUtil.h> +#include <OTDB/OTDBconnection.h> +#include <libgen.h> + + +void help(const std::string& programName) +{ + std::cout << "Usage: " + << programName + << " -dDATABASE -hHOSTNAME -uUSERNAME -pPASSWORD\n\n" + "Query the OTB DB for the VIC treelist. This can help to spot " + "inconsistencies in the DB. Especially when some entry in the DB " + "contains an illegal value for a time-stamp.\n\n"; + exit(1); + +} + +void showTreeList(const std::vector< LOFAR::OTDB::OTDBtree >& trees) +{ + const std::size_t size(trees.size()); + std::cout << size << " records\n"; + + std::cout + << "treeID|Classif|Creator |Creationdate |Type|Campaign|Starttime |ModiDate\n" + << "------+-------+----------+--------------------+----+--------+--------------------+--------------------\n"; + for(std::size_t i(0); i < size; ++i) + { + std::cout << LOFAR::formatString( + "%6d|%7d|%-10.10s|%-20.20s|%4d|%-8.8s|%-20.20s|%s", + trees[i].treeID(), trees[i].classification, + trees[i].creator.c_str(), + boost::posix_time::to_simple_string( + trees[i].creationDate).c_str(), + trees[i].type, + trees[i].campaign.c_str(), + boost::posix_time::to_simple_string( + trees[i].starttime).c_str(), + boost::posix_time::to_simple_string( + trees[i].modificationDate).c_str()) + << "\n"; + } +} + + +int main(int argc, char* argv[]) +{ + const std::string programName(basename(argv[0])); + if(argc != 5) + { + help(programName); + } + + std::string dbName; + std::string hostName; + std::string pw; + std::string user; + int opt(0); + while((opt = getopt(argc, argv, "d:h:p:u:")) != -1) + { + switch(opt) + { + case 'd': + dbName = optarg; + break; + + case 'h': + hostName = optarg; + break; + + case 'p': + pw = optarg; + break; + + case 'u': + user = optarg; + break; + + default: + { + help(programName); + } + break; + } + } + + std::cout << "Using database " + << dbName + << 
" on host " + << hostName + << "\n"; + + // Open the database connection + LOFAR::OTDB::OTDBconnection conn(user, pw, dbName, hostName); + + try + { + std::cout << conn << "\n" << "Trying to connect to the database\n"; + if(!conn.connect()) + { + std::cout << "Connnection failed!\n"; + return (2); + } + else if(!conn.isConnected()) + { + std::cout << "Connnection flag failed!\n"; + return (3); + } + else + { + std::cout << "Connection succesful: " + << conn + << "\n" + << "Executing \"getTreeList(30,0)\"...\n"; + } + + std::vector< LOFAR::OTDB::OTDBtree > treeList(conn.getTreeList(30, 0)); + const std::size_t size(treeList.size()); + if(size > 0) + { + std::cout << "Received " + << size + << " rows from the OTB DB.\n"; + showTreeList(treeList); + } + else + { + std::cout << "Received no rows from the OTB DB!\n" + << conn.errorMsg() + << "\n"; + } + } + catch(std::exception& ex) + { + std::cout << "Unexpected exception: " + << ex.what() + << "\nErrormsg: " + << conn.errorMsg() + << "\n"; + return (4); + } + + return (0); +} diff --git a/SAS/OTDB_Services/test/CMakeLists.txt b/SAS/OTDB_Services/test/CMakeLists.txt index 513a09629072e60ae5465fda6babbf58318636b9..3336d559d7ea6d35fecbd32f9cf4d81c9301bdc7 100644 --- a/SAS/OTDB_Services/test/CMakeLists.txt +++ b/SAS/OTDB_Services/test/CMakeLists.txt @@ -8,12 +8,13 @@ set(_qpid_tests t_TreeService t_TreeStatusEvents) -if(HAVE_QPID) +execute_process(COMMAND qpid-config RESULT_VARIABLE QPID_CONFIG_RESULT OUTPUT_QUIET ERROR_QUIET) + +if(${QPID_CONFIG_RESULT} EQUAL 0) foreach(_test ${_qpid_tests}) lofar_add_test(${_test}) endforeach() else() lofar_join_arguments(_qpid_tests) - message(WARNING "Qpid is not set." - "The following tests will not be run: ${_qpid_tests}") + message(WARNING "No running qpid daemon found. 
The following tests will not be run: ${_qpid_tests}") endif() diff --git a/SAS/QPIDInfrastructure/bin/populateDB.sh b/SAS/QPIDInfrastructure/bin/populateDB.sh index 8b3daa6410c58ee7926101bc0dd5ba18bc9424e9..f56d103e1ca47d2b12cdd1efe7d2d37c838d0286 100755 --- a/SAS/QPIDInfrastructure/bin/populateDB.sh +++ b/SAS/QPIDInfrastructure/bin/populateDB.sh @@ -219,13 +219,29 @@ addtoQPIDDB.py --broker $SCU --exchange ${PREFIX}lofar.mac.notification addtoQPIDDB.py --broker $SCU --queue mom.importxml --federation $MOM_SYSTEM # ----------------------------------------- -# Ingest -> ResourceAssignment +# Ingest -> SCU # ----------------------------------------- addtoQPIDDB.py --broker $LEXAR --exchange ${PREFIX}lofar.lta.ingest.notification --federation $SCU + +# ----------------------------------------- +# Ingest -> ResourceAssignment @ SCU +# ----------------------------------------- + addtoQPIDDB.py --broker $SCU --queue ${PREFIX}lofar.lta.ingest.notification.autocleanupservice addtoQPIDDB.py --broker $SCU --bind --exchange ${PREFIX}lofar.lta.ingest.notification --queue ${PREFIX}lofar.lta.ingest.notification.autocleanupservice --routingkey LTAIngest.# +# ----------------------------------------- +# Ingest -> LTA-storage-overview @ SCU +# ----------------------------------------- + +addtoQPIDDB.py --broker $SCU --queue ${PREFIX}lofar.lta.ingest.notification.for.ltastorageoverview +addtoQPIDDB.py --broker $SCU --bind --exchange ${PREFIX}lofar.lta.ingest.notification --queue ${PREFIX}lofar.lta.ingest.notification.for.ltastorageoverview --routingkey LTAIngest.# + + +# ----------------------------------------- +# CEP4 cpu nodes +# ----------------------------------------- for head in head01.cep4.control.lofar do diff --git a/SAS/ResourceAssignment/Common/CMakeLists.txt b/SAS/ResourceAssignment/Common/CMakeLists.txt index b6fd18fadafe49ea3687574c6f4541bfe2503267..7b3ba62ad2e7440917b29732cb594d0d79bf7d35 100644 --- a/SAS/ResourceAssignment/Common/CMakeLists.txt +++ 
b/SAS/ResourceAssignment/Common/CMakeLists.txt @@ -1,6 +1,6 @@ # $Id: CMakeLists.txt 30355 2014-11-04 13:46:05Z loose $ -lofar_package(RACommon 0.1 DEPENDS pyparameterset MoMQueryService ResourceAssignmentService ResourceAssigner) +lofar_package(RACommon 0.1 DEPENDS pyparameterset MoMQueryService ResourceAssignmentService ) include(PythonInstall) set(USE_PYTHON_COMPILATION Off) diff --git a/SAS/ResourceAssignment/Common/lib/specification.py b/SAS/ResourceAssignment/Common/lib/specification.py index 7f33265a2886fe01ce7fa6aaac490e7adf450458..611a04a1062822f00afe15ebc34e91cadb281d7c 100644 --- a/SAS/ResourceAssignment/Common/lib/specification.py +++ b/SAS/ResourceAssignment/Common/lib/specification.py @@ -22,6 +22,10 @@ """ This is a class to manage and manipulate Task specifications and sync them between OTDB, MoM and RADB. +This should probably be refactored further into a Task class, Estimates, Specification and ResouceClaims classes owned +by/properties of the task. +The current class is a mix of backward compatible dicts in internal_dict, specfication info and methods, and Task +properties and methods. """ from lofar.parameterset import parameterset @@ -76,6 +80,7 @@ class Specification: self.type = None # Task type in RADB self.subtype = None # Task type in RADB self.status = None # Task status, as used in OTDB/MoM. + self.storagemanager = None #Inputs for the scheduler self.min_starttime = None @@ -93,27 +98,36 @@ class Specification: @staticmethod def parse_timedelta(input_value): - if input_value == u"None": - return None + ''' + translates int/float input to a timedelate. 
None input (and 'None' strings) will be translated to timedelta(0) + ''' + if input_value is None: + return timedelta(0) + elif input_value == u"None": + return timedelta(0) elif input_value == "None": - return None + return timedelta(0) elif isinstance(input_value, IntType): return timedelta(seconds=input_value) elif isinstance(input_value, FloatType): return timedelta(seconds=input_value) else: - return input_value + return input_value # todo: maybe raise an Exception instead? @staticmethod def parse_datetime(input_value): + ''' + translates a datetime string to a datetime object, 'None' strings will be translates to actual None. + ''' if input_value == u"None": + # todo: should we translate to a reasonable default datetuime like with timedelta? return None elif input_value == "None": return None elif isinstance(input_value, StringTypes): return parseDatetime(input_value) else: - return input_value + return input_value # todo: maybe raise an Exception instead? def as_dict(self): """"Mostly a serialization function to make a qpid message and for backward compatibility with old functions. 
@@ -143,6 +157,8 @@ class Specification: for p in self.predecessors: result["predecessors"].append(p.as_dict()) result["successors"] = self.successor_ids + result["storagemanager"] = self.storagemanager + result["specification"]["storagemanager"] = self.storagemanager # To have the ResourceEstimator find it return result def from_dict(self, input_dict): @@ -172,6 +188,7 @@ class Specification: spec.from_dict(p) self.predecessors.append(spec) self.successor_ids = input_dict["successors"] + self.storagemanager = input_dict["storagemanager"] def isObservation(self): """:return if the Specification is for an observation.""" @@ -201,32 +218,69 @@ class Specification: # ========================= MoM related methods ======================================================================= + def read_time_restrictions_from_mom(self): + """ + Read the time restrictions from mom and, if present, write values to corresponding instance variables + """ + try: + time_restrictions = self.momquery.get_trigger_time_restrictions(self.mom_id) + self.logger.info("Received time_restrictions from MoM: %s", time_restrictions) + if time_restrictions: + if "minStartTime" in time_restrictions: + self.min_starttime = Specification.parse_datetime(time_restrictions["minStartTime"]) + if "maxDuration" in time_restrictions: + self.max_duration = Specification.parse_timedelta(time_restrictions["maxDuration"]) + if "minDuration" in time_restrictions: + self.min_duration = Specification.parse_timedelta(time_restrictions["minDuration"]) + if "maxEndTime" in time_restrictions: + self.max_endtime = Specification.parse_datetime(time_restrictions["maxEndTime"]) + # todo: why is the trigger_id part of this response? This is a time restrictions query. 
+ # todo: We should at least call a generic query 'get_misc', but it's probably better to have + # todo: specific queries (there is a get_trigger_id) to make the transition easier when this + # todo: works against a service where these things are implemented properly. + if "trigger_id" in time_restrictions: + self.trigger_id = time_restrictions["trigger_id"] + self.logger.info('Found a task mom_id=%s with a trigger_id=%s', self.mom_id, self.trigger_id) + + except Exception as e: + self.logger.exception("read_time_restrictions_from_mom: " + str(e), exc_info=True) + self.set_status("error") + + def read_storagemanager_from_mom(self): + """ + Read the storagemanager from mom and, if present, write the value to the corresponding instance variable + """ + try: + # set storagemanager from misc + storagemanager = self.momquery.get_storagemanager(self.mom_id) + if storagemanager: + self.storagemanager = storagemanager + self.logger.info("Found a task mom_id=%s with storagemanager=%s from MoM", + self.mom_id, self.storagemanager) + except KeyError as ke: + # set default + # self.logger.exception("read_storagemanager_from_mom: " + str(ke), exc_info=False) + self.logger.info("Storagemanager not found in MoM") + # If the key exists in the VIC tree in OTDB, we use that instead if read_from_otdb has been called. + + except Exception as e: + # unexpected error (no entry for momid) + self.logger.exception("read_storagemanager_from_mom: " + str(e), exc_info=True) + self.set_status("error") + + def read_from_mom(self): """"Read specification values from the MoM database, mostly the misc field time restrictions Tries to set min_starttime, max_endtime, min_duration, max_duration, if the Specification has a mom_id - example input: + + Please be aware of potential race conditions if the mom-otdb-adapter hasn't updated MoM yet after changes in + OTDB. Don't read values from MoM that originate or might have more recent values in OTDB. 
""" if self.mom_id: # We might need more than the misc field in the future. # Right now we assume start/end times from OTDB always have priority for example. - try: - misc = self.momquery.get_trigger_time_restrictions(self.mom_id) - self.logger.info("Received misc field from MoM: %s", misc) - if misc: - if "minStartTime" in misc: - self.min_starttime = Specification.parse_datetime(misc["minStartTime"]) - if "minDuration" in misc: - self.max_duration = Specification.parse_timedelta(misc["maxDuration"]) - if "maxDuration" in misc: - self.min_duration = Specification.parse_timedelta(misc["minDuration"]) - if "maxEndTime" in misc: - self.max_endtime = Specification.parse_datetime(misc["maxEndTime"]) - if "trigger_id" in misc: - self.trigger_id = misc["trigger_id"] - self.logger.info('Found a task mom_id=%s with a trigger_id=%s', self.mom_id, self.trigger_id) - except Exception as e: - self.logger.exception("read_from_mom: " + str(e), exc_info=True) - self.set_status("error") + self.read_time_restrictions_from_mom() + self.read_storagemanager_from_mom() else: self.logger.info("This task does not have a mom_id.") @@ -261,6 +315,7 @@ class Specification: @staticmethod def _resourceIndicatorsFromParset(radb_type, radb_subtype, parset, PARSET_PREFIX): """ Extract the parset keys that are required for resource assignment. + Mostly gets used in the ResourceEstimator. The internal_dict should probably be refactored out at some point. 
:param radb_type: task type in RADB format :param radb_subtype: task subtype in RADB format @@ -376,8 +431,13 @@ class Specification: add("Observation.DataProducts.Input_Correlated.identifications", as_strvector) add("Observation.DataProducts.Input_InstrumentModel.enabled", as_bool) add("Observation.DataProducts.Input_InstrumentModel.identifications", as_strvector) + # NOTE: currently these are the only pipelines that use these DPPP keys + # Other pipelines are modelled to possibly do these steps as well, but currently no Default Templates exist add("Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep", as_int) add("Observation.ObservationControl.PythonControl.DPPP.demixer.timestep", as_int) + # Note: hould not actually be used in the ResourceAssinger as the value is stored in + # Specification.storagemanager using get_storagemanager_from_parset + #add("Observation.ObservationControl.PythonControl.DPPP.storagemanager.name") if radb_type == "pipeline" and radb_subtype in ["imaging pipeline", "imaging pipeline msss"]: # Imaging pipeline @@ -388,6 +448,9 @@ class Specification: add("Observation.DataProducts.Input_Correlated.identifications", as_strvector) add("Observation.ObservationControl.PythonControl.Imaging.slices_per_image") add("Observation.ObservationControl.PythonControl.Imaging.subbands_per_image") + # Note: hould not actually be used in the ResourceAssinger as the value is stored in + # Specification.storagemanager using get_storagemanager_from_parset + #add("Observation.ObservationControl.PythonControl.DPPP.storagemanager.name") if radb_type == "pipeline" and radb_subtype == "long baseline pipeline": # Long-baseline pipeline @@ -398,6 +461,9 @@ class Specification: add("Observation.DataProducts.Input_Correlated.identifications", as_strvector) add("Observation.ObservationControl.PythonControl.LongBaseline.subbandgroups_per_ms", as_int) add("Observation.ObservationControl.PythonControl.LongBaseline.subbands_per_subbandgroup", as_int) + # Note: 
hould not actually be used in the ResourceAssinger as the value is stored in + # Specification.storagemanager using get_storagemanager_from_parset + #add("Observation.ObservationControl.PythonControl.DPPP.storagemanager.name") if radb_type == "pipeline" and radb_subtype == "pulsar pipeline": # Pulsar pipeline @@ -463,8 +529,8 @@ class Specification: :return: mom_id as int if found, otherwise None """ - #FIXME: Race condition when asking MoM as the mom-otdb-adapter might not have heard that the - # task is on approved and might still be on approved pending in MoM. + # NOTE: Implemented this way to avoid race condition when asking MoM as the mom-otdb-adapter might + # not have heard that the task is on approved and might still be on approved pending in MoM. # mom_ids = self.momquery.getMoMIdsForOTDBIds([otdb_id]) # So we get the parset for all tasks we receive and try to find a mom_id in there. try: @@ -508,8 +574,7 @@ class Specification: @staticmethod def _get_duration_from_parset(parset, PARSET_PREFIX): """ - Preferably use the duration specified by the parset. If that's not available, calculate the duration from - the difference between start/end times. If that's also impossible, fall back to a default duration + Preferably use the duration specified by the parset. If that's not available return None :param parset: parameterset :param PARSET_PREFIX: Fixed prefix that's to be added to the used OTDB keys @@ -523,6 +588,25 @@ class Specification: duration = timedelta(0) return duration + @staticmethod + def _get_storagemanager_from_parset(parset, PARSET_PREFIX): + """ + Preferably use the storagemanger specified by the parset. If that's not available return None. 
+ + :param parset: parameterset + :param PARSET_PREFIX: Fixed prefix that's to be added to the used OTDB keys + + :returns the obtained storagemanger or None + """ + + try: + storagemanger = parset.getString(PARSET_PREFIX + 'Observation.ObservationControl.PythonControl.DPPP.storagemanager.name') + # TODO: At some point the value should probably be validated but no validation implemented as there + # is no good source of valid values. Things are scattered in XML gen and OTDB and such. + except (ValueError, KeyError, RuntimeError): #TODO RuntimeError copied from get_duration_from_parset? + storagemanger = None + return storagemanger + def _get_parset_from_OTDB(self): """Obtain parset based on self.otdb_id and convert dict to parameterset object @@ -552,6 +636,8 @@ class Specification: self.duration = Specification._get_duration_from_parset(parset, INPUT_PREFIX) self.mom_id = self._get_mom_id_from_parset(parset, INPUT_PREFIX) self.cluster = self.get_cluster_name(parset, INPUT_PREFIX) + #Gets a default storagemanager from OTDB, possibly overridden in a later call to read_from_mom() + self.storagemanager = self._get_storagemanager_from_parset(parset, INPUT_PREFIX) predecessor_ids = self.get_predecessor_ids_from_parset(parset, INPUT_PREFIX) except Exception as e: self.logger.exception(e) diff --git a/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_averaging_pipeline b/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_averaging_pipeline index 8f8e9865ccb8c5209b059ab941909844a84d2453..9572868fbb47382ac441d871776d6af809041898 100644 --- a/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_averaging_pipeline +++ b/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_averaging_pipeline @@ -3,6 +3,4 @@ ObsSW.Observation.momID = 732490 ObsSW.Observation.processType = Pipeline ObsSW.Observation.otdbID = 559779 ObsSW.Observation.processSubtype = 
Averaging Pipeline -ObsSW.Observation.strategy = averaging pipeline - - +ObsSW.Observation.strategy = averaging pipeline \ No newline at end of file diff --git a/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_calibration_pipeline b/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_calibration_pipeline index f0a9718ed14c497909e2889e7dc49c970ed16d1c..72fb4629e6bc5b1a7fb73f9397ca15f531ea439d 100644 --- a/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_calibration_pipeline +++ b/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_calibration_pipeline @@ -389,4 +389,4 @@ ObsSW.Observation.Scheduler.lastPossibleDay = 0 ObsSW.Observation.ObservationControl.PythonControl.BBS.BBDB.Port = 5432 ObsSW.Observation.ObservationControl.PythonControl.BBS.Step.DefaultBBSStep[1].Model.Flagger.Enable = F ObsSW.Observation.Scheduler.nightTimeWeightFactor = 0 - +ObsSW.Observation.ObservationControl.PythonControl.DPPP.storagemanager.name = diff --git a/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_preprocessing b/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_preprocessing index 98851537e10870e46e9c0981592a68278a486b8d..d894914ae59b9b8d24727b27f934399b0e7dff61 100644 --- a/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_preprocessing +++ b/SAS/ResourceAssignment/Common/test/test_specification.in_datasets/tSpecification.in_preprocessing @@ -300,4 +300,4 @@ ObsSW.Observation.Campaign.title = "The LOFAR Two-metre Sky Survey: Opening up a ObsSW.Observation.DataProducts.Output_InstrumentModel.retentiontime = 14 ObsSW.Observation.Scheduler.nightTimeWeightFactor = 0 ObsSW.Observation.DataProducts.Input_Correlated.filenames = 
[L562059_SAP002_SB244_uv.MS,L562059_SAP002_SB245_uv.MS,L562059_SAP002_SB246_uv.MS,L562059_SAP002_SB247_uv.MS,L562059_SAP002_SB248_uv.MS,L562059_SAP002_SB249_uv.MS,L562059_SAP002_SB250_uv.MS,L562059_SAP002_SB251_uv.MS,L562059_SAP002_SB252_uv.MS,L562059_SAP002_SB253_uv.MS,L562059_SAP002_SB254_uv.MS,L562059_SAP002_SB255_uv.MS,L562059_SAP002_SB256_uv.MS,L562059_SAP002_SB257_uv.MS,L562059_SAP002_SB258_uv.MS,L562059_SAP002_SB259_uv.MS,L562059_SAP002_SB260_uv.MS,L562059_SAP002_SB261_uv.MS,L562059_SAP002_SB262_uv.MS,L562059_SAP002_SB263_uv.MS,L562059_SAP002_SB264_uv.MS,L562059_SAP002_SB265_uv.MS,L562059_SAP002_SB266_uv.MS,L562059_SAP002_SB267_uv.MS,L562059_SAP002_SB268_uv.MS,L562059_SAP002_SB269_uv.MS,L562059_SAP002_SB270_uv.MS,L562059_SAP002_SB271_uv.MS,L562059_SAP002_SB272_uv.MS,L562059_SAP002_SB273_uv.MS,L562059_SAP002_SB274_uv.MS,L562059_SAP002_SB275_uv.MS,L562059_SAP002_SB276_uv.MS,L562059_SAP002_SB277_uv.MS,L562059_SAP002_SB278_uv.MS,L562059_SAP002_SB279_uv.MS,L562059_SAP002_SB280_uv.MS,L562059_SAP002_SB281_uv.MS,L562059_SAP002_SB282_uv.MS,L562059_SAP002_SB283_uv.MS,L562059_SAP002_SB284_uv.MS,L562059_SAP002_SB285_uv.MS,L562059_SAP002_SB286_uv.MS,L562059_SAP002_SB287_uv.MS,L562059_SAP002_SB288_uv.MS,L562059_SAP002_SB289_uv.MS,L562059_SAP002_SB290_uv.MS,L562059_SAP002_SB291_uv.MS,L562059_SAP002_SB292_uv.MS,L562059_SAP002_SB293_uv.MS,L562059_SAP002_SB294_uv.MS,L562059_SAP002_SB295_uv.MS,L562059_SAP002_SB296_uv.MS,L562059_SAP002_SB297_uv.MS,L562059_SAP002_SB298_uv.MS,L562059_SAP002_SB299_uv.MS,L562059_SAP002_SB300_uv.MS,L562059_SAP002_SB301_uv.MS,L562059_SAP002_SB302_uv.MS,L562059_SAP002_SB303_uv.MS,L562059_SAP002_SB304_uv.MS,L562059_SAP002_SB305_uv.MS,L562059_SAP002_SB306_uv.MS,L562059_SAP002_SB307_uv.MS,L562059_SAP002_SB308_uv.MS,L562059_SAP002_SB309_uv.MS,L562059_SAP002_SB310_uv.MS,L562059_SAP002_SB311_uv.MS,L562059_SAP002_SB312_uv.MS,L562059_SAP002_SB313_uv.MS,L562059_SAP002_SB314_uv.MS,L562059_SAP002_SB315_uv.MS,L562059_SAP002_SB316_uv.MS,L562059_SAP002_SB317_uv.MS,L
562059_SAP002_SB318_uv.MS,L562059_SAP002_SB319_uv.MS,L562059_SAP002_SB320_uv.MS,L562059_SAP002_SB321_uv.MS,L562059_SAP002_SB322_uv.MS,L562059_SAP002_SB323_uv.MS,L562059_SAP002_SB324_uv.MS,L562059_SAP002_SB325_uv.MS,L562059_SAP002_SB326_uv.MS,L562059_SAP002_SB327_uv.MS,L562059_SAP002_SB328_uv.MS,L562059_SAP002_SB329_uv.MS,L562059_SAP002_SB330_uv.MS,L562059_SAP002_SB331_uv.MS,L562059_SAP002_SB332_uv.MS,L562059_SAP002_SB333_uv.MS,L562059_SAP002_SB334_uv.MS,L562059_SAP002_SB335_uv.MS,L562059_SAP002_SB336_uv.MS,L562059_SAP002_SB337_uv.MS,L562059_SAP002_SB338_uv.MS,L562059_SAP002_SB339_uv.MS,L562059_SAP002_SB340_uv.MS,L562059_SAP002_SB341_uv.MS,L562059_SAP002_SB342_uv.MS,L562059_SAP002_SB343_uv.MS,L562059_SAP002_SB344_uv.MS,L562059_SAP002_SB345_uv.MS,L562059_SAP002_SB346_uv.MS,L562059_SAP002_SB347_uv.MS,L562059_SAP002_SB348_uv.MS,L562059_SAP002_SB349_uv.MS,L562059_SAP002_SB350_uv.MS,L562059_SAP002_SB351_uv.MS,L562059_SAP002_SB352_uv.MS,L562059_SAP002_SB353_uv.MS,L562059_SAP002_SB354_uv.MS,L562059_SAP002_SB355_uv.MS,L562059_SAP002_SB356_uv.MS,L562059_SAP002_SB357_uv.MS,L562059_SAP002_SB358_uv.MS,L562059_SAP002_SB359_uv.MS,L562059_SAP002_SB360_uv.MS,L562059_SAP002_SB361_uv.MS,L562059_SAP002_SB362_uv.MS,L562059_SAP002_SB363_uv.MS,L562059_SAP002_SB364_uv.MS,L562059_SAP002_SB365_uv.MS,L562059_SAP002_SB366_uv.MS,L562059_SAP002_SB367_uv.MS,L562059_SAP002_SB368_uv.MS,L562059_SAP002_SB369_uv.MS,L562059_SAP002_SB370_uv.MS,L562059_SAP002_SB371_uv.MS,L562059_SAP002_SB372_uv.MS,L562059_SAP002_SB373_uv.MS,L562059_SAP002_SB374_uv.MS,L562059_SAP002_SB375_uv.MS,L562059_SAP002_SB376_uv.MS,L562059_SAP002_SB377_uv.MS,L562059_SAP002_SB378_uv.MS,L562059_SAP002_SB379_uv.MS,L562059_SAP002_SB380_uv.MS,L562059_SAP002_SB381_uv.MS,L562059_SAP002_SB382_uv.MS,L562059_SAP002_SB383_uv.MS,L562059_SAP002_SB384_uv.MS,L562059_SAP002_SB385_uv.MS,L562059_SAP002_SB386_uv.MS,L562059_SAP002_SB387_uv.MS,L562059_SAP002_SB388_uv.MS,L562059_SAP002_SB389_uv.MS,L562059_SAP002_SB390_uv.MS,L562059_SAP002_SB391_uv.MS,L56
2059_SAP002_SB392_uv.MS,L562059_SAP002_SB393_uv.MS,L562059_SAP002_SB394_uv.MS,L562059_SAP002_SB395_uv.MS,L562059_SAP002_SB396_uv.MS,L562059_SAP002_SB397_uv.MS,L562059_SAP002_SB398_uv.MS,L562059_SAP002_SB399_uv.MS,L562059_SAP002_SB400_uv.MS,L562059_SAP002_SB401_uv.MS,L562059_SAP002_SB402_uv.MS,L562059_SAP002_SB403_uv.MS,L562059_SAP002_SB404_uv.MS,L562059_SAP002_SB405_uv.MS,L562059_SAP002_SB406_uv.MS,L562059_SAP002_SB407_uv.MS,L562059_SAP002_SB408_uv.MS,L562059_SAP002_SB409_uv.MS,L562059_SAP002_SB410_uv.MS,L562059_SAP002_SB411_uv.MS,L562059_SAP002_SB412_uv.MS,L562059_SAP002_SB413_uv.MS,L562059_SAP002_SB414_uv.MS,L562059_SAP002_SB415_uv.MS,L562059_SAP002_SB416_uv.MS,L562059_SAP002_SB417_uv.MS,L562059_SAP002_SB418_uv.MS,L562059_SAP002_SB419_uv.MS,L562059_SAP002_SB420_uv.MS,L562059_SAP002_SB421_uv.MS,L562059_SAP002_SB422_uv.MS,L562059_SAP002_SB423_uv.MS,L562059_SAP002_SB424_uv.MS,L562059_SAP002_SB425_uv.MS,L562059_SAP002_SB426_uv.MS,L562059_SAP002_SB427_uv.MS,L562059_SAP002_SB428_uv.MS,L562059_SAP002_SB429_uv.MS,L562059_SAP002_SB430_uv.MS,L562059_SAP002_SB431_uv.MS,L562059_SAP002_SB432_uv.MS,L562059_SAP002_SB433_uv.MS,L562059_SAP002_SB434_uv.MS,L562059_SAP002_SB435_uv.MS,L562059_SAP002_SB436_uv.MS,L562059_SAP002_SB437_uv.MS,L562059_SAP002_SB438_uv.MS,L562059_SAP002_SB439_uv.MS,L562059_SAP002_SB440_uv.MS,L562059_SAP002_SB441_uv.MS,L562059_SAP002_SB442_uv.MS,L562059_SAP002_SB443_uv.MS,L562059_SAP002_SB444_uv.MS,L562059_SAP002_SB445_uv.MS,L562059_SAP002_SB446_uv.MS,L562059_SAP002_SB447_uv.MS,L562059_SAP002_SB448_uv.MS,L562059_SAP002_SB449_uv.MS,L562059_SAP002_SB450_uv.MS,L562059_SAP002_SB451_uv.MS,L562059_SAP002_SB452_uv.MS,L562059_SAP002_SB453_uv.MS,L562059_SAP002_SB454_uv.MS,L562059_SAP002_SB455_uv.MS,L562059_SAP002_SB456_uv.MS,L562059_SAP002_SB457_uv.MS,L562059_SAP002_SB458_uv.MS,L562059_SAP002_SB459_uv.MS,L562059_SAP002_SB460_uv.MS,L562059_SAP002_SB461_uv.MS,L562059_SAP002_SB462_uv.MS,L562059_SAP002_SB463_uv.MS,L562059_SAP002_SB464_uv.MS,L562059_SAP002_SB465_uv.MS,L5620
59_SAP002_SB466_uv.MS,L562059_SAP002_SB467_uv.MS,L562059_SAP002_SB468_uv.MS,L562059_SAP002_SB469_uv.MS,L562059_SAP002_SB470_uv.MS,L562059_SAP002_SB471_uv.MS,L562059_SAP002_SB472_uv.MS,L562059_SAP002_SB473_uv.MS,L562059_SAP002_SB474_uv.MS,L562059_SAP002_SB475_uv.MS,L562059_SAP002_SB476_uv.MS,L562059_SAP002_SB477_uv.MS,L562059_SAP002_SB478_uv.MS,L562059_SAP002_SB479_uv.MS,L562059_SAP002_SB480_uv.MS,L562059_SAP002_SB481_uv.MS,L562059_SAP002_SB482_uv.MS,L562059_SAP002_SB483_uv.MS,L562059_SAP002_SB484_uv.MS,L562059_SAP002_SB485_uv.MS,L562059_SAP002_SB486_uv.MS] - +ObsSW.Observation.ObservationControl.PythonControl.DPPP.storagemanager.name = dysco diff --git a/SAS/ResourceAssignment/Common/test/test_specification.py b/SAS/ResourceAssignment/Common/test/test_specification.py index e4c8ed7e02ec0db7bf457a7aa231404521d6ce3a..c9d7f826f73a841724c912f729897a7e92a3da03 100755 --- a/SAS/ResourceAssignment/Common/test/test_specification.py +++ b/SAS/ResourceAssignment/Common/test/test_specification.py @@ -48,7 +48,7 @@ class General(unittest.TestCase): # ---------------------------------------------------------------------------------------------- # Tests of functions to read values from MoM - def test_read_from_mom_misc(self): + def test_read_from_mom_with_misc_info(self): """ Verify that get_specification properly generates an RA parset subset for a preprocessing pipeline parset """ # Arrange @@ -56,12 +56,12 @@ class General(unittest.TestCase): max_end_time = datetime(2017, 10, 3, 22, 43, 12) min_duration = timedelta(seconds=200) max_duration = timedelta(seconds=3600) - self.momrpc_mock.get_trigger_time_restrictions.return_value = { - "minStartTime": min_start_time, - "maxEndTime": max_end_time, - "minDuration": min_duration, - "maxDuration": max_duration} - + storagemanager = "dysco" + self.momrpc_mock.get_trigger_time_restrictions.return_value = {"minStartTime": min_start_time, + "maxEndTime": max_end_time, + "minDuration": min_duration, + "maxDuration": max_duration} + 
self.momrpc_mock.get_storagemanager.return_value = storagemanager self.specification.mom_id = 1 # Act @@ -72,26 +72,61 @@ class General(unittest.TestCase): self.assertEqual(self.specification.max_endtime, max_end_time) self.assertEqual(self.specification.min_duration, min_duration) self.assertEqual(self.specification.max_duration, max_duration) + self.assertEqual(self.specification.storagemanager, storagemanager) - def test_read_from_mom_no_misc(self): + def test_read_from_mom_without_misc_info(self): """ Verify that get_specification properly generates an RA parset subset for a preprocessing pipeline parset """ # Arrange - self.momrpc_mock.get_trigger_time_restrictions.return_value = {"minStartTime", None} + self.momrpc_mock.get_trigger_time_restrictions.return_value = {"minStartTime": None, + "minDuration": None, + "maxDuration": None, + "maxEndTime": None, + "trigger_id": None} + self.momrpc_mock.get_storagemanager.side_effect = KeyError('No "storagemanager" key in misc') self.specification.mom_id = 1 # Act - self.specification.read_from_mom() - - # Assert - self.assertEqual(self.specification.min_starttime, None) - self.assertEqual(self.specification.max_endtime, None) - self.assertEqual(self.specification.min_duration, timedelta(seconds=0)) - self.assertEqual(self.specification.max_duration, timedelta(seconds=0)) + with mock.patch.object(self.specification, 'set_status') as status_mock: + self.specification.read_from_mom() + + # Assert + # Note that whatever was set in specification will be overridden here because momrpc returns defaults + # on an empty misc field + self.assertEqual(self.specification.min_starttime, None) + self.assertEqual(self.specification.max_endtime, None) + self.assertEqual(self.specification.min_duration, timedelta(0)) # None is translated to timedelta(0) + self.assertEqual(self.specification.max_duration, timedelta(0)) # None is translated to timedelta(0) + self.assertEqual(self.specification.storagemanager, None) # default + 
status_mock.assert_not_called() + + def test_read_from_mom_without_mom_id(self): + """ Verify that get_specification properly generates an RA parset subset for a preprocessing pipeline parset """ + # Arrange + self.momrpc_mock.get_trigger_time_restrictions.return_value = {"minStartTime": None, + "minDuration": None, + "maxDuration": None, + "maxEndTime": None, + "trigger_id": None} + self.momrpc_mock.get_storagemanager.side_effect = ValueError('No match for MoM id') + self.specification.mom_id = 1 - # ---------------------------------------------------------------------------------------------- - # Tests of resourceIndicatorsFromParset one for each type of input parset, might duplicate code - # in RATaskSpecified + # Act + with mock.patch.object(self.specification, 'set_status') as status_mock: + self.specification.read_from_mom() + + # Assert + # Note that whatever was set in specification will be overridden here because momrpc returns defaults on + # and empty misc field + self.assertEqual(self.specification.min_starttime, None) + self.assertEqual(self.specification.max_endtime, None) + self.assertEqual(self.specification.min_duration, timedelta(0)) # None is translated to timedelta(0) + self.assertEqual(self.specification.max_duration, timedelta(0)) # None is translated to timedelta(0) + self.assertEqual(self.specification.storagemanager, None) + status_mock.assert_called_with('error') + + # ------------------------------------------------------------------------------------------------------------------ + # Tests of resourceIndicatorsFromParset one for each type of input parset, might duplicate code in RATaskSpecified def test_preprocessing_pipeline(self): """ Verify that get_specification properly generates an RA parset subset for a preprocessing @@ -131,6 +166,7 @@ class General(unittest.TestCase): result['Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep'], 4) self.assertEqual( 
result['Observation.ObservationControl.PythonControl.DPPP.demixer.timestep'], 1) + #self.assertEqual(result['Observation.ObservationControl.PythonControl.DPPP.storagemanager.name'], "dysco") def test_beam_observation(self): """ Verify that get_specification properly generates an RA parset subset for a beam @@ -255,6 +291,7 @@ class General(unittest.TestCase): result['Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep'], 4) self.assertEqual( result['Observation.ObservationControl.PythonControl.DPPP.demixer.timestep'], 1) + #self.assertEqual(result['Observation.ObservationControl.PythonControl.DPPP.storagemanager.name'], '') def test_long_baseline_pipeline(self): """ Verify that get_specification properly generates an RA parset subset for a long-baseline @@ -537,6 +574,30 @@ class General(unittest.TestCase): # Assert self.assertEqual(duration, timedelta(0)) + def test_get_storagemanager_from_parset(self): + """ Verify that storagemanager is read properly from parset """ + # Arrange + input_parset_file = os.path.join(self.data_sets_dir, "tSpecification.in_preprocessing") + input_parset = parameterset(input_parset_file) + + # Act + storagemanager = Specification._get_storagemanager_from_parset(input_parset, INPUT_PREFIX) + + # Assert + self.assertEqual(storagemanager, "dysco") + + def test_get_no_storagemanager_from_parset(self): + """ Verify that get_specification properly generates an RA parset subset for a reservation task """ + # Arrange + input_parset_file = os.path.join(self.data_sets_dir, "tSpecification.in_averaging_pipeline") + input_parset = parameterset(input_parset_file) + + # Act + storagemanager = Specification._get_storagemanager_from_parset(input_parset, INPUT_PREFIX) + + # Assert + self.assertEqual(storagemanager, None) + def test_get_parset_from_OTDB(self): """ Verify that _get_parset_from_OTDB gets the partset for a interferometer_observation task """ @@ -587,6 +648,59 @@ class General(unittest.TestCase): 
self.assertEqual(self.specification.cluster, 'CEP4') self.otdbrpc_mock.taskGetSpecification.assert_any_call(otdb_id=559779) + def test_read_from_otdb_with_get_storagewriter_mocked(self): + """ Verify that _get_parset_from_OTDB gets the partset for a + preprocessing pipeline task if get_storage_writer returns a storagemanager """ + # Arrange + input_parset_file = os.path.join(self.data_sets_dir, "tSpecification.in_preprocessing") + pipeline_specification_tree = parameterset(input_parset_file).dict() + self.otdbrpc_mock.taskGetSpecification.return_value = {'otdb_id': 562063, 'specification': pipeline_specification_tree} + self.radbrpc_mock.getResourceGroupNames.return_value = [{'name': 'CEP4'}] + + with mock.patch.object(self.specification, "_get_storagemanager_from_parset", mock.MagicMock(return_value="dysco")): + + # Act + predecessors = self.specification.read_from_otdb(562063) + + # Assert + #TODO not sure what more to assert here + self.assertEqual(predecessors, [{'source': 'otdb', 'id': 562059}]) + self.assertEqual(self.specification.cluster, 'CEP4') + self.otdbrpc_mock.taskGetSpecification.assert_any_call(otdb_id=562063) + self.specification._get_storagemanager_from_parset.assert_called_once() + # Note: call args are a bit inconvenient to test because specification wraps the dict response in a + # parameterset object internally. 
So the following is not possible: + # self.specification._get_storagemanager_from_parset.assert_called_with(mocked_parset, 'ObsSW') + (call_parset, call_prefix), kwargs = self.specification._get_storagemanager_from_parset.call_args + self.assertEqual(call_parset.dict(), pipeline_specification_tree) + self.assertEqual(call_prefix, 'ObsSW.') + self.assertEqual(self.specification.storagemanager, "dysco") + + + def test_read_from_otdb_with_storagewriter(self): + """ Verify that _get_parset_from_OTDB gets the partset for a for a + preprocessing pipeline task with a storagemanager defined """ + # Arrange + input_parset_file = os.path.join(self.data_sets_dir, "tSpecification.in_preprocessing") + parset_file = open(input_parset_file) + pipeline_specification_tree = {} + for line in parset_file.readlines(): + if '=' in line: + key, value = line.split('=') + pipeline_specification_tree[key.strip()] = value.strip() + self.otdbrpc_mock.taskGetSpecification.return_value = {'otdb_id': 562063, 'specification': pipeline_specification_tree} + self.radbrpc_mock.getResourceGroupNames.return_value = [{'name': 'CEP4'}] + + # Act + predecessors = self.specification.read_from_otdb(562063) + + # Assert + #TODO not sure what more to assert here + self.assertEqual(predecessors, [{'source': 'otdb', 'id': 562059}]) + self.assertEqual(self.specification.cluster, 'CEP4') + self.otdbrpc_mock.taskGetSpecification.assert_any_call(otdb_id=562063) + self.assertEqual(self.specification.storagemanager, "dysco") + def test_convert_id_to_otdb_ids_other(self): """ Verify that _get_parset_from_OTDB gets the partset for a interferometer_observation task """ diff --git a/SAS/ResourceAssignment/RATaskSpecifiedService/doc/RATaskSpecifiedService.md b/SAS/ResourceAssignment/RATaskSpecifiedService/doc/RATaskSpecifiedService.md index 5b4c7b7d3462b86bd4cb948a181871895ea5f43c..95f4a8a77668c24aff05da451de15e67bac32c15 100644 --- a/SAS/ResourceAssignment/RATaskSpecifiedService/doc/RATaskSpecifiedService.md +++ 
b/SAS/ResourceAssignment/RATaskSpecifiedService/doc/RATaskSpecifiedService.md @@ -15,9 +15,8 @@ - Auke Klazema <klazema@astron.nl> ### Overview -- See the [resource assigner redesign](https://www.astron.nl/lofarwiki/doku.php?id=rrr:redesign_resource_assignment_system) for some diagrams. -- The service fits in the OTDB Task Watcher in the first diagram, and in the ResourceAssignService of the detailed view. It writes to the lofar.ra.notification bus. -- *todo*: come up with a diagram that matches the actual implementation +- Find diagrams (in graphml/odf format) can be found in the [SVN documentation on SAS redesign for responsive telescope](https://svn.astron.nl/LOFAR/trunk//SAS/doc/SAS_redesign_for_responsive_telescope/) +- Find outdated diagrams in png format in the wiki page [resource assigner redesign](https://www.astron.nl/lofarwiki/doku.php?id=rrr:redesign_resource_assignment_system). This service fits in the OTDB Task Watcher in the first diagram, and in the ResourceAssignService of the detailed view. It writes to the lofar.ra.notification bus. - - - diff --git a/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py b/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py index ddf092625bd4d8bc295fd0e7b1599966a34deab8..5de6a6f8f5ba3003d353669dd46764df6ecbbe23 100755 --- a/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py +++ b/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py @@ -80,6 +80,12 @@ class RATaskSpecified(OTDBBusListener): self.momrpc.close() self.otdbrpc.close() + # This is mainly to trigger the propagation of misc field values through read_from_mom + # and then sending them to the RA to OTDB Service in the resource assigner. + # Might need to be a separate service if we take on more mom-otdb-adapter function. 
+ def onObservationApproved(self, main_id, modificationTime): + self.createAndSendSpecifiedTask(main_id, 'approved') + def onObservationPrescheduled(self, main_id, modificationTime): self.createAndSendSpecifiedTask(main_id, 'prescheduled') diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py index 7c19669d317e6d9b5170e779f697679acdb34dde..c6c57eac2014b1a55720529662b4b9d6ebc782dd 100755 --- a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py @@ -45,6 +45,8 @@ from lofar.sas.resourceassignment.ratootdbtaskspecificationpropagator.translator from lofar.mom.momqueryservice.momqueryrpc import MoMQueryRPC from lofar.mom.momqueryservice.config import DEFAULT_MOMQUERY_BUSNAME, DEFAULT_MOMQUERY_SERVICENAME +from lofar.sas.resourceassignment.common.specification import Specification + logger = logging.getLogger(__name__) @@ -90,17 +92,36 @@ class RAtoOTDBPropagator(): self.close() def open(self): - """Open rpc connections to radb service and resource estimator service""" + """Open rpc connections to radb, OTDB and MoMQuery service""" self.radbrpc.open() self.otdbrpc.open() self.momrpc.open() def close(self): - """Close rpc connections to radb service and resource estimator service""" + """Close rpc connections to radb, OTDB and MoMQuery service""" self.radbrpc.close() self.otdbrpc.close() self.momrpc.close() + def doTaskApproved(self, otdb_id, mom_id): + logger.info('doTaskApproved: otdb_id=%s' % (otdb_id,)) + if not otdb_id: + logger.warning('doTaskApproved no valid otdb_id: otdb_id=%s' % (otdb_id,)) + return + try: + if not mom_id: + logger.info('doTaskApproved no valid mom_id, we do nothing') + else: + mom_info = self.getMoMinfo(mom_id) + logger.info('MOM info for mom_id=%s: %s' % (mom_id, mom_info.as_dict())) + otdb_info = 
self.translator.CreateParset(otdb_id, None, None, mom_info) + if otdb_info: + logger.info('Setting specification for otdb_id %s:\n' % (otdb_id,)) + logger.info(pprint.pformat(otdb_info)) + self.otdbrpc.taskSetSpecification(otdb_id, otdb_info) + except Exception as e: + logger.error('doTaskApproved: %s', traceback.format_exc()) + def doTaskConflict(self, otdb_id): logger.info('doTaskConflict: otdb_id=%s' % (otdb_id,)) if not otdb_id: @@ -133,6 +154,10 @@ class RAtoOTDBPropagator(): project_name = 'unknown' + mom_info = self.getMoMinfo(mom_id) + + logger.info('MOM info for mom_id=%s: %s' % (mom_id, mom_info.as_dict())) + if mom_id: #get mom project name try: @@ -144,7 +169,8 @@ class RAtoOTDBPropagator(): logger.info("Using 'unknown' as project name.") project_name = 'unknown' - otdb_info = self.translator.CreateParset(otdb_id, ra_info, project_name) + otdb_info = self.translator.CreateParset(otdb_id, ra_info, project_name, mom_info) + self.setOTDBinfo(otdb_id, otdb_info, 'scheduled') except Exception as e: logger.error('doTaskScheduled: %s', traceback.format_exc()) @@ -232,6 +258,18 @@ class RAtoOTDBPropagator(): return info + def getMoMinfo(self, mom_id): + ''' + Creates a specification object and reads information from MoM + (currently time restrictions and storagemanager from the misc field) + :param mom_id: + :return: RACommon specification object + ''' + spec = Specification(logger, self.otdbrpc, self.momrpc, self.radbrpc) + spec.mom_id = mom_id + spec.read_from_mom() + return spec + def setOTDBinfo(self, otdb_id, otdb_info, otdb_status): try: logger.info('Setting specification for otdb_id %s:\n' % (otdb_id,)) diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/rotspservice.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/rotspservice.py index e66822e21b6b73795a084454955cc70cd87933bd..6456d59ac79b0530d5fad2e2410d7e25e6b32a71 100755 --- a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/rotspservice.py +++ 
b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/rotspservice.py @@ -64,6 +64,15 @@ class RATaskStatusChangedListener(RABusListener): if not self.propagator: self.propagator = RAtoOTDBPropagator() + + def onTaskApproved(self, task_ids): + radb_id = task_ids.get('radb_id') + otdb_id = task_ids.get('otdb_id') + mom_id = task_ids.get('mom_id') + logger.info('onTaskApproved: radb_id=%s otdb_id=%s mom_id=%s', radb_id, otdb_id, mom_id) + + self.propagator.doTaskApproved(otdb_id, mom_id) + def onTaskScheduled(self, task_ids): radb_id = task_ids.get('radb_id') otdb_id = task_ids.get('otdb_id') diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py index 9af2f57f4dbe36597b4ca5122e8eda781cf78273..7992fb2fbe1b3fed5b84473e5a4bbc107bd66f9a 100755 --- a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py @@ -463,38 +463,55 @@ class RAtoOTDBTranslator(): return parset_dict - def CreateParset(self, otdb_id, ra_info, project_name): - logger.info('CreateParset for %s with start=%s, end=%s' % (otdb_id, ra_info['starttime'], ra_info['endtime'])) + def CreateParset(self, otdb_id, ra_info, project_name, mom_info): + """ + :param mom_info: Specification object + """ + logger.info('CreateParset for %s' % (otdb_id,)) parset = {} #parset[PREFIX+'momID'] = str(mom_id) - parset[PREFIX+'startTime'] = ra_info['starttime'].strftime('%Y-%m-%d %H:%M:%S') - parset[PREFIX+'stopTime'] = ra_info['endtime'].strftime('%Y-%m-%d %H:%M:%S') - - # Station resources are dealt with as follows: - # * Station list: This is part of the specification, so already in OTDB. The RA only checks for conflicts. 
- # * Data slots: Stations & Cobalt derive a default data-slot mapping, so no need to specify one until Cobalt - # can read data from the same antenna field for multiple observations (=the use case for data - # slots). - # Cobalt resources are dealt with as follows: - # * Cobalt.blockSize is part of the specification. - # * Cobalt resources are not modelled and allocated, so the defaults (cbt001-8) will be used. - - if 'storage' in ra_info: - logging.info("Adding storage claims to parset\n" + pprint.pformat(ra_info['storage'])) - parset.update(self.ProcessStorageInfo(otdb_id, ra_info['storage'], ra_info['cluster'], project_name)) - if ra_info['type'] == 'observation': - # Atm, the observation inspection plots start script are CEP4-specific, - # and the results are expected to be posted from a single cluster (i.e. CEP4). - # (Inspection plots from station subband stats are independent from this and always avail.) - if any(key.endswith('.locations') and 'CEP4:' in val for key, val in parset.items()): - logging.info("CreateParset: Adding inspection plot commands to parset") - parset[PREFIX+'ObservationControl.OnlineControl.inspectionHost'] = 'head01.cep4.control.lofar' - parset[PREFIX+'ObservationControl.OnlineControl.inspectionProgram'] = 'inspection-plots-observation.sh' - - #special case for dynspec projects for Richard Fallows - if project_name in ['IPS_Commissioning', 'LC6_001', 'LC7_001', 'LC8_001', 'LC9_001']: - logging.info("CreateParset: Overwriting inspectionProgram parset key for dynspec") - parset[PREFIX+'ObservationControl.OnlineControl.inspectionProgram'] = '/data/home/lofarsys/dynspec/scripts/inspection-dynspec-observation.sh' + if ra_info: + logger.info('start=%s, end=%s' % (ra_info['starttime'], ra_info['endtime'])) + parset[PREFIX+'startTime'] = ra_info['starttime'].strftime('%Y-%m-%d %H:%M:%S') + parset[PREFIX+'stopTime'] = ra_info['endtime'].strftime('%Y-%m-%d %H:%M:%S') + + # Station resources are dealt with as follows: + # * Station list: This is 
part of the specification, so already in OTDB. The RA only checks for conflicts. + # * Data slots: Stations & Cobalt derive a default data-slot mapping, so no need to specify one until Cobalt + # can read data from the same antenna field for multiple observations (=the use case for data + # slots). + # Cobalt resources are dealt with as follows: + # * Cobalt.blockSize is part of the specification. + # * Cobalt resources are not modelled and allocated, so the defaults (cbt001-8) will be used. + + if 'storage' in ra_info: + logging.info("Adding storage claims to parset\n" + pprint.pformat(ra_info['storage'])) + parset.update(self.ProcessStorageInfo(otdb_id, ra_info['storage'], ra_info['cluster'], project_name)) + + if ra_info['type'] == 'observation': + # Atm, the observation inspection plots start script are CEP4-specific, + # and the results are expected to be posted from a single cluster (i.e. CEP4). + # (Inspection plots from station subband stats are independent from this and always avail.) + if any(key.endswith('.locations') and 'CEP4:' in val for key, val in parset.items()): + logging.info("CreateParset: Adding inspection plot commands to parset") + parset[PREFIX+'ObservationControl.OnlineControl.inspectionHost'] = 'head01.cep4.control.lofar' + parset[PREFIX+'ObservationControl.OnlineControl.inspectionProgram'] = 'inspection-plots-observation.sh' + + #special case for dynspec projects for Richard Fallows + ## JK: you know, we had someone entering 'special cases' like this based on pulsar names in the GLOW + ## control software, giving everyone a puzzled expression on their face and a big headache when figuring + ## out why the system was sometimes behaving so funny... + # FIXME: please find a better way to do this or remove this hack when not necessary any more! 
+ if project_name in ['IPS_Commissioning', 'LC6_001', 'LC7_001', 'LC8_001', 'LC9_001', 'LT10_001']: + logging.info("CreateParset: Overwriting inspectionProgram parset key for dynspec") + parset[PREFIX+'ObservationControl.OnlineControl.inspectionProgram'] = '/data/home/lofarsys/dynspec/scripts/inspection-dynspec-observation.sh' + + # Everything else gets added though the mom-otdb-adapter, this is only here to not have to change the code + # in the "no longer maintained" MoM and mom-otdb-adapter + storagemanager = mom_info.storagemanager + if storagemanager is not None: # should be "" or "dysco" + logging.info("Adding storagemanager to parset: %s" % storagemanager) + parset[PREFIX+"ObservationControl.PythonControl.DPPP.msout.storagemanager.name"] = storagemanager return parset diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/CMakeLists.txt b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/CMakeLists.txt index bef78ee1fbe64cba4cc7d0a8fee4630c7559ab18..7b920f513eaa1c09368a584a471f582cbf244186 100644 --- a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/CMakeLists.txt +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/CMakeLists.txt @@ -2,5 +2,6 @@ include(LofarCTest) lofar_add_test(t_rotspservice) - +lofar_add_test(t_propagator) +lofar_add_test(t_translator) diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.py new file mode 100644 index 0000000000000000000000000000000000000000..9a299f83b9d981f22d1bff6f686f97a8f7e9ccbc --- /dev/null +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.py @@ -0,0 +1,63 @@ +import unittest +from mock import MagicMock +from lofar.sas.resourceassignment.ratootdbtaskspecificationpropagator.propagator import RAtoOTDBPropagator +from lofar.sas.resourceassignment.common.specification import Specification + + +class 
RAtoOTDBPropagatorTest(unittest.TestCase): + + def setUp(self): + pass + + def test_doTaskScheduled_calls_CreateParset_with_correct_info(self): + + # test values + raid = 1 + otdbid = 2 + momid = 3 + rainfo = {'ra': 'info'} + mominfo = Specification(None, None, None, None) + projectname = 'myproject' + parset = 'par-set' + + # setup mocks + prop = RAtoOTDBPropagator() + prop.getRAinfo = MagicMock(return_value=rainfo) + prop.getMoMinfo = MagicMock(return_value=mominfo) + prop.momrpc.getObjectDetails = MagicMock(return_value={momid:{'project_name':projectname}}) + prop.translator.CreateParset = MagicMock(return_value=parset) + prop.setOTDBinfo = MagicMock() + + # trigger test action + prop.doTaskScheduled(raid, otdbid, momid) + + # assert info was gathered with correct id and createparsec is called with the returned info + prop.getRAinfo.assert_called_once_with(raid) + prop.getMoMinfo.assert_called_once_with(momid) + prop.momrpc.getObjectDetails.assert_called_once_with(momid) + prop.translator.CreateParset.assert_called_once_with(otdbid, rainfo, projectname, mominfo) + prop.setOTDBinfo.assert_called_once_with(otdbid, parset, 'scheduled') + + def test_getMoMinfo_returns_storagemanager_from_MoM(self): + + # test values + momid = 3 + storagemanager = "d.y.s.c.o" + + # setup mocks + prop = RAtoOTDBPropagator() + prop.momrpc = MagicMock() + prop.momrpc.get_storagemanager.return_value = storagemanager + + # trigger test action + mominfo = prop.getMoMinfo(momid) + + # assert momrpc is called with correct id + prop.momrpc.get_storagemanager.assert_called_once_with(momid) + # assert returned value by mom is part of spec (yes, that should strictly not be tested here) + self.assertEqual(mominfo.storagemanager, storagemanager) + + +if __name__ == "__main__": + unittest.main() + diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.run b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.run new file mode 100755 index 
0000000000000000000000000000000000000000..0094773e70b9bc9d3e2fc4e288980b666aa57a6c --- /dev/null +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "propagator*" t_propagator.py + diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.sh b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.sh new file mode 100755 index 0000000000000000000000000000000000000000..9662213fdfab65972798a2d78e0d961fb609ce9d --- /dev/null +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_propagator.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_propagator diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.py new file mode 100644 index 0000000000000000000000000000000000000000..f71fc532fcba5f915d595d65c8035a807ccd4673 --- /dev/null +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.py @@ -0,0 +1,37 @@ +import unittest +from mock import MagicMock +from lofar.sas.resourceassignment.ratootdbtaskspecificationpropagator.translator import RAtoOTDBTranslator, PREFIX +from lofar.sas.resourceassignment.common.specification import Specification +import datetime + + +class RAtoOTDBPropagatorTest(unittest.TestCase): + + def setUp(self): + pass + + def test_CreateParset_returns_storagemanager_from_MoM_as_DPPP_parameter(self): + + # test values: + otdb_id = 123 + + start = datetime.datetime.utcnow() + end = datetime.datetime.utcnow() + datetime.timedelta(hours=1) + ra_info = {"starttime" : start, "endtime": end, "status": "test_in_progress", "type": "test", "cluster": "CEP4"} + + project_name = "myproject" + + storagemanager = "d.y.s.c.o." 
+ mom_info = Specification(None, None, None, None) + mom_info.storagemanager = storagemanager + + # trigger action: + value = RAtoOTDBTranslator().CreateParset(otdb_id, ra_info, project_name, mom_info) + + # assert: + self.assertEqual(value[PREFIX+"ObservationControl.PythonControl.DPPP.msout.storagemanager.name"], storagemanager) + + +if __name__ == "__main__": + unittest.main() + diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.run b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.run new file mode 100755 index 0000000000000000000000000000000000000000..ab644b37e44e70b012d30e79d18cfb9100a4e303 --- /dev/null +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "translator*" t_translator.py + diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.sh b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.sh new file mode 100755 index 0000000000000000000000000000000000000000..722a0090c3d0cbf07b76607779b65216bbded3a8 --- /dev/null +++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/test/t_translator.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +./runctest.sh t_translator + diff --git a/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt index f9de242c044c9d6caed1b71118e503af9fe6ac1b..d61d9c29d285501ac32967b401e6bb7390cf2a7d 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt +++ b/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt @@ -1,6 +1,6 @@ # $Id: CMakeLists.txt 30355 2014-11-04 13:46:05Z loose $ -lofar_package(ResourceAssigner 0.1 DEPENDS PyMessaging PyCommon pyparameterset OTDB_Services ResourceAssignmentService MoMQueryServiceClient ResourceAssignmentEstimator CleanupClient StorageQueryService MAC_Services MessageBus) 
+lofar_package(ResourceAssigner 0.1 DEPENDS PyMessaging PyCommon pyparameterset OTDB_Services ResourceAssignmentService MoMQueryServiceClient ResourceAssignmentEstimator CleanupClient StorageQueryService MAC_Services MessageBus ) include(PythonInstall) set(USE_PYTHON_COMPILATION Off) diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/rabuslistener.py b/SAS/ResourceAssignment/ResourceAssigner/lib/rabuslistener.py index bbeec2501294045f96587ce043fdb7ec67768db6..74bfd9330f42de6d85ab074670be2a4c4cceb39b 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/rabuslistener.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/rabuslistener.py @@ -64,11 +64,18 @@ class RABusListener(AbstractBusListener): self.onTaskScheduled(msg.content) elif msg.subject == '%sTaskConflict' % self.subject_prefix: self.onTaskConflict(msg.content) + elif msg.subject == '%sTaskApproved' % self.subject_prefix: + self.onTaskApproved(msg.content) elif msg.subject == '%sTaskError' % self.subject_prefix: self.onTaskError(msg.content) else: logger.error("RABusListener.handleMessage: unknown subject: %s" %str(msg.subject)) + def onTaskApproved(self, task_ids): + '''onTaskApproved is called upon receiving a TaskApproved message. + :param task_ids: a dict containing radb_id, mom_id and otdb_id''' + pass + def onTaskScheduled(self, task_ids): '''onTaskScheduled is called upon receiving a TaskScheduled message. :param task_ids: a dict containing radb_id, mom_id and otdb_id''' diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py index 9f0a17c347b6d6c7e273e26d33e6a83435e00b80..c0cd685e1523b015e155f0004679cb286c374dd7 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py @@ -184,8 +184,9 @@ class ResourceAssigner(object): # Don't perform any scheduling for tasks that are only approved. 
Do this check after insertion of # specification, task and predecessor/successor relations, so approved tasks appear correctly in the web # scheduler. - if spec.status == 'approved': #TODO should this even still happen? + if spec.status == 'approved': # Only needed to send misc field info (storagemanager) to OTDB logger.info('Task otdb_id=%s is only approved, no resource assignment needed yet' % otdb_id) + self._send_task_status_notification(spec, 'approved') return #TODO have Specification propagate to the estimator? if self._schedule_resources(spec, specification_tree): diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_availability_checker.py b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_availability_checker.py index 9182b0cf7a3a78f05b99e43b3737be2bf863c15d..067d2fcbcc55a2ca2eea1e63d523ac993c5de29d 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_availability_checker.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_availability_checker.py @@ -361,6 +361,10 @@ class ResourceAvailabilityChecker(object): """ Returns list of available resources of type id in needed_resources_by_type_id.keys() starting at group id root_gid in the format [{type_id: {<resource_dict>}, ...}, ...]. """ + # Replace list of dicts to a dict of dicts because rid is not garanteed the correct index + # of the list. + available_recources = {r["id"]:r for r in db_resource_list} + # Search breadth-first starting at root_gid. 
gids = [root_gid] resources_list = [] @@ -372,11 +376,15 @@ class ResourceAvailabilityChecker(object): res_group = self.resource_group_relations[gids[i]] for rid in res_group['resource_ids']: - type_id = db_resource_list[rid]['type_id'] - if type_id in needed_resources_by_type_id and db_resource_list[rid]['active'] and \ - db_resource_list[rid]['available_capacity'] > 0: - resources[type_id] = db_resource_list[rid] - type_ids_seen.add(type_id) + if rid in available_recources: + available_recource = available_recources[rid] + type_id = available_recource['type_id'] + if type_id in needed_resources_by_type_id and available_recource['active']: + if available_recource['available_capacity'] > 0: + resources[type_id] = available_recource + type_ids_seen.add(type_id) + else: + logger.debug("requested resource id %s is not available/claimable", rid) # Only add resource IDs if all needed types are present in this resource group if type_ids_seen == set(needed_resources_by_type_id): diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py index 9974e7e6ce30890cc430587f1f31a48f1727d6da..d522baeee0d0f58af80130bd77054d11b466ed78 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from copy import deepcopy from lofar.common.cache import cache @@ -78,9 +79,9 @@ class BasicScheduler(object): # Any resources that we cannot schedule on for some reason self.unusable_resources = [] - # Duration must be non-negative or weird stuff will happen - if self.starttime > self.endtime: - raise ValueError('BasicScheduler, starttime=%s should be >= endtime=%s', starttime, endtime) + # Duration must be positive or weird stuff will happen + if self.starttime >= self.endtime: + raise ValueError('BasicScheduler, starttime=%s should be >= endtime=%s', self.starttime, self.endtime) def 
allocate_resources(self): """ @@ -111,8 +112,11 @@ class BasicScheduler(object): allocation_successful = True except ScheduleException, e: - logger.exception("BasicScheduler: scheduling threw exception: %s", e) + logger.exception("%s: scheduling threw ScheduleException: %s", self.__class__.__name__, e) self._handle_schedule_exception() + except Exception, e: + logger.exception("%s: scheduling threw unhandled exception: %s", self.__class__.__name__, e) + raise return allocation_successful @@ -126,11 +130,9 @@ class BasicScheduler(object): def _pre_process_allocation(self): """ - Placeholder for derived classes, available to be able perform for any processing prior to the actual allocation - of resources done by allocate_resources(). - - Does nothing in this base class. + Cleans unusable resources so that on a next try thet will not block based on previous usage. """ + self.unusable_resources = [] logger.debug("BasicScheduler: _pre_process_allocation for task %s", self.task_id) @@ -416,13 +418,17 @@ class StationScheduler(BasicScheduler): wanted_estimates = self._get_station_estimates() # Try to schedule all of the stations. - remaining_estimates = self._schedule_resources(wanted_estimates, available_resources, need_all=False) + # make a (deep)copy of available_resources and use that, + # because _schedule_resources modifies the available_capacity of the tested wanted stations. + # we rollback the radb later in this method, so we should keep the original available_resources intact. + available_resources_copy = deepcopy(available_resources) + remaining_estimates = self._schedule_resources(wanted_estimates, available_resources_copy, need_all=False) # See if our allocation meets the minimal criteria. Note that some stations may be partially allocated, # we do not count those as claimable. 
unclaimable_stations = set([e["station"] for e in remaining_estimates]) if not self._requirements_satisfied_without(expanded_requirements, unclaimable_stations): - raise ScheduleException("Could not allocate enough stations") + raise ScheduleException("Could not allocate enough stations. unclaimable_stations=%s" % (unclaimable_stations,)) allocated_stations = set([e["station"] for e in wanted_estimates if e not in remaining_estimates]) @@ -544,7 +550,21 @@ class PriorityScheduler(StationScheduler): logger.debug("kill_task_in_radb: task: %s", task) new_endtime = max(task['starttime'], datetime.utcnow()) - self.radb.updateTaskAndResourceClaims(task_id=task['id'], task_status='aborted', endtime=new_endtime, commit=False) + self.radb.updateTaskAndResourceClaims(task_id=task['id'], task_status='aborted', + endtime=new_endtime, commit=False) + + def _unschedule_task_in_radb(self, task): + """ + unschedule the task by setting its status to approved in RADB + and by releasing the task's claims (set them to tentative) + + :param task: task to 'set' to approved + """ + + logger.info("_unschedule_task_in_radb: task: %s", task) + + self.radb.updateTaskAndResourceClaims(task_id=task['id'], task_status='approved', + claim_status='tentative', commit=False) def _propose_potential_starttime(self, newstarttime): """ @@ -567,24 +587,30 @@ class PriorityScheduler(StationScheduler): """ # try to resolve the conflict, and mark any resources unavailable if resolution fails - tasks_to_kill = self._get_resolvable_conflicting_tasks(conflict_claim) - - for t in tasks_to_kill: - logger.info("_resolve_conflict: found task %s to kill for conflict_claim: %s", t, conflict_claim) - - # add it to the list to actually kill later - self.tasks_to_kill.append(t) - - self._kill_task_in_radb(t) + tasks_to_move_out_of_the_way = self._get_resolvable_conflicting_tasks(conflict_claim) + now = datetime.utcnow() + + for t in tasks_to_move_out_of_the_way: + logger.info("_resolve_conflict: found task %s to move 
out of the way for claim in conflict: %s", t, conflict_claim) + + # kill running task, unschedule otherwise in order to move the blocking task out of the way + if (t['starttime'] <= now and t['endtime'] >= now) or t['status'] == 'active': + # add it to the list to actually kill later + self.tasks_to_kill.append(t) + # and update the administration in the radb + self._kill_task_in_radb(t) + else: + # move the blocking task out of the way + self._unschedule_task_in_radb(t) - if not tasks_to_kill: + if not tasks_to_move_out_of_the_way: logger.info("_resolve_conflict: no tasks to kill for conflict_claim %s", conflict_claim) # record which resources cannot be used anymore, because we can't kill anything on it self.unusable_resources.append(conflict_claim["resource_id"]) # Return True if we killed anything - return tasks_to_kill != [] + return tasks_to_move_out_of_the_way != [] def _get_conflicting_claims_and_tasks(self, conflict_claim): """ @@ -617,7 +643,7 @@ class PriorityScheduler(StationScheduler): """ if conflict_claim["resource_type_id"] == self.resource_availability_checker.resource_types['storage']: - raise ScheduleException("Could not resolve conflict on storage resource") + raise ScheduleException("Cannot resolve conflict on storage resource") # find all conflicting claims & which tasks they belong to conflicting_claims, conflicting_tasks = self._get_conflicting_claims_and_tasks(conflict_claim) diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssigner/test/radb_common_testing.py new file mode 120000 index 0000000000000000000000000000000000000000..83b3ca170d204c92cc70aa0f8d716393976b2899 --- /dev/null +++ b/SAS/ResourceAssignment/ResourceAssigner/test/radb_common_testing.py @@ -0,0 +1 @@ +../../ResourceAssignmentDatabase/tests/radb_common_testing.py \ No newline at end of file diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.py 
b/SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.py index 5170f5275a3721ac4a0f366ddb3405d9c8fe4fdb..0e57b4fc59890615fa9e172f0de521ff4449bbf9 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.py +++ b/SAS/ResourceAssignment/ResourceAssigner/test/t_resource_availability_checker.py @@ -1379,7 +1379,7 @@ class ResourceAvailabilityCheckerTest(unittest.TestCase): expected_claims = [resource_type_3_dict, resource_type_5_dict, resource_type_3_dict, resource_type_5_dict, resource_type_3_dict, resource_type_5_dict, resource_type_3_dict, resource_type_5_dict] - self.logger_mock.debug.assert_any_call('fit_multiple_resources: created claim: %s', expected_claims) + self.logger_mock.info.assert_any_call('fit_multiple_resources: created claims: %s', expected_claims) def test_get_is_claimable_invalid_resource_group(self): """ If we try to find claims with a non-existing root_resource_group, get_is_claimable should fail. """ @@ -1391,15 +1391,14 @@ class ResourceAvailabilityCheckerTest(unittest.TestCase): 'storage': 100 } }] - claimable_resources_list = { - self.cep4storage_resource_id: { + claimable_resources_list = [{ 'id': self.cep4storage_resource_id, 'type_id': 5, 'claimable_capacity': 400, 'available_capacity': 400, 'active': True } - } + ] with self.assertRaises(ValueError): _, _ = self.uut.get_is_claimable(estimates, claimable_resources_list) @@ -1418,21 +1417,20 @@ class ResourceAvailabilityCheckerTest(unittest.TestCase): 'storage': 100 } }] - claimable_resources_list = { - self.cep4bandwidth_resource_id: { + claimable_resources_list = [{ 'id': self.cep4bandwidth_resource_id, 'type_id': 3, 'claimable_capacity': 4000, 'available_capacity': 4000, 'active': True }, - self.cep4storage_resource_id: { + { 'id': self.cep4storage_resource_id, 'type_id': 5, 'claimable_capacity': 400, 'available_capacity': 400, 'active': True - }} + }] claimable_resources = self.uut.get_is_claimable(estimates, 
claimable_resources_list) @@ -1450,21 +1448,20 @@ class ResourceAvailabilityCheckerTest(unittest.TestCase): 'storage': 100 } }] - claimable_resources_list = { - self.cep4bandwidth_resource_id: { + claimable_resources_list = [{ 'id': self.cep4bandwidth_resource_id, 'type_id': 3, 'claimable_capacity': 4000, 'available_capacity': 4000, 'active': True }, - self.cep4storage_resource_id: { + { 'id': self.cep4storage_resource_id, 'type_id': 5, 'claimable_capacity': 300, 'available_capacity': 300, 'active': True } - } + ] with self.assertRaises(CouldNotFindClaimException): self.uut.get_is_claimable(estimates, claimable_resources_list) @@ -1486,21 +1483,20 @@ class ResourceAvailabilityCheckerTest(unittest.TestCase): 'bandwidth': 1000, 'storage': 100 }}] - claimable_resources_list = { - self.cep4bandwidth_resource_id: { + claimable_resources_list = [{ 'id': self.cep4bandwidth_resource_id, 'type_id': 3, 'claimable_capacity': 5000, 'available_capacity': 5000, 'active': True }, - self.cep4storage_resource_id: { + { 'id': self.cep4storage_resource_id, 'type_id': 5, 'claimable_capacity': 500, 'available_capacity': 500, 'active': True - }} + }] # TODO: verify with Jan David whether this test case (returning a partial fit) should still succeed or whether # an exception is expected to be raised diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py index 8838598149a81f9a7d5035ed03d46eb3f6645001..a4d2b439f8380849e9358e5ccb54b7077ab318d0 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py +++ b/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py @@ -21,11 +21,9 @@ import unittest import mock - import datetime -from copy import deepcopy -from lofar.sas.resourceassignment.resourceassigner.resource_availability_checker import CouldNotFindClaimException +from lofar.sas.resourceassignment.resourceassigner.resource_availability_checker import ResourceAvailabilityChecker, 
CouldNotFindClaimException from lofar.sas.resourceassignment.resourceassigner.schedulers import ScheduleException from lofar.sas.resourceassignment.resourceassigner.schedulers import BasicScheduler @@ -33,360 +31,228 @@ from lofar.sas.resourceassignment.resourceassigner.schedulers import StationSche from lofar.sas.resourceassignment.resourceassigner.schedulers import PriorityScheduler from lofar.sas.resourceassignment.resourceassigner.schedulers import DwellScheduler +from lofar.sas.resourceassignment.database.radb import _FETCH_ONE + import logging logger = logging.getLogger(__name__) -class FakeRADatabase(object): - """ Mimic an RA Database, assuming claims overlap fully or not at all. """ - - def __init__(self, resource_capacity): - # database - self.tasks = {} - self.claims = {} - self.next_claim_id = 0 - - # cache committed state here - self.committed_tasks = {} - self.committed_claims = {} - - # maximum capacity of our resource - self.resource_capacity = resource_capacity - - def addTask(self, id, task): - self.tasks[id] = task - self.tasks[id]["id"] = id - self.tasks[id]["specification_id"] = id - self.claims[id] = [] - - self.committed = False - self.rolled_back = False - - def _fits(self, claim): - usage = 0 - resource_id = claim["resource_id"] - - for claims in self.claims.values(): - for c in claims: - overlap_in_time = claim["starttime"] < c["endtime"] and claim["endtime"] > c["starttime"] - overlap_in_resource = c["resource_id"] == resource_id - - if c["status"] != "conflict" and \ - c["id"] != claim.get("id",None) and \ - overlap_in_resource and \ - overlap_in_time: - usage += c["claim_size"] - - return usage + claim["claim_size"] <= self.resource_capacity - - """ Methods to mock radb. 
""" - - def getTask(self, id): - return self.tasks[id] - - def getTasks(self, task_ids): - return [t for id, t in self.tasks.iteritems() if id in task_ids] - - def updateSpecification(self, specification_id, starttime=None, endtime=None, content=None, cluster=None, - commit=True): - - for task_id, task in self.tasks.iteritems(): - if self.tasks[task_id]["specification_id"] == specification_id: - if starttime is not None: - self.tasks[task_id]["starttime"] = starttime - if endtime is not None: - self.tasks[task_id]["endtime"] = endtime - - return True - - def getResources(self, *args, **kwargs): - # we model six resources, can expand if needed - return [ { "id": x } for x in xrange(6) ] - - def getResourceGroupMemberships(self): - # We model 4 stations: 2 remote, and 2 core - station_groups = { - 100: { - "resource_group_id": 100, - "resource_group_name": "ALL", - "resource_group_type": "", - "child_ids": [101, 102] - }, - 101: { - "resource_group_id": 101, - "resource_group_name": "CORE", - "resource_group_type": "", - "child_ids": [1, 2] - }, - 102: { - "resource_group_id": 102, - "resource_group_name": "REMOTE", - "resource_group_type": "", - "child_ids": [3, 4] - } - } - - def station_name(nr): - if nr < 3: - return "CS%03d" % nr - else: - return "RS%03d" % nr - - stations = { - station_nr: { - "resource_group_id": station_nr, - "resource_group_name": station_name(station_nr), - "resource_group_type": "station", - "child_ids": [], - } for station_nr in xrange(1,5) - } - - resources = station_groups; - resources.update(stations) - - return {"groups": resources} - - def getResourceClaims(self, task_ids, status, extended): - for tid in task_ids: - assert tid in self.tasks - assert tid in self.claims - - return [claim for tid in task_ids for claim in self.claims[tid] if claim["status"] == status] - - def deleteResourceClaims(self, claim_ids, commit): - logger.info("Deleting claims %s", claim_ids) - - for tid in self.claims: - self.claims[tid] = [c for c in 
self.claims[tid] if c["id"] not in claim_ids] - - def updateResourceClaims(self, where_task_ids, status, commit): - # this is what we support - assert status == "claimed" - - for task_id in where_task_ids: - # can't update conflict claims to claimed - for c in self.claims[task_id]: - if c["status"] != "tentative": - return False - - # update statusses - for c in self.claims[task_id]: - c["status"] = "claimed" - - return True - - def updateTaskAndResourceClaims(self, task_id, starttime=None, endtime=None, **kwargs): - if starttime: - logger.info("Setting starttime of task %s to %s", task_id, starttime) - - self.tasks[task_id]["starttime"] = starttime - - for c in self.claims[task_id]: - c["starttime"] = starttime - - if endtime: - logger.info("Setting endtime of task %s to %s", task_id, endtime) - - self.tasks[task_id]["endtime"] = endtime - - for c in self.claims[task_id]: - c["endtime"] = endtime - - def insertResourceClaims(self, task_id, claims, *args, **kwargs): - for c in claims: - # check whether tasks do not get two claims of the same resource - assert c["resource_id"] not in [d["resource_id"] for d in self.claims[task_id]], "Resource %s claimed twice by task %s" % (c["resource_id"], task_id) - - # derive claim status - c["status"] = "tentative" if self._fits(c) else "conflict" - - # assign ids - c["task_id"] = task_id - c["id"] = self.next_claim_id - self.next_claim_id += 1 - - # add it to our claim list - self.claims[task_id].append(c) - - claim_ids = [c["id"] for c in claims] - logger.info("Added claims %s", claim_ids) - - return claim_ids - - def get_overlapping_claims(self, claim_id, claim_status="claimed"): - overlapping_claims = [] - - logger.info('get_overlapping_claims(claim_id=%s, claim_status=%s) self.claims content:', claim_id, claim_status) - for claim_id, claim_value in self.claims.iteritems(): - logger.info('%s: %s', claim_id, claim_value) - - # all claims overlap - claims_for_id = self.claims[claim_id] - for claim in claims_for_id: - 
overlapping_claims += [c for _, claims in self.claims.iteritems() for c in claims if - # overlap in space - c["resource_id"] == claim["resource_id"] and - # "conflict" claims do not actually claim resources - c["status"] == claim_status and - # be antireflexive - c["id"] != claim["id"]] +import radb_common_testing - return overlapping_claims +def setUpModule(): + return radb_common_testing.setUpModule() - def commit(self): - logger.info("Commit") +def tearDownModule(): + return radb_common_testing.tearDownModule() - self.rolled_back = False - self.committed = True - self.committed_claims = deepcopy(self.claims) - self.committed_tasks = deepcopy(self.tasks) +class SchedulerTest(radb_common_testing.RADBCommonTest): + """ create test radb postgres instance, and use that in a ResourceAvailabilityChecker""" - def rollback(self): - logger.info("Rollback") - - self.rolled_back = True - self.claims = deepcopy(self.committed_claims) - self.tasks = deepcopy(self.committed_tasks) + def setUp(self): + super(SchedulerTest, self).setUp() + self.resource_availability_checker = ResourceAvailabilityChecker(self.radb) + self._enforce_limited_station_group_list() -class FakeResourceAvailabilityChecker(object): - resource_types = { - "storage": 0, - "bandwidth": 1, - } + def _enforce_limited_station_group_list(self): + # for test simplicity, create a simple virtual instrument which makes debugging easier. + # this is safe, because we are working on a test database - def get_is_claimable(self, requested_resources, available_resources): - if not available_resources: - raise CouldNotFindClaimException + LIMITED_STATION_GROUP_LIST = ('CS001', 'CS002', 'RS106', 'RS205') - # fullfil one request at a time to keep the code simple. 
We map it on - # the first available resource - r = requested_resources[0] + unwanted_resource_group_ids = [rg['id'] for rg in self.radb.getResourceGroups() + if rg['type'] == 'station' and rg['name'] not in LIMITED_STATION_GROUP_LIST] - # use resource 0, or resource #stationnr - rid = int(r["station"][2:]) if "station" in r else available_resources[0]["id"] - if rid not in [x["id"] for x in available_resources]: - raise CouldNotFindClaimException + self.radb._executeQuery("DELETE FROM virtual_instrument.resource_group rg WHERE rg.id in (%s)" % ( + ', '.join([str(id) for id in unwanted_resource_group_ids])),) + self.radb.commit() - rtype = r["resource_types"].keys()[0] - return [{ - 'requested_resources': [r], - 'claim_size': r["resource_types"][rtype], - 'resource_id': rid, - 'resource_type_id': self.resource_types[rtype] - }] -class SchedulerTest(unittest.TestCase): - """ Setup mechanics to use a FakeRADatabase and FakeResourceAvailabilityChecker to simulate a system with - one resource at one point in time. 
""" +class BasicSchedulerTest(SchedulerTest): + def new_task(self, mom_otdb_id=0, starttime=None, endtime=None): + """ + insert a new test specification and task into the testing radb + :param mom_otdb_id: optional mom/otdb id + :param starttime: optional starttime if None, then datetime(2017, 1, 1, 1, 0, 0) is used + :param endtime: optional endtime if None, then datetime(2017, 1, 1, 2, 0, 0) is used + :return: the new radb's task id + """ - def mock_ra_database(self): - self.fake_ra_database = FakeRADatabase(resource_capacity=1024) + if starttime is None: + starttime = datetime.datetime(2017, 1, 1, 1, 0, 0) - ra_database_patcher = mock.patch('lofar.sas.resourceassignment.resourceassigner.schedulers.RADatabase') - self.addCleanup(ra_database_patcher.stop) - self.ra_database_mock = ra_database_patcher.start() - self.ra_database_mock.return_value = self.fake_ra_database + if endtime is None: + endtime = datetime.datetime(2017, 1, 1, 2, 0, 0) - def mock_resource_availability_checker(self): - self.fake_resource_availability_checker = FakeResourceAvailabilityChecker() + return self.radb.insertSpecificationAndTask(mom_id=mom_otdb_id, + otdb_id=mom_otdb_id, + task_status='approved', + task_type='observation', + starttime=starttime, + endtime=endtime, + content='', + cluster='CEP4')['task_id'] - def setUp(self): - self.mock_ra_database() - self.mock_resource_availability_checker() + def get_specification_tree(self, task_id): + return {} -class BasicSchedulerTest(SchedulerTest): - def new_task(self, task_id): - self.fake_ra_database.addTask(task_id, { - "starttime": datetime.datetime(2017, 1, 1, 1, 0, 0), - "endtime": datetime.datetime(2017, 1, 1, 2, 0, 0), - }) + def new_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """factory method returning a scheduler class specific for this test class. 
+ In this case, in the BasicSchedulerTest class, it returns a new BasicScheduler.""" + return self.new_basic_scheduler(task_id, resource_estimator, specification_tree) - self.fake_ra_database.commit() - self.fake_ra_database.committed = False # dont confuse subsequent checks on whether the scheduler committed + def new_basic_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """return a new BasicScheduler""" + return BasicScheduler(task_id, + specification_tree if specification_tree else self.get_specification_tree(task_id), + resource_estimator if resource_estimator else lambda _:[], + self.resource_availability_checker, self.radb.dbcreds) - def get_specification_tree(self, task_id): - return {} + def get_station_bandwidth_max_capacity(self): + resource_CS001bw0 = [r for r in self.radb.getResources(resource_types="bandwidth", include_availability=True) + if r['name']=='CS001bw0'][0] + return resource_CS001bw0['total_capacity'] - def new_scheduler(self, task_id, resource_estimator): - return BasicScheduler(task_id, self.get_specification_tree(task_id), resource_estimator, self.fake_resource_availability_checker, None) + def get_CEP4_storage_max_capacity(self): + resource_cep4_storage = [r for r in self.radb.getResources(resource_types="storage", include_availability=True) + if r['name']=='CEP4_storage:/data'][0] + return resource_cep4_storage['total_capacity'] def test_schedule_task(self): """ Whether a task (that fits) can be scheduled. 
""" # Resources we need - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + task_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': 512}, + "root_resource_group": "CS001", + "resource_count": 1 } ] + scheduler = self.new_scheduler(task_id, lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() # Allocation must succeed and be committed self.assertTrue(allocation_succesful) - self.assertTrue(self.fake_ra_database.committed) - self.assertFalse(self.fake_ra_database.rolled_back) + self.assertTrue(scheduler.radb.committed) + self.assertFalse(scheduler.radb.rolled_back) # Claim must be present in database - claims = self.fake_ra_database.claims[0] + claims = self.radb.getResourceClaims(task_ids=task_id, extended=True) self.assertTrue(claims) self.assertEqual(len(claims), 1) # Claim must be valid claim = claims[0] - task = self.fake_ra_database.tasks[0] - - self.assertEqual(claim["status"], "claimed") - self.assertEqual(claim["starttime"], task["starttime"]) - self.assertEqual(claim["endtime"], task["endtime"]) - self.assertEqual(claim["claim_size"], 512) - self.assertEqual(claim["resource_type_id"], FakeResourceAvailabilityChecker.resource_types["bandwidth"]) + task = self.radb.getTask(task_id) + self.assertEqual(claim["status"], "claimed") + self.assertEqual(claim["starttime"], task["starttime"]) + self.assertEqual(claim["endtime"], task["endtime"]) + self.assertEqual(claim["claim_size"], 512) + self.assertEqual(claim["resource_type_name"], "bandwidth") def test_multiple_resources(self): """ Whether a task (that fits) can be scheduled. 
""" # Resources we need - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }, - { 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + task_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': 512}, + "root_resource_group": "CS001", + "resource_count": 1 }, + {'resource_types': {'bandwidth': 512}, + "root_resource_group": "CS002", + "resource_count": 1} ] + + scheduler = self.new_scheduler(task_id, lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertTrue(scheduler.radb.committed) + self.assertFalse(scheduler.radb.rolled_back) # Allocation must succeed self.assertTrue(allocation_succesful) + # Claim must be present in database + claims = self.radb.getResourceClaims(task_ids=task_id, extended=True) + self.assertTrue(claims) + self.assertEqual(len(claims), 2) + def test_schedule_too_large_task(self): """ Whether a task with too large claims will be rejected by the scheduler. 
""" # Resources we need - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 2048} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() - - # Allocation must fail, and rollback() called - self.assertFalse(allocation_succesful) - self.assertFalse(self.fake_ra_database.committed) - self.assertTrue(self.fake_ra_database.rolled_back) + task_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': 1e99}, + "root_resource_group": "CS001", + "resource_count": 1 } ] + scheduler = self.new_scheduler(task_id, lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + + if self.__class__ == BasicSchedulerTest: # This inheritence of test is not ideal + # Allocation must fail, and commit called so we get a conflicted state + self.assertFalse(allocation_succesful) + self.assertTrue(scheduler.radb.committed) + self.assertFalse(scheduler.radb.rolled_back) + else: + # Allocation must fail, and rollback called + self.assertFalse(allocation_succesful) + self.assertFalse(scheduler.radb.committed) + self.assertTrue(scheduler.radb.rolled_back) def test_schedule_two_tasks_too_large_task(self): """ Whether two tasks that fit individually but not together will be rejected by the scheduler. """ + max_bw_cap = self.get_station_bandwidth_max_capacity() + # First task must succeed - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + # we claim two bandwidth resources because CS001 has two network lines + # they should both be claimed, so that the next task cannot just take the other free line. 
+ task_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1 }, + {'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1} ] + scheduler = self.new_scheduler(task_id, lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) - # Second task must fail - self.new_task(1) - estimates = [{ 'resource_types': {'bandwidth': 513} }] - allocation_succesful = self.new_scheduler(1, lambda _: estimates).allocate_resources() + # Second task must fail, because both network lines were already filled. + task2_id = self.new_task(1) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1 }, + {'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1} ] + scheduler = self.new_scheduler(task2_id, lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertFalse(allocation_succesful) + class StationSchedulerTest(BasicSchedulerTest): # The StationScheduler must not regress on the BasicScheduler, so we inherit all its tests def get_specification_tree(self, task_id): - return { "task_type": "observation", "specification": { "Observation.VirtualInstrument.stationList": [] }, "station_requirements": [] } + return { "task_type": "observation", + "specification": { "Observation.VirtualInstrument.stationList": [] }, + "station_requirements": [] } + + def new_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """overridden factory method returning a scheduler class specific for this test class. + In this case, in the StationSchedulerTest class, it returns a new StationScheduler. 
+ + Please note that in most/all of the tests in this StationSchedulerTest test class + we explicitly use the new_station_scheduler factory method to get the specific + StationScheduler. In derived test classes, this means that we then still use a StationScheduler + and not another scheduler type via a overridden new_scheduler method. + """ + return self.new_station_scheduler(task_id, resource_estimator, specification_tree) - def new_scheduler(self, task_id, resource_estimator): - return StationScheduler(task_id, self.get_specification_tree(task_id), resource_estimator, self.fake_resource_availability_checker, None) + def new_station_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """factory method returning a StationScheduler. + Can be overridden in derived test classes.""" + return StationScheduler(task_id, + specification_tree if specification_tree else self.get_specification_tree(task_id), + resource_estimator if resource_estimator else self.fake_resource_estimator, + self.resource_availability_checker, self.radb.dbcreds) def fake_resource_estimator(self, specification_tree): """ Return an estimate for each station, plus a fixed storage claim of half the available storage capacity. 
""" @@ -396,32 +262,31 @@ class StationSchedulerTest(BasicSchedulerTest): # We don't get here without requesting stations assert stations + max_bw_cap = self.get_station_bandwidth_max_capacity() + max_storage_cap = self.get_CEP4_storage_max_capacity() + return [ - { "resource_types": {"bandwidth": 1024}, + { "resource_types": {"bandwidth": max_bw_cap }, "resource_count": 1, "station": station_name, - "root_resource_group": "%sLBA" % (station_name,) + "root_resource_group": station_name } for station_name in stations ] + [ - { "resource_types": {"storage": 512}, + { "resource_types": {"storage": 0.4*max_storage_cap}, "resource_count": 1, - } + "root_resource_group": "CEP4" + } ] - def new_station_scheduler(self, task_id, specification_tree): - """ A new scheduler for station-specific tests. """ - - return StationScheduler(task_id, specification_tree, self.fake_resource_estimator, FakeResourceAvailabilityChecker(), None) - def test_expand_station_list(self): """ Test whether _expand_station_list correctly expands the station sets we defined in our FakeRADatabase. 
""" - self.new_task(0) - scheduler = self.new_station_scheduler(0, self.get_specification_tree(0)) + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=self.get_specification_tree(0)) - self.assertEqual(sorted(scheduler._expand_station_list("ALL")), ["CS001","CS002","RS003","RS004"]) + self.assertEqual(sorted(scheduler._expand_station_list("ALL")), ["CS001","CS002","RS106","RS205"]) self.assertEqual(sorted(scheduler._expand_station_list("CORE")), ["CS001","CS002"]) - self.assertEqual(sorted(scheduler._expand_station_list("REMOTE")), ["RS003","RS004"]) + self.assertEqual(sorted(scheduler._expand_station_list("REMOTE")), ["RS106","RS205"]) self.assertEqual(sorted(scheduler._expand_station_list("CS002")), ["CS002"]) with self.assertRaises(ScheduleException): @@ -444,16 +309,17 @@ class StationSchedulerTest(BasicSchedulerTest): """ Test whether a requirement for a single station can be satisfied. """ specification_tree = self.get_specification_tree(0) - specification_tree["station_requirements"] = [ ("RS003", 1), ] + specification_tree["station_requirements"] = [ ("RS106", 1), ] - self.new_task(0) - allocation_succesful = self.new_station_scheduler(0, specification_tree).allocate_resources() + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() # Allocation must succeed self.assertTrue(allocation_succesful) # The specified station must be allocated, plus storage claim - self.assertTrue(len(self.fake_ra_database.claims[0]) == 2) + self.assertTrue(len(self.radb.getResourceClaims(task_ids=task_id, status='claimed')) == 2) def test_find_any_station(self): """ Test whether a requirement for a single station can be satisfied. 
""" @@ -461,39 +327,43 @@ class StationSchedulerTest(BasicSchedulerTest): specification_tree = self.get_specification_tree(0) specification_tree["station_requirements"] = [ ("ALL", 1), ] - self.new_task(0) - allocation_succesful = self.new_station_scheduler(0, specification_tree).allocate_resources() + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() # Allocation must succeed self.assertTrue(allocation_succesful) # All 4 stations must be allocated (allocation is greedy), plus storage claim - self.assertTrue(len(self.fake_ra_database.claims[0]) == 5) + self.assertTrue(len(self.radb.getResourceClaims(task_ids=task_id, status='claimed')) == 5) def test_find_zero_stations(self): """ Test whether a requirement for a zero station cannot be satisfied if no stations are left. """ + # preparation: do a first scheduling, which should succeed and claim the station specification_tree = self.get_specification_tree(0) - specification_tree["station_requirements"] = [ ("CS001", 0), ] + specification_tree["station_requirements"] = [ ("RS106", 1), ] + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() - self.new_task(0) - task = self.fake_ra_database.tasks[0] + self.assertTrue(allocation_succesful) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) - # allocate CS001 by hand - self.fake_ra_database.claims["hidden"] = [{ - "id": "hidden", - "resource_id": 1, - "claim_size": 1024, - "starttime": task["starttime"], - "endtime": task["endtime"], - "status": "claimed", - "task_id": "hidden" - }] + # real test, try to claim same station again. Should fail now. 
+ specification_tree = self.get_specification_tree(0) + specification_tree["station_requirements"] = [ ("RS106", 0), ] - allocation_succesful = self.new_station_scheduler(0, specification_tree).allocate_resources() + task_id = self.new_task(1) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() - # Allocation must succeed + # Allocation must fail self.assertFalse(allocation_succesful) + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) + self.assertFalse(scheduler.radb.committed) + self.assertTrue(scheduler.radb.rolled_back) + def test_find_overlap_stations(self): """ Test whether requirements for overlapping station sets can be satisfied. """ @@ -501,14 +371,15 @@ class StationSchedulerTest(BasicSchedulerTest): specification_tree = self.get_specification_tree(0) specification_tree["station_requirements"] = [ ("CORE", 2), ("ALL", 4), ] - self.new_task(0) - allocation_succesful = self.new_station_scheduler(0, specification_tree).allocate_resources() + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() # Allocation must succeed self.assertTrue(allocation_succesful) # All 4 stations must be allocated (allocation is greedy), plus storage claim - self.assertTrue(len(self.fake_ra_database.claims[0]) == 5) + self.assertTrue(len(self.radb.getResourceClaims(task_ids=task_id, status='claimed')) == 5) def test_require_too_many_stations(self): """ Test whether requiring too many stations (than exist) fails. 
""" @@ -516,71 +387,68 @@ class StationSchedulerTest(BasicSchedulerTest): specification_tree = self.get_specification_tree(0) specification_tree["station_requirements"] = [ ("CORE", 3), ] - self.new_task(0) - allocation_succesful = self.new_station_scheduler(0, specification_tree).allocate_resources() + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() # Allocation must fail self.assertFalse(allocation_succesful) - self.assertFalse(self.fake_ra_database.committed) - self.assertTrue(self.fake_ra_database.rolled_back) + self.assertFalse(scheduler.radb.committed) + self.assertTrue(scheduler.radb.rolled_back) def test_require_more_stations_than_available(self): """ Test whether requiring too many stations (than are available) fails. """ specification_tree = self.get_specification_tree(0) - specification_tree["station_requirements"] = [ ("CORE", 2), ] - - self.new_task(0) - task = self.fake_ra_database.tasks[0] + specification_tree["station_requirements"] = [ ("REMOTE", 2), ] - # allocate CS001 by hand - self.fake_ra_database.claims["hidden"] = [{ - "id": "hidden", - "resource_id": 1, - "claim_size": 1024, - "starttime": task["starttime"], - "endtime": task["endtime"], - "status": "claimed", - "task_id": "hidden" - }] + # preparation: do a first scheduling, which should succeed and claim the two remote stations + task_id = self.new_task(0) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() - self.fake_ra_database.commit() - self.fake_ra_database.committed = False # dont confuse subsequent checks on whether the scheduler committed + self.assertTrue(allocation_succesful) + self.assertEqual(3, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) - # try to allocate our task - allocation_succesful = self.new_station_scheduler(0, 
specification_tree).allocate_resources() + # real test, try to claim the two remote stations again. Should fail now. + task_id = self.new_task(1) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() - # Allocation must fail self.assertFalse(allocation_succesful) - self.assertFalse(self.fake_ra_database.committed) - self.assertTrue(self.fake_ra_database.rolled_back) + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) + self.assertFalse(scheduler.radb.committed) + self.assertTrue(scheduler.radb.rolled_back) + def test_2obs_coexist(self): """ Test whether 2 obs requiring different station sets can be scheduled in parallel. """ - for task_id in (0,1): - station_set = "CORE" if task_id == 0 else "REMOTE" - specification_tree = self.get_specification_tree(task_id) + for mom_id in (0,1): + station_set = "CORE" if mom_id == 0 else "REMOTE" + specification_tree = self.get_specification_tree(mom_id) specification_tree["station_requirements"] = [ (station_set, 2), ] - self.new_task(task_id) - allocation_succesful = self.new_station_scheduler(task_id, specification_tree).allocate_resources() + task_id = self.new_task(mom_id) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful = scheduler.allocate_resources() # Allocation must succeed self.assertTrue(allocation_succesful) - self.assertTrue(len(self.fake_ra_database.claims[task_id]) == 3) # 2 stations + 1 storage claim + self.assertTrue(len(self.radb.getResourceClaims(task_ids=task_id, status='claimed')) == 3) # 2 stations + 1 storage claim def test_2obs_no_fit(self): """ Test whether 2 obs requiring station sets from the same set will conflict. 
""" allocation_succesful = {} # Two observations both requesting 2 core stations - for task_id in (0,1): - specification_tree = self.get_specification_tree(task_id) - specification_tree["station_requirements"] = [ ("CORE", 2), ] + for mom_id in (0,1): + specification_tree = self.get_specification_tree(mom_id) + specification_tree["station_requirements"] = [ ("REMOTE", 2), ] - self.new_task(task_id) - allocation_succesful[task_id] = self.new_station_scheduler(task_id, specification_tree).allocate_resources() + task_id = self.new_task(mom_id) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful[mom_id] = scheduler.allocate_resources() # Second allocation must fail self.assertTrue(allocation_succesful[0]) @@ -591,13 +459,14 @@ class StationSchedulerTest(BasicSchedulerTest): allocation_succesful = {} # Two observations both requesting 2 core stations - for task_id in (0,1,2): - station_name = { 0: "CS001", 1: "CS002", 2: "RS003" }[task_id] - specification_tree = self.get_specification_tree(task_id) + for mom_id in (0,1,2): + station_name = { 0: "CS001", 1: "CS002", 2: "RS106" }[mom_id] + specification_tree = self.get_specification_tree(mom_id) specification_tree["station_requirements"] = [ (station_name, 1), ] - self.new_task(task_id) - allocation_succesful[task_id] = self.new_station_scheduler(task_id, specification_tree).allocate_resources() + task_id = self.new_task(mom_id) + scheduler = self.new_station_scheduler(task_id, specification_tree=specification_tree) + allocation_succesful[mom_id] = scheduler.allocate_resources() # Second allocation must fail self.assertTrue(allocation_succesful[0]) @@ -628,10 +497,11 @@ class PrioritySchedulerTest(StationSchedulerTest): def mock_datetime(self): datetime_patcher = mock.patch('lofar.sas.resourceassignment.resourceassigner.schedulers.datetime') self.addCleanup(datetime_patcher.stop) - datetime_mock = datetime_patcher.start() + self.datetime_mock = 
datetime_patcher.start() - datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) - datetime_mock.max = datetime.datetime.max + # utcnow lies before the tasks we are scheduling (the tasks lie in the future) + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) + self.datetime_mock.max = datetime.datetime.max def setUp(self): super(PrioritySchedulerTest, self).setUp() @@ -640,225 +510,491 @@ class PrioritySchedulerTest(StationSchedulerTest): self.mock_obscontrol() self.mock_datetime() - def new_task(self, task_id): - self.fake_ra_database.addTask(task_id, { - "mom_id": 1000 + task_id, - "otdb_id": 2000 + task_id, - "type": "observation", - "starttime": datetime.datetime(2017, 1, 1, 1, 0, 0), - "endtime": datetime.datetime(2017, 1, 1, 2, 0, 0), - }) - - self.fake_ra_database.commit() - self.fake_ra_database.committed = False # dont confuse subsequent checks on whether the scheduler committed - - def new_task_without_momid(self, task_id): - self.fake_ra_database.addTask(task_id, { - "mom_id": None, - "otdb_id": 2000 + task_id, - "type": "observation", - "starttime": datetime.datetime(2017, 1, 1, 1, 0, 0), - "endtime": datetime.datetime(2017, 1, 1, 2, 0, 0), - }) - - self.fake_ra_database.commit() - self.fake_ra_database.committed = False # dont confuse subsequent checks on whether the scheduler committed - - def new_scheduler(self, task_id, resource_estimator): - return PriorityScheduler(task_id, self.get_specification_tree(task_id), resource_estimator, self.fake_resource_availability_checker, None) - - def new_station_scheduler(self, task_id, specification_tree): - return PriorityScheduler(task_id, specification_tree, self.fake_resource_estimator, FakeResourceAvailabilityChecker(), None) - - def test_kill_lower_priority(self): + def new_task_without_momid(self, otdb_id): + return self.radb.insertSpecificationAndTask(mom_id=None, + otdb_id=otdb_id, + task_status='approved', + task_type='observation', + 
starttime=datetime.datetime(2017, 1, 1, 1, 0, 0), + endtime=datetime.datetime(2017, 1, 1, 2, 0, 0), + content='', + cluster='CEP4')['task_id'] + + def new_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """overridden factory method returning a scheduler class specific for this test class. + In this case, in the PrioritySchedulerTest class, it returns a new PriorityScheduler.""" + return self.new_priority_scheduler(task_id, resource_estimator, specification_tree) + + def new_station_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """overridden factory method returning a scheduler class specific for this test class. + In this case, in the PrioritySchedulerTest class, it returns a new PriorityScheduler.""" + return self.new_priority_scheduler(task_id, resource_estimator, specification_tree) + + def new_priority_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + return PriorityScheduler(task_id, + specification_tree if specification_tree else self.get_specification_tree(task_id), + resource_estimator if resource_estimator else self.fake_resource_estimator, + self.resource_availability_checker, self.radb.dbcreds) + + def test_unschedule_lower_priority_future_task(self): """ - Whether two tasks that fit individually but not together will be accepted by the scheduler by killing the + Whether two future tasks that fit individually but not together will be accepted by the scheduler by unscheduling the lower-priority task. 
""" + # utcnow lies before the tasks we are scheduling (the tasks lie in the future) + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) + + max_bw_cap = self.get_station_bandwidth_max_capacity() + + # First task must succeed (for the test the mom_id determines the prio) + task_id = self.new_task(0) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertTrue(allocation_succesful) + + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + self.radb.updateTask(task_id, task_status='scheduled') + self.assertEqual('scheduled', self.radb.getTask(task_id)['status']) + + # Second task must succeed as it has a higher priority (for the test the mom_id determines the prio) + task2_id = self.new_task(1000) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task2_id, status='claimed'))) + + # First task must have been unscheduled + # as a result, it should not have any claimed claims anymore + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task_id, status='conflict'))) + # and the low-prio task should now have conflict state (cause the high-prio task claimed the resources) + self.assertEqual('conflict', self.radb.getTask(task_id)['status']) + + + def test_kill_lower_priority_running_task(self): + """ + Whether two tasks that fit individually but not together will be accepted by the scheduler by killing the + running lower-priority task. 
+ """ + + # utcnow lies before the tasks we are scheduling (the tasks lie in the future) + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) + + max_bw_cap = self.get_station_bandwidth_max_capacity() + # First task must succeed - self.new_task(0) - estimates = [{'resource_types': {'bandwidth': 512}}] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + # (for the test the mom_id determines the prio) + task_id = self.new_task(0, starttime=datetime.datetime(2017, 1, 1, 12, 0, 0), + endtime=datetime.datetime(2017, 1, 1, 13, 0, 0)) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "RS106", + "resource_count": 1 } ] + scheduler = self.new_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + self.radb.updateTask(task_id, task_status='scheduled') + self.assertEqual('scheduled', self.radb.getTask(task_id)['status']) + self.assertEqual(datetime.datetime(2017, 1, 1, 12, 0, 0), self.radb.getTask(task_id)['starttime']) + self.assertEqual(datetime.datetime(2017, 1, 1, 13, 0, 0), self.radb.getTask(task_id)['endtime']) + + # shift utcnow and fake that the task is running + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 12, 10, 0) + self.radb.updateTask(task_id, task_status='active') + self.assertEqual('active', self.radb.getTask(task_id)['status']) + # Second task must succeed as it has a higher priority - self.new_task(1000) - estimates = [{'resource_types': {'bandwidth': 513}}] - allocation_succesful = self.new_scheduler(1000, lambda _: estimates).allocate_resources() + # start it in a minute after now + # (or else it will still have overlap and conflicts with beginning of just-aborted running task) + # (for the test the mom_id determines the prio) + task2_id = self.new_task(1000, 
starttime=datetime.datetime(2017, 1, 1, 12, 11, 0), + endtime=datetime.datetime(2017, 1, 1, 13, 11, 0)) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "RS106", + "resource_count": 1 } ] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) # First task must have been killed - otdb_id = self.fake_ra_database.tasks[0]["otdb_id"] + otdb_id = self.radb.getTask(task_id)["otdb_id"] self.obscontrol_mock.assert_called_with(otdb_id) - # First task must have its endtime cut short to utcnow or starttime - my_starttime = self.fake_ra_database.tasks[1000]["starttime"] - for c in self.fake_ra_database.claims[0]: - self.assertLessEqual(c["endtime"], my_starttime) + # First task must have its endtime cut short to utcnow + # and all claims should be ended (but still claimed) as well. + self.assertEqual(datetime.datetime(2017, 1, 1, 12, 10, 0), self.radb.getTask(task_id)['endtime']) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_id))) + for claim in self.radb.getResourceClaims(task_ids=task_id): + self.assertLessEqual(claim["endtime"], datetime.datetime(2017, 1, 1, 12, 10, 0)) + self.assertEqual('claimed', claim["status"]) + + # and the starttime should still be the original + self.assertEqual(datetime.datetime(2017, 1, 1, 12, 0, 0), self.radb.getTask(task_id)['starttime']) + # and status should be aborted + self.assertEqual('aborted', self.radb.getTask(task_id)['status']) + + + def test_do_not_unschedule_higher_priority_future_task(self): + # utcnow lies before the tasks we are scheduling (the tasks lie in the future) + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) + + max_bw_cap = self.get_station_bandwidth_max_capacity() + + # First task must succeed (for the test the mom_id determines the prio) + task_id = self.new_task(1000) + estimates = [{'resource_types': 
{'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertTrue(allocation_succesful) + + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + self.radb.updateTask(task_id, task_status='scheduled') + self.assertEqual('scheduled', self.radb.getTask(task_id)['status']) + + # Second task must succeed as it has a higher priority (for the test the mom_id determines the prio) + task2_id = self.new_task(0) #(for the test the mom_id determines the prio) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertFalse(allocation_succesful) - def test_not_kill_higher_priority(self): - """ Whether two tasks that fit individually but not together get rejected priorities do not allow an override. """ + # the second (low-prio) task could not be scheduled + # as a result there are no claims allocated and the task stays in approved state. + # Thought by JS: I think that's wrong, and does not give the proper feedback to the user. + # I think that the claims and task should go to conflict to make it clear to the user what happened. 
+ self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task2_id))) - # First task must succeed - self.new_task(1000) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_scheduler(1000, lambda _: estimates).allocate_resources() + # First task must NOT have been unscheduled + self.assertEqual('scheduled', self.radb.getTask(task_id)['status']) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) + + + def test_do_not_kill_higher_priority_running_task(self): + + # utcnow lies before the tasks we are scheduling (the tasks lie in the future) + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) + + max_bw_cap = self.get_station_bandwidth_max_capacity() + + # First (task must succeed) + task_id = self.new_task(1000) #(for the test the mom_id determines the prio) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) - # Second task must fail as it has a lower priority - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 513} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + self.radb.updateTask(task_id, task_status='scheduled') + self.assertEqual('scheduled', self.radb.getTask(task_id)['status']) + + # shift utcnow and fake that the task is running + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 1, 10, 0) + self.radb.updateTask(task_id, task_status='active') + self.assertEqual('active', self.radb.getTask(task_id)['status']) + + # Second task must succeed as it has a higher priority + # start it in a 
minute after now + # (or else it will still have overlap and conflicts with beginning of just-aborted running task) + # (for the test the mom_id determines the prio) + task2_id = self.new_task(0, starttime=datetime.datetime(2017, 1, 1, 1, 11, 0)) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertFalse(allocation_succesful) + # the second (low-prio) task could not be scheduled + # as a result there are no claims allocated and the task stays in approved state. + # Thought by JS: I think that's wrong, and does not give the proper feedback to the user. + # I think that the claims and task should go to conflict to make it clear to the user what happened. + self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task2_id))) + # First task must NOT have been killed - otdb_id = self.fake_ra_database.tasks[1000]["otdb_id"] - with self.assertRaises(AssertionError): - self.obscontrol_mock.assert_called_with(otdb_id) + self.assertEqual('active', self.radb.getTask(task_id)['status']) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) - def test_not_kill_equal_priority(self): + def test_not_unschedule_equal_priority(self): """ Whether two tasks that fit individually but not together get rejected priorities do not allow an override. 
""" + max_bw_cap = self.get_station_bandwidth_max_capacity() + # First task must succeed - self.new_task(1) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_scheduler(1, lambda _: estimates).allocate_resources() + task1_id = self.new_task(1) #mom_id=1 and mom_id=0 yield equal priorities + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task1_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) + self.assertEqual('approved', self.radb.getTask(task1_id)['status']) + self.radb.updateTask(task1_id, task_status='scheduled') + self.assertEqual('scheduled', self.radb.getTask(task1_id)['status']) + # Second task must fail as it has a lower priority - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 513} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + task2_id = self.new_task(0) #mom_id=1 and mom_id=0 yield equal priorities + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 } ] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertFalse(allocation_succesful) + self.assertEqual('scheduled', self.radb.getTask(task1_id)['status']) + # Thought by JS: I think it's wrong that task2 has approved status, and does not give the proper feedback to the user. + # I think that the claims and task should go to conflict to make it clear to the user what happened. + self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + def test_partial_conflict(self): """ Whether a task gets scheduled correctly if it has a partial conflict after the first fit. 
""" - # First task must succeed - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }, - { 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + + # utcnow lies before the tasks we are scheduling (the tasks lie in the future) + self.datetime_mock.utcnow.return_value = datetime.datetime(2017, 1, 1, 0, 0, 0) + + max_bw_cap = self.get_station_bandwidth_max_capacity() + + # First task must succeed (for the test the mom_id determines the prio) + task_id = self.new_task(0) + estimates = [{'resource_types': {'bandwidth': 0.25*max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1 }, + {'resource_types': {'bandwidth': 0.25 * max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1} + ] + + scheduler = self.new_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) - # Second task must succeed as it has a higher priority - self.new_task(1000) - estimates = [{ 'resource_types': {'bandwidth': 512} }, - { 'resource_types': {'bandwidth': 513} }] - allocation_succesful = self.new_scheduler(1000, lambda _: estimates).allocate_resources() + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + self.radb.updateTask(task_id, task_status='scheduled') + self.assertEqual('scheduled', self.radb.getTask(task_id)['status']) + + # Second task must succeed as it has a higher priority (for the test the mom_id determines the prio) + task2_id = self.new_task(1000) + estimates = [{'resource_types': {'bandwidth': 0.25*max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1 }, + {'resource_types': {'bandwidth': 0.95 * max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 1} + ] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() 
self.assertTrue(allocation_succesful) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task2_id, status='claimed'))) - # First task must have been killed - otdb_id = self.fake_ra_database.tasks[0]["otdb_id"] - self.obscontrol_mock.assert_called_with(otdb_id) + # First task must have been unscheduled + # as a result, it should not have any claimed claims anymore + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task_id, status='claimed'))) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_id, status='tentative'))) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_id, status='conflict'))) + # and the low-prio task should now have conflict state (cause the high-prio task claimed the resources) + self.assertEqual('conflict', self.radb.getTask(task_id)['status']) def test_should_not_kill_a_task_without_a_mom_id(self): + max_bw_cap = self.get_station_bandwidth_max_capacity() + # First task must succeed - self.new_task_without_momid(0) - estimates = [{'resource_types': {'bandwidth': 512}}] - allocation_succesful = self.new_scheduler(0, lambda _: estimates).allocate_resources() + task_id = self.new_task_without_momid(0) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "RS106", + "resource_count": 1 }] + scheduler = self.new_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) - self.new_task(1000) - estimates = [{'resource_types': {'bandwidth': 513}}] - allocation_succesful = self.new_scheduler(1000, lambda _: estimates).allocate_resources() + task2_id = self.new_task(1000) + estimates = [{'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "RS106", + "resource_count": 1 }] + scheduler = self.new_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertFalse(allocation_succesful) - otdb_id 
= self.fake_ra_database.tasks[0]["otdb_id"] self.obscontrol_mock.assert_not_called() class DwellSchedulerTest(PrioritySchedulerTest): # The DwellScheduler must not regress on the PriorityScheduler, so we inherit all its tests - def new_task(self, task_id): - self.fake_ra_database.addTask(task_id, { - "mom_id": 1000 + task_id, - "otdb_id": 2000 + task_id, - "type": "observation", - "starttime": datetime.datetime(2017, 1, 1, 1, 0, 0), - "endtime": datetime.datetime(2017, 1, 1, 2, 0, 0), - }) - - self.fake_ra_database.commit() - self.fake_ra_database.committed = False # dont confuse subsequent checks on whether the scheduler committed - - def new_scheduler(self, task_id, resource_estimator): - return DwellScheduler(task_id, self.get_specification_tree(task_id), resource_estimator, - datetime.datetime(2017, 1, 1, 1, 0, 0), # minstarttime - datetime.datetime(2017, 1, 1, 1, 0, 0), # maxstarttime - datetime.timedelta(hours=1), # duration - self.fake_resource_availability_checker, None) - - def new_station_scheduler(self, task_id, specification_tree): - return DwellScheduler(task_id, specification_tree, self.fake_resource_estimator, - datetime.datetime(2017, 1, 1, 1, 0, 0), # minstarttime - datetime.datetime(2017, 1, 1, 1, 0, 0), # maxstarttime - datetime.timedelta(hours=1), # duration - FakeResourceAvailabilityChecker(), None) - - def new_dwell_scheduler(self, task_id, resource_estimator): - return DwellScheduler(task_id, self.get_specification_tree(task_id), resource_estimator, - datetime.datetime(2017, 1, 1, 1, 0, 0), # minstarttime - datetime.datetime(2017, 1, 2, 1, 0, 0), # maxstarttime - datetime.timedelta(hours=1), # duration - self.fake_resource_availability_checker, None) + class TestResourceAvailabilityChecker(ResourceAvailabilityChecker): + """Helper class to keep track of arguments in calls to get_is_claimable""" + def get_is_claimable(self, requested_resources, available_resources): + self.last_requested_resources = requested_resources + 
self.last_available_resources = available_resources + return super(DwellSchedulerTest.TestResourceAvailabilityChecker, self).get_is_claimable(requested_resources, + available_resources) + + def setUp(self): + super(DwellSchedulerTest, self).setUp() + self.resource_availability_checker = DwellSchedulerTest.TestResourceAvailabilityChecker(self.radb) + + def new_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """overridden factory method returning a scheduler class specific for this test class. + In this case, in the DwellSchedulerTest class, it returns a new DwellScheduler.""" + return self.new_dwell_scheduler(task_id, resource_estimator, specification_tree, allow_dwelling=False) + + def new_station_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """overridden factory method returning a scheduler class specific for this test class. + In this case, in the DwellSchedulerTest class, it returns a new DwellScheduler.""" + return self.new_dwell_scheduler(task_id, resource_estimator, specification_tree, allow_dwelling=False) + + def new_priority_scheduler(self, task_id, resource_estimator=None, specification_tree=None): + """overridden factory method returning a scheduler class specific for this test class. + In this case, in the DwellSchedulerTest class, it returns a new DwellScheduler.""" + return self.new_dwell_scheduler(task_id, resource_estimator, specification_tree, allow_dwelling=False) + + def new_dwell_scheduler(self, task_id, resource_estimator=None, specification_tree=None, allow_dwelling=True): + if allow_dwelling: + min_starttime = datetime.datetime(2017, 1, 1, 1, 0, 0) + max_starttime = datetime.datetime(2017, 1, 2, 1, 0, 0) + else: + # we do not want dwelling, so limit the dwell starttime window to the task's actual starttime. 
+ min_starttime = self.radb.getTask(task_id)['starttime'] + max_starttime = min_starttime + + return DwellScheduler(task_id, + specification_tree if specification_tree else self.get_specification_tree(task_id), + resource_estimator if resource_estimator else self.fake_resource_estimator, + min_starttime, + max_starttime, + datetime.timedelta(hours=1), # duration + self.resource_availability_checker, self.radb.dbcreds) def test_no_dwell(self): """ Whether a task will not dwell unnecessarily on an empty system. """ # Task must succeed - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_dwell_scheduler(0, lambda _: estimates).allocate_resources() + task_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': 512}, + "root_resource_group": "CS001", + "resource_count": 1 } ] + scheduler = self.new_dwell_scheduler(task_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) - # Task must NOT have been moved - self.assertEqual(self.fake_ra_database.tasks[0]["starttime"], datetime.datetime(2017, 1, 1, 1, 0, 0)) + # Task must be positioned at start of dwelling period. + task = self.radb.getTask(task_id) + self.assertEqual(scheduler.min_starttime, task["starttime"]) + self.assertEqual(scheduler.min_starttime+scheduler.duration, task["endtime"]) def test_dwell(self): """ Whether a task will dwell after an existing task. 
""" + max_bw_cap = self.get_station_bandwidth_max_capacity() + # First task must succeed - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_dwell_scheduler(0, lambda _: estimates).allocate_resources() + task1_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 }] + scheduler = self.new_dwell_scheduler(task1_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) # Second task must also succeed - self.new_task(1) - estimates = [{ 'resource_types': {'bandwidth': 513} }] - allocation_succesful = self.new_dwell_scheduler(1, lambda _: estimates).allocate_resources() + task2_id = self.new_task(1) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 }] + scheduler = self.new_dwell_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) # Second task must have been moved, first task not - self.assertEqual(self.fake_ra_database.tasks[0]["starttime"], datetime.datetime(2017, 1, 1, 1, 0, 0)) - self.assertEqual(self.fake_ra_database.tasks[0]["endtime"], datetime.datetime(2017, 1, 1, 2, 0, 0)) - self.assertEqual(self.fake_ra_database.tasks[1]["starttime"], datetime.datetime(2017, 1, 1, 2, 1, 0)) - self.assertEqual(self.fake_ra_database.tasks[1]["endtime"], datetime.datetime(2017, 1, 1, 3, 1, 0)) + self.assertEqual(self.radb.getTask(task1_id)["starttime"], datetime.datetime(2017, 1, 1, 1, 0, 0)) + self.assertEqual(self.radb.getTask(task1_id)["endtime"], datetime.datetime(2017, 1, 1, 2, 0, 0)) + self.assertEqual(self.radb.getTask(task2_id)["starttime"], datetime.datetime(2017, 1, 1, 2, 1, 0)) + self.assertEqual(self.radb.getTask(task2_id)["endtime"], datetime.datetime(2017, 1, 1, 3, 1, 0)) def 
test_dwell_respect_claim_endtime(self): """ Whether a dwelling task will honour the claim endtimes, instead of the task endtime. """ + max_bw_cap = self.get_station_bandwidth_max_capacity() + # First task must succeed - self.new_task(0) - estimates = [{ 'resource_types': {'bandwidth': 512} }] - allocation_succesful = self.new_dwell_scheduler(0, lambda _: estimates).allocate_resources() + task1_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 }] + # use normal basic scheduler for first normal task, which we want to schedule in a normal (non-dwell) way. + scheduler = self.new_basic_scheduler(task1_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task1_id, status='claimed'))) # Extend claim - self.fake_ra_database.claims[0][0]["endtime"] += datetime.timedelta(hours=1) + task = self.radb.getTask(task1_id) + self.radb.updateResourceClaims(where_task_ids=task1_id, endtime=task["endtime"] + datetime.timedelta(hours=1)) + self.assertEqual(2, len(self.radb.getResourceClaims(task_ids=task1_id, status='claimed'))) + + # Second task must also succeed + task2_id = self.new_task(1) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 }] + scheduler = self.new_dwell_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertTrue(allocation_succesful) + + # Second task must have been moved beyond 1st claim endtime, first task not + self.assertEqual(self.radb.getTask(task1_id)["starttime"], datetime.datetime(2017, 1, 1, 1, 0, 0)) + self.assertEqual(self.radb.getTask(task1_id)["endtime"], datetime.datetime(2017, 1, 1, 2, 0, 0)) + self.assertEqual(self.radb.getTask(task2_id)["starttime"], datetime.datetime(2017, 
1, 1, 3, 1, 0)) + self.assertEqual(self.radb.getTask(task2_id)["endtime"], datetime.datetime(2017, 1, 1, 4, 1, 0)) + + def test_dwellScheduler_should_give_all_available_resources_on_second_pass(self): + """ + This tests bug LSRT-60 where the second observation of template two does not get scheduled + when dwelling is active. The basic scheduler keeps track of resources that can't be killed. + The guess is that its used for optimization purposes. The cause of the bug is that this list + does not get cleared and on dwelling to the next part it should fit. But the resources in + that list get subtracted from the list handed to the resource_availability checker. + This test verifies that the complete list should be provided on the second try. + """ + max_bw_cap = self.get_station_bandwidth_max_capacity() + + # First task must succeed + task1_id = self.new_task(0) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 }] + scheduler = self.new_dwell_scheduler(task1_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() + self.assertTrue(allocation_succesful) # Second task must also succeed - self.new_task(1) - estimates = [{ 'resource_types': {'bandwidth': 513} }] - allocation_succesful = self.new_dwell_scheduler(1, lambda _: estimates).allocate_resources() + task2_id = self.new_task(1) + estimates = [{ 'resource_types': {'bandwidth': max_bw_cap}, + "root_resource_group": "CS001", + "resource_count": 2 }] + scheduler = self.new_dwell_scheduler(task2_id, resource_estimator=lambda _: estimates) + allocation_succesful = scheduler.allocate_resources() self.assertTrue(allocation_succesful) - # Second task must have been moved beyond claim endtime - self.assertEqual(self.fake_ra_database.tasks[1]["starttime"], datetime.datetime(2017, 1, 1, 3, 1, 0)) - self.assertEqual(self.fake_ra_database.tasks[1]["endtime"], datetime.datetime(2017, 1, 1, 4, 1, 0)) + # avialable 
resources can be limited by tracking unkillable resources. They should be + # cleared on the second try like in this test. + self.assertEqual(set(r['name'] for r in self.resource_availability_checker.last_available_resources), + set(r['name'] for r in self.radb.getResources(include_availability=True))) if __name__ == '__main__': logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssignmentDatabase/CMakeLists.txt index a26dc94c2967c204374d4866ba1f3a9c5446bf95..ddd09281e123d0d2d105162e2e041508292438b8 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/CMakeLists.txt +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/CMakeLists.txt @@ -22,10 +22,8 @@ install(FILES radbpglistener.ini DESTINATION etc/supervisord.d) -add_subdirectory(radb/sql) add_subdirectory(tests) -# symmetric install of sql with symlinks in build share/... and normal install in installed/share/... 
set(sql_files radb/sql/add_notifications.sql radb/sql/add_functions_and_triggers.sql radb/sql/add_resource_allocation_statics.sql diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py index a160d325768cf6d9ee9d73281fbb76361175b357..e84053fdb515b1c66421a592e9999f6ae8051574 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py @@ -39,6 +39,9 @@ _FETCH_NONE=0 _FETCH_ONE=1 _FETCH_ALL=2 +class RADBError(Exception): + pass + class RADatabase: def __init__(self, dbcreds=None, log_queries=False): self.dbcreds = dbcreds @@ -50,6 +53,10 @@ class RADatabase: self._claimStatusName2IdCache = {} self._claimStatusId2NameCache = {} + # keep track if last/current transaction was already committed or rolled_back + self.committed = False + self.rolled_back = False + def _connect(self): self.conn = None self.cursor = None @@ -75,6 +82,9 @@ class RADatabase: for i in range(5): try: start = datetime.utcnow() + # keep track if last/current transaction was already committed or rolled_back + self.committed = False + self.rolled_back = False self.cursor.execute(query, qargs) if self.log_queries: elapsed = datetime.utcnow() - start @@ -98,6 +108,9 @@ class RADatabase: logger.error("Rolling back query=\'%s\' due to error: \'%s\'" % (self._queryAsSingleLine(query, qargs), e)) self.rollback() return [] + # TODO: instead of doing a "silent" rollback and continue, we should raise an RADBError. + # We cannot oversee the impact of such a change at this moment, so let's investigate that later. 
+ # raise RADBError(e.message) self._log_database_notifications() @@ -116,10 +129,14 @@ class RADatabase: def commit(self): logger.info('commit') self.conn.commit() + # keep track if last/current transaction was already committed or rolled_back + self.committed = True def rollback(self): logger.info('rollback') self.conn.rollback() + # keep track if last/current transaction was already committed or rolled_back + self.rolled_back = True def getTaskStatuses(self): query = '''SELECT * from resource_allocation.task_status;''' @@ -412,7 +429,7 @@ class RADatabase: VALUES (%s, %s, %s, %s, %s) RETURNING id;''' - id = self._executeQuery(query, (mom_id, otdb_id, task_status, task_type, specification_id), fetch=_FETCH_ONE)['id'] + id = self._executeQuery(query, (mom_id, otdb_id, task_status, task_type, specification_id), fetch=_FETCH_ONE).get('id') if commit: self.commit() return id @@ -808,7 +825,15 @@ class RADatabase: claim_status_id = claim_status query = '''SELECT * from resource_allocation.get_current_resource_usage(%s, %s)''' - return self._executeQuery(query, (resource_id, claim_status_id), fetch=_FETCH_ONE) + result = self._executeQuery(query, (resource_id, claim_status_id), fetch=_FETCH_ONE) + + if result is None or result.get('resource_id') is None: + result = { 'resource_id': resource_id, + 'status_id': claim_status_id, + 'as_of_timestamp': datetime.utcnow(), + 'usage': 0 } + + return result def get_resource_usage_at_or_before(self, resource_id, timestamp, claim_status='claimed', exactly_at=False, only_before=False): if isinstance(claim_status, basestring): @@ -817,7 +842,14 @@ class RADatabase: claim_status_id = claim_status query = '''SELECT * from resource_allocation.get_resource_usage_at_or_before(%s, %s, %s, %s, %s, %s)''' - return self._executeQuery(query, (resource_id, claim_status_id, timestamp, exactly_at, only_before, False), fetch=_FETCH_ONE) + result = self._executeQuery(query, (resource_id, claim_status_id, timestamp, exactly_at, only_before, False), 
fetch=_FETCH_ONE) + + if result is None or result.get('resource_id') is None: + result = { 'resource_id': resource_id, + 'status_id': claim_status_id, + 'as_of_timestamp': timestamp, + 'usage': 0 } + return result def updateResourceAvailability(self, resource_id, active=None, available_capacity=None, total_capacity=None, commit=True): if active is not None: @@ -942,7 +974,7 @@ class RADatabase: r_items[r_item_id] = r_item parent_id = relation['resource_group_parent_id'] - if parent_id != None: + if parent_id != None and parent_id in rg_items: r_items[r_item_id]['parent_group_ids'].append(parent_id) rg_items[parent_id]['resource_ids'].append(r_item_id) @@ -1260,6 +1292,7 @@ class RADatabase: result = self.insertResourceClaims(task_id, [claim], username, user_id, commit) if result: return result[0] + return None def insertResourceClaims(self, task_id, claims, username, user_id, commit=True): '''bulk insert of a list of resource claims for a task(_id). All claims are inserted with status tentative. 
@@ -1280,12 +1313,12 @@ class RADatabase: ''' logger.info('insertResourceClaims for task_id=%d with %d claim(s)' % (task_id, len(claims))) - status_strings = set([c['status'] for c in claims if isinstance(c['status'], basestring)]) + status_strings = set([c.get('status', 'tentative') for c in claims if isinstance(c.get('status', 'tentative'), basestring)]) if status_strings: status_string2id = {s:self.getResourceClaimStatusId(s) for s in status_strings} for c in claims: - if isinstance(c['status'], basestring): - c['status_id'] = status_string2id[c['status']] + if isinstance(c.get('status', 'tentative'), basestring): + c['status_id'] = status_string2id[c.get('status', 'tentative')] elif isinstance(c['status'], int): c['status_id'] = c['status'] @@ -1590,6 +1623,8 @@ class RADatabase: if commit: self.commit() return {'inserted': True, 'specification_id': specId, 'task_id': taskId} + else: + self.rollback() except Exception as e: logger.error(e) self.rollback() diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/CMakeLists.txt deleted file mode 100644 index d0443398bad2a8ca891f2d43956776ce0dcfcf8e..0000000000000000000000000000000000000000 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/CMakeLists.txt +++ /dev/null @@ -1,12 +0,0 @@ -# $Id: CMakeLists.txt 32341 2015-08-28 11:59:26Z schaap $ - -set(sql_files add_notifications.sql - add_functions_and_triggers.sql - add_resource_allocation_statics.sql - add_virtual_instrument.sql - create_database.sql - create_and_populate_database.sql - README) - -install_files(/share/radb/sql FILES ${sql_files}) - diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql index 9fa8caae0f520853b0a5dfb243f92e8ebcb5dffd..56504320b0c87b8c630ea7f6b6fd668908fef162 100644 --- 
a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_functions_and_triggers.sql @@ -12,10 +12,13 @@ DECLARE claim_tentative_status_id int := 0; --beware: hard coded instead of lookup for performance claim_claimed_status_id int := 1; --beware: hard coded instead of lookup for performance task_approved_status_id int := 300; --beware: hard coded instead of lookup for performance + task_conflict_status_id int := 335; --beware: hard coded instead of lookup for performance BEGIN IF NEW.status_id <> OLD.status_id THEN - IF NEW.status_id = task_approved_status_id THEN - UPDATE resource_allocation.resource_claim rc SET status_id=claim_tentative_status_id WHERE rc.task_id=NEW.id AND rc.status_id <> claim_tentative_status_id; + IF NEW.status_id = task_approved_status_id OR NEW.status_id = task_conflict_status_id THEN + UPDATE resource_allocation.resource_claim + SET status_id=claim_tentative_status_id + WHERE (task_id=NEW.id AND status_id = claim_claimed_status_id); ELSIF NEW.status_id = ANY(ARRAY[400, 500, 600, 900, 1000, 1100]) THEN --prevent task status to be upgraded to scheduled (or beyond) when not all its claims are claimed IF EXISTS (SELECT id FROM resource_allocation.resource_claim WHERE task_id = NEW.id AND status_id <> claim_claimed_status_id) THEN @@ -232,37 +235,13 @@ CREATE TRIGGER T_specification_insertupdate_check_startendtimes --------------------------------------------------------------------------------------------------------------------- -CREATE OR REPLACE FUNCTION resource_allocation.on_claim_insertupdate_check_startendtimes() - RETURNS trigger AS -$BODY$ -BEGIN - IF NEW.starttime > NEW.endtime THEN - RAISE EXCEPTION 'claim starttime > endtime: %', NEW; - END IF; -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.on_claim_insertupdate_check_startendtimes() - OWNER TO resourceassignment; - -DROP 
TRIGGER IF EXISTS T_claim_insertupdate_check_startendtimes ON resource_allocation.resource_claim; -CREATE TRIGGER T_claim_insertupdate_check_startendtimes - BEFORE INSERT OR UPDATE - ON resource_allocation.resource_claim - FOR EACH ROW - EXECUTE PROCEDURE resource_allocation.on_claim_insertupdate_check_startendtimes(); - ---------------------------------------------------------------------------------------------------------------------- - CREATE OR REPLACE FUNCTION resource_allocation.process_new_claim_into_resource_usages(new_claim resource_allocation.resource_claim) RETURNS void AS $$ DECLARE - usage_at_or_before_start RECORD; - usage_at_or_before_end RECORD; - intermediate_usage RECORD; + usage_at_or_before_start resource_allocation.resource_usage; + usage_at_or_before_end resource_allocation.resource_usage; + intermediate_usage resource_allocation.resource_usage; BEGIN -- find resource_usage at claim starttime SELECT * FROM resource_allocation.get_resource_usage_at_or_before(new_claim.resource_id, new_claim.status_id, new_claim.starttime, false, false, false) into usage_at_or_before_start; @@ -292,6 +271,7 @@ BEGIN INSERT INTO resource_allocation.resource_usage (resource_id, status_id, as_of_timestamp, usage) VALUES (new_claim.resource_id, new_claim.status_id, new_claim.endtime, usage_at_or_before_end.usage); END IF; + --TODO: 20180709; why no else with an upate? ELSE -- no previous usage known, so insert 0 as the last usage INSERT INTO resource_allocation.resource_usage (resource_id, status_id, as_of_timestamp, usage) @@ -316,6 +296,16 @@ COMMENT ON FUNCTION resource_allocation.process_new_claim_into_resource_usages(n --------------------------------------------------------------------------------------------------------------------- +-- 20180903: brainstorm with AK & JS: the resource_usages table is useful because it makes lookups faster. However, +-- there are known bugs in inserting/updating the resource_usages table upon changes in resource_claims. 
+-- We discussed the idea of using an additional deltas helper table: claims -> deltas -> usages. +-- the current implementation goes diretly from claims -> usages, and loops over claims "opening" and "closing" in the usage table. +-- Introducing the intermediate deltas table has the benefit of using simple sql sum's, and not keeping track of opening/closing claims. +-- Highly recommended to give this a try in JIRA SW-35. + +--------------------------------------------------------------------------------------------------------------------- + + CREATE OR REPLACE FUNCTION resource_allocation.rebuild_resource_usages_from_claims() RETURNS void AS $$ @@ -602,6 +592,7 @@ BEGIN IF usage_at_end.usage = 0 THEN --usage_at_end was 'caused' by this deleted claim only, so delete it + --TODO:20180704 do not delete if another claim with this status and timestamp also causes this 0 DELETE FROM resource_allocation.resource_usage ru WHERE ru.id = usage_at_end.id; END IF; @@ -658,15 +649,7 @@ BEGIN -- try again, but now without the option to rebuild_usage_when_not_found (to prevent endless recursion) SELECT * FROM resource_allocation.get_resource_usage_at_or_before(_resource_id, _claim_status_id, _timestamp, exactly_at, only_before, false) INTO result; - RAISE NOTICE 'get_resource_usage_at_or_before(_resource_id=%, status_id=%, timestamp=%, exactly_at=%, only_before=%, rebuild_usage_when_not_found=%): after rebuild, result=%.', _resource_id, _claim_status_id, _timestamp, exactly_at, only_before, rebuild_usage_when_not_found, result; - END IF; - - IF result IS NULL THEN - -- if result is still null (after possible rebuild etc), then return a 'default' usage of 0 - result.resource_id = _resource_id; - result.status_id = _claim_status_id; - result.as_of_timestamp = _timestamp; - result.usage = 0; + RAISE NOTICE 'get_resource_usage_at_or_before(_resource_id=%, status_id=%, timestamp=%, exactly_at=%, only_before=%, rebuild_usage_when_not_found=%): after rebuild, result=%.', _resource_id, 
_claim_status_id, _timestamp, exactly_at, only_before, false, result; END IF; RETURN result; @@ -711,8 +694,8 @@ CREATE OR REPLACE FUNCTION resource_allocation.get_max_resource_usage_between(_r RETURNS resource_allocation.resource_usage AS $$ DECLARE - max_resource_usage_in_time_window record; - max_resource_at_or_before_starttime record; + max_resource_usage_in_time_window resource_allocation.resource_usage; + max_resource_at_or_before_starttime resource_allocation.resource_usage; BEGIN SELECT * FROM resource_allocation.get_resource_usage_at_or_before(_resource_id, _claim_status_id, _lower, false, false, false) into max_resource_at_or_before_starttime; @@ -725,10 +708,14 @@ BEGIN LIMIT 1 INTO max_resource_usage_in_time_window; IF max_resource_usage_in_time_window IS NOT NULL THEN - IF max_resource_usage_in_time_window.usage > max_resource_at_or_before_starttime.usage THEN - RETURN max_resource_usage_in_time_window; + IF max_resource_at_or_before_starttime IS NULL THEN + RETURN max_resource_usage_in_time_window; ELSE - RETURN max_resource_at_or_before_starttime; + IF max_resource_usage_in_time_window.usage > max_resource_at_or_before_starttime.usage THEN + RETURN max_resource_usage_in_time_window; + ELSE + RETURN max_resource_at_or_before_starttime; + END IF; END IF; ELSE -- could also be NULL but that is checked for elsewhere @@ -783,7 +770,7 @@ BEGIN END IF; END; $$ LANGUAGE plpgsql; -ALTER FUNCTION resource_allocation.get_resource_claimable_capacity_between(_resource_id int, _lower timestamp, _upper timestamp) +ALTER FUNCTION resource_allocation.get_resource_claimable_capacity_between(_resource_id int, _lower timestamp, _upper timestamp) OWNER TO resourceassignment; COMMENT ON FUNCTION resource_allocation.get_resource_claimable_capacity_between(_resource_id int, _lower timestamp, _upper timestamp) IS 'get the maximum resource usage for the given _resource_id for claims with given _claim_status_id in the period between the given _lower and _upper timestamps'; @@ 
-867,6 +854,14 @@ DECLARE BEGIN --order of following steps is important, do not reorder the steps + IF TG_OP = 'INSERT' OR TG_OP = 'UPDATE' THEN + IF NEW.starttime >= NEW.endtime THEN + -- Conceptually, you can't claim and release a resource at the same timestamp. + -- Nor can you claim a resource for a negative timespan. + RAISE EXCEPTION 'claim starttime >= endtime: %', NEW; + END IF; + END IF; + -- bounce any inserted claim which is not tentative IF TG_OP = 'INSERT' THEN IF NEW.status_id <> claim_tentative_status_id THEN @@ -901,7 +896,9 @@ BEGIN --update the resource usages affected by this claim --do this before we check for conflicts, because this claim might be shifted for example --which might influence the resource_usages which determine wheter a claim fits. - PERFORM resource_allocation.process_old_claim_outof_resource_usages(OLD); + IF OLD.resource_id <> 117 THEN --20180903: skip checking of cep4 storage until JIRA SW-35 is solved. + PERFORM resource_allocation.process_old_claim_outof_resource_usages(OLD); + END IF; END IF; --only check claim if status and/or claim_size and/or start/end time changed @@ -909,26 +906,33 @@ BEGIN OLD.claim_size <> NEW.claim_size OR OLD.starttime <> NEW.starttime OR OLD.endtime <> NEW.endtime)) THEN - --check if claim fits or has conflicts - SELECT * FROM resource_allocation.has_conflict_with_overlapping_claims(NEW) INTO claim_has_conflicts; - - IF claim_has_conflicts THEN - IF NEW.status_id <> claim_conflict_status_id THEN - -- only set claims to conflict if task status <= queued - -- when a claim goes to conflict, then so does it's task, and we don't want that for running/finished/aborted tasks - IF EXISTS (SELECT 1 FROM resource_allocation.task - WHERE id=NEW.task_id - AND status_id = ANY(ARRAY[300, 335, 350, 400, 500])) THEN -- hardcoded tasks statuses <= queued - -- conflict with others, so set claim status to conflict - NEW.status_id := claim_conflict_status_id; + IF NEW.resource_id <> 117 THEN --20180903: skip 
checking of cep4 storage until JIRA SW-35 is solved. + --check if claim fits or has conflicts + SELECT * FROM resource_allocation.has_conflict_with_overlapping_claims(NEW) INTO claim_has_conflicts; + + IF claim_has_conflicts THEN + IF NEW.status_id <> claim_conflict_status_id THEN + -- only set claims to conflict if task status <= queued + -- when a claim goes to conflict, then so does it's task, and we don't want that for running/finished/aborted tasks + IF EXISTS (SELECT 1 FROM resource_allocation.task + WHERE id=NEW.task_id + AND status_id = ANY(ARRAY[300, 335, 350, 400, 500])) THEN -- hardcoded tasks statuses <= queued + -- conflict with others, so set claim status to conflict + NEW.status_id := claim_conflict_status_id; + END IF; + END IF; + ELSE + -- no conflict (anymore) with others, so set claim status to tentative if currently in conflict + IF NEW.status_id = claim_conflict_status_id THEN + NEW.status_id := claim_tentative_status_id; END IF; - END IF; - ELSE - -- no conflict (anymore) with others, so set claim status to tentative if currently in conflict - IF NEW.status_id = claim_conflict_status_id THEN - NEW.status_id := claim_tentative_status_id; END IF; END IF; + + IF TG_OP = 'INSERT' OR TG_OP = 'UPDATE' THEN + --update the resource usages affected by this claim + PERFORM resource_allocation.process_new_claim_into_resource_usages(NEW); + END IF; END IF; IF TG_OP = 'DELETE' THEN @@ -966,14 +970,6 @@ DECLARE affected_claim resource_allocation.resource_claim; claim_has_conflicts boolean; BEGIN - --do not process_old_claim_outof_resource_usages(OLD) - --because that has been done already in before_claim_insertupdatedelete - - IF TG_OP = 'INSERT' OR TG_OP = 'UPDATE' THEN - --update the resource usages affected by this claim - PERFORM resource_allocation.process_new_claim_into_resource_usages(NEW); - END IF; - -- in the before trigger function, everything on the claim has been checked and adapted. 
-- now (in the after trigger, when all claims were inserted/updated in the database), let's check if the task should also be updated (to conflict status for example) -- only if claim status was changed or inserted... @@ -982,8 +978,14 @@ BEGIN --if claim status went to conflict, then set the task status to conflict as well UPDATE resource_allocation.task SET status_id=task_conflict_status_id WHERE id=NEW.task_id AND status_id <> task_conflict_status_id; ELSIF NEW.status_id = claim_tentative_status_id THEN - IF NOT EXISTS (SELECT id FROM resource_allocation.resource_claim WHERE task_id = NEW.task_id AND status_id = claim_conflict_status_id) THEN - UPDATE resource_allocation.task SET status_id=task_approved_status_id WHERE id=NEW.task_id AND status_id <> task_approved_status_id; + IF NOT EXISTS (SELECT id FROM resource_allocation.resource_claim + WHERE task_id = NEW.task_id + AND status_id = claim_conflict_status_id) THEN + IF NOT EXISTS (SELECT id FROM resource_allocation.task + WHERE id = NEW.task_id + AND status_id = task_approved_status_id) THEN + UPDATE resource_allocation.task SET status_id=task_approved_status_id WHERE id=NEW.task_id AND status_id <> task_approved_status_id; + END IF; END IF; END IF; END IF; @@ -1000,12 +1002,14 @@ BEGIN AND rc.endtime >= OLD.starttime AND rc.starttime < OLD.endtime LOOP - --check if claim fits or has conflicts - SELECT * FROM resource_allocation.has_conflict_with_overlapping_claims(affected_claim) INTO claim_has_conflicts; + IF affected_claim.resource_id <> 117 THEN --20180903: skip checking of cep4 storage until JIRA SW-35 is solved. 
+ --check if claim fits or has conflicts + SELECT * FROM resource_allocation.has_conflict_with_overlapping_claims(affected_claim) INTO claim_has_conflicts; - IF NOT claim_has_conflicts THEN - -- no conflict (anymore) with others, so set claim status to tentative - UPDATE resource_allocation.resource_claim SET status_id=claim_tentative_status_id WHERE id = affected_claim.id; + IF NOT claim_has_conflicts THEN + -- no conflict (anymore) with others, so set claim status to tentative + UPDATE resource_allocation.resource_claim SET status_id=claim_tentative_status_id WHERE id = affected_claim.id; + END IF; END IF; END LOOP; END IF; @@ -1021,16 +1025,18 @@ BEGIN AND rc.endtime >= NEW.starttime AND rc.starttime < NEW.endtime LOOP - --check if claim fits or has conflicts - SELECT * FROM resource_allocation.has_conflict_with_overlapping_claims(affected_claim) INTO claim_has_conflicts; + IF affected_claim.resource_id <> 117 THEN --20180903: skip checking of cep4 storage until JIRA SW-35 is solved. 
+ --check if claim fits or has conflicts + SELECT * FROM resource_allocation.has_conflict_with_overlapping_claims(affected_claim) INTO claim_has_conflicts; - IF claim_has_conflicts THEN - -- new conflict for affected_claim because this NEW claim is now claimed - UPDATE resource_allocation.resource_claim SET status_id=claim_conflict_status_id WHERE id = affected_claim.id; + IF claim_has_conflicts THEN + -- new conflict for affected_claim because this NEW claim is now claimed + UPDATE resource_allocation.resource_claim SET status_id=claim_conflict_status_id WHERE id = affected_claim.id; + END IF; END IF; END LOOP; END IF; - + IF TG_OP = 'DELETE' THEN RETURN OLD; END IF; diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_triggers.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_triggers.sql deleted file mode 100644 index 5686fc819241bcf8fefbc933697abc7436956c89..0000000000000000000000000000000000000000 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_triggers.sql +++ /dev/null @@ -1,240 +0,0 @@ ---add triggers and trigger functions to radb (note, there are also the notification triggers in the add_notifications.sql file) - -BEGIN; - --- only issue >warnings log messages. 
(only during this transaction) -SET LOCAL client_min_messages=warning; - -DROP TRIGGER IF EXISTS T_delete_resource_claims_for_approved_task ON resource_allocation.task CASCADE; -DROP FUNCTION IF EXISTS resource_allocation.delete_resource_claims_for_approved_task(); - -CREATE OR REPLACE FUNCTION resource_allocation.delete_resource_claims_for_approved_task() - RETURNS trigger AS -$BODY$ -BEGIN - IF NEW.status_id <> OLD.status_id AND NEW.status_id = 300 THEN - DELETE FROM resource_allocation.resource_claim rc WHERE rc.task_id = NEW.id; - END IF; -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.delete_resource_claims_for_approved_task() - OWNER TO resourceassignment; -COMMENT ON FUNCTION resource_allocation.delete_resource_claims_for_approved_task() - IS 'function which is called by task table update trigger, which deletes all the tasks resource claims.'; - -CREATE TRIGGER T_delete_resource_claims_for_approved_task - AFTER UPDATE - ON resource_allocation.task - FOR EACH ROW - EXECUTE PROCEDURE resource_allocation.delete_resource_claims_for_approved_task(); -COMMENT ON TRIGGER T_delete_resource_claims_for_approved_task ON resource_allocation.task - IS 'task table update trigger, calls the resource_allocation.delete_resource_claims_for_approved_task() function.'; - ---------------------------------------------------------------------------------------------------------------------- - -DROP TRIGGER IF EXISTS T_delete_conflict_reasons_after_resource_claim_update ON resource_allocation.resource_claim CASCADE; -DROP FUNCTION IF EXISTS resource_allocation.delete_conflict_reasons_after_resource_claim_update(); - -CREATE OR REPLACE FUNCTION resource_allocation.delete_conflict_reasons_after_resource_claim_update() - RETURNS trigger AS -$BODY$ -BEGIN - IF OLD.status_id = 2 AND NEW.status_id <> 2 THEN --new status is not conflict - DELETE FROM resource_allocation.resource_claim_conflict_reason rccr WHERE rccr.resource_claim_id 
= NEW.id; - END IF; -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.delete_conflict_reasons_after_resource_claim_update() - OWNER TO resourceassignment; -COMMENT ON FUNCTION resource_allocation.delete_conflict_reasons_after_resource_claim_update() - IS 'function which is called by resource_claim table update trigger, which deletes resource_claim_conflict_reasons when the claim status is updated to !conflict.'; - -CREATE TRIGGER T_delete_conflict_reasons_after_resource_claim_update - AFTER UPDATE - ON resource_allocation.resource_claim - FOR EACH ROW - EXECUTE PROCEDURE resource_allocation.delete_conflict_reasons_after_resource_claim_update(); - ---------------------------------------------------------------------------------------------------------------------- - -DROP TRIGGER IF EXISTS T_before_insert_conflict_reason_do_resource_claim_status_check ON resource_allocation.resource_claim_conflict_reason CASCADE; -DROP FUNCTION IF EXISTS resource_allocation.before_insert_conflict_reason_do_resource_claim_status_check(); - -CREATE OR REPLACE FUNCTION resource_allocation.before_insert_conflict_reason_do_resource_claim_status_check() - RETURNS trigger AS -$BODY$ -BEGIN - -- check if referred resource_claim is in conflict status, else raise - IF (SELECT COUNT(id) FROM resource_allocation.resource_claim rc WHERE rc.id = NEW.resource_claim_id AND rc.status_id = 2) = 0 THEN - RAISE EXCEPTION 'resource_claim has no conflict status'; - END IF; -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.before_insert_conflict_reason_do_resource_claim_status_check() - OWNER TO resourceassignment; -COMMENT ON FUNCTION resource_allocation.before_insert_conflict_reason_do_resource_claim_status_check() - IS 'check if referred resource_claim is in conflict status, else raise'; - -CREATE TRIGGER T_before_insert_conflict_reason_do_resource_claim_status_check - BEFORE INSERT - ON 
resource_allocation.resource_claim_conflict_reason - FOR EACH ROW - EXECUTE PROCEDURE resource_allocation.before_insert_conflict_reason_do_resource_claim_status_check(); - ---------------------------------------------------------------------------------------------------------------------- - -DROP TRIGGER IF EXISTS T_delete_conflict_reasons_after_task_update ON resource_allocation.task CASCADE; -DROP FUNCTION IF EXISTS resource_allocation.delete_conflict_reasons_after_task_update(); - -CREATE OR REPLACE FUNCTION resource_allocation.delete_conflict_reasons_after_task_update() - RETURNS trigger AS -$BODY$ -BEGIN - IF OLD.status_id = 335 AND NEW.status_id <> 335 THEN --new status is not conflict - DELETE FROM resource_allocation.task_conflict_reason tcr WHERE tcr.task_id = NEW.id; - END IF; -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.delete_conflict_reasons_after_task_update() - OWNER TO resourceassignment; -COMMENT ON FUNCTION resource_allocation.delete_conflict_reasons_after_task_update() - IS 'function which is called by task table update trigger, which deletes task_conflict_reasons when the task status is updated to !conflict.'; - -CREATE TRIGGER T_delete_conflict_reasons_after_task_update - AFTER UPDATE - ON resource_allocation.task - FOR EACH ROW - EXECUTE PROCEDURE resource_allocation.delete_conflict_reasons_after_task_update(); - ---------------------------------------------------------------------------------------------------------------------- - -DROP TRIGGER IF EXISTS T_before_insert_conflict_reason_do_task_status_check ON resource_allocation.task_conflict_reason CASCADE; -DROP FUNCTION IF EXISTS resource_allocation.before_insert_conflict_reason_do_task_status_check(); - -CREATE OR REPLACE FUNCTION resource_allocation.before_insert_conflict_reason_do_task_status_check() - RETURNS trigger AS -$BODY$ -BEGIN - -- check if referred task is in conflict status, else raise - IF (SELECT COUNT(id) FROM 
resource_allocation.task task WHERE task.id = NEW.task_id AND task.status_id = 335) = 0 THEN - RAISE EXCEPTION 'task has no conflict status'; - END IF; -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.before_insert_conflict_reason_do_task_status_check() - OWNER TO resourceassignment; -COMMENT ON FUNCTION resource_allocation.before_insert_conflict_reason_do_task_status_check() - IS 'check if referred task is in conflict status, else raise'; - -CREATE TRIGGER T_before_insert_conflict_reason_do_task_status_check - BEFORE INSERT - ON resource_allocation.task_conflict_reason - FOR EACH ROW - EXECUTE PROCEDURE resource_allocation.before_insert_conflict_reason_do_task_status_check(); - ---------------------------------------------------------------------------------------------------------------------- - -DROP TRIGGER IF EXISTS T_specification_insertupdate_check_startendtimes ON resource_allocation.specification; -DROP FUNCTION IF EXISTS resource_allocation.on_insertupdate_check_specification_startendtimes(); - -CREATE OR REPLACE FUNCTION resource_allocation.on_insertupdate_check_specification_startendtimes() - RETURNS trigger AS -$BODY$ -DECLARE -task RECORD; -pred_task RECORD; -suc_task RECORD; -predecessor_task_id int; -successor_task_id int; -moved_seconds double precision; -duration double precision; -max_pred_endtime timestamp := '1900-01-01 00:00:00'; -tmp_time timestamp; -min_starttime timestamp; -min_inter_task_delay int; -BEGIN - --swap start/end time if needed - IF NEW.starttime > NEW.endtime THEN - RAISE NOTICE 'NEW.starttime > NEW.endtime'; - tmp_time := NEW.starttime; - NEW.starttime := NEW.endtime; - NEW.endtime := tmp_time; - END IF; - - --store task duration - SELECT EXTRACT(epoch FROM age(NEW.endtime, NEW.starttime)) INTO duration; - - --deterimine max_pred_endtime - FOR task IN SELECT * FROM resource_allocation.task_view tv WHERE tv.specification_id = NEW.id LOOP - IF task.predecessor_ids IS NOT NULL 
THEN - FOREACH predecessor_task_id IN ARRAY task.predecessor_ids LOOP - FOR pred_task IN SELECT * FROM resource_allocation.task_view tv WHERE tv.id = predecessor_task_id LOOP - IF pred_task.endtime > max_pred_endtime THEN - max_pred_endtime := pred_task.endtime; - END IF; - END LOOP; - END LOOP; - END IF; - END LOOP; - - --check if spec is before max_pred_endtime, correct if needed. - IF max_pred_endtime > '1900-01-01 00:00:00' THEN - SELECT c.value::integer INTO min_inter_task_delay FROM resource_allocation.config c WHERE c.name = 'min_inter_task_delay'; - IF min_inter_task_delay IS NULL THEN - min_inter_task_delay := 0; - END IF; - min_starttime := max_pred_endtime + min_inter_task_delay * interval '1 second'; - IF min_starttime > NEW.starttime THEN - NEW.starttime := min_starttime; - NEW.endtime := min_starttime + duration * interval '1 second'; - END IF; - END IF; - - --move successor tasks by same amount if needed - IF TG_OP = 'UPDATE' THEN - IF NEW.endtime <> OLD.endtime THEN - SELECT EXTRACT(epoch FROM age(NEW.endtime, OLD.endtime)) INTO moved_seconds; - FOR task IN SELECT * FROM resource_allocation.task_view tv WHERE tv.specification_id = NEW.id LOOP - IF task.successor_ids IS NOT NULL THEN - FOREACH successor_task_id IN ARRAY task.successor_ids LOOP - FOR suc_task IN SELECT * FROM resource_allocation.task_view tv WHERE tv.id = successor_task_id LOOP - UPDATE resource_allocation.specification SET (starttime, endtime) = (starttime + moved_seconds * interval '1 second', endtime + moved_seconds * interval '1 second') WHERE id = suc_task.specification_id; - END LOOP; - END LOOP; - END IF; - END LOOP; - END IF; - END IF; - -RETURN NEW; -END; -$BODY$ - LANGUAGE plpgsql VOLATILE - COST 100; -ALTER FUNCTION resource_allocation.on_insertupdate_check_specification_startendtimes() - OWNER TO resourceassignment; - -CREATE TRIGGER T_specification_insertupdate_check_startendtimes - BEFORE INSERT OR UPDATE - ON resource_allocation.specification - FOR EACH ROW - EXECUTE 
PROCEDURE resource_allocation.on_insertupdate_check_specification_startendtimes(); - ---------------------------------------------------------------------------------------------------------------------- - -COMMIT; diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py index 9a8d76581df1f8e01557a232d574b937830490c6..d5dbe9143a636ee4c5b0099a791a14edfbf5abdd 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py @@ -45,7 +45,7 @@ class RADBPGListener(PostgresListener): notification_prefix=DEFAULT_NOTIFICATION_PREFIX, dbcreds=None, broker=None): - super(RADBPGListener, self).__init__(dbcreds.host, dbcreds.database, dbcreds.user, dbcreds.password) + super(RADBPGListener, self).__init__(dbcreds=dbcreds) self.notification_prefix = notification_prefix self.event_bus = ToBus(notification_busname, broker=broker) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py new file mode 100755 index 0000000000000000000000000000000000000000..d4dd75f3121cfea14743be254e291b751a3dc3fb --- /dev/null +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py @@ -0,0 +1,171 @@ +#!/usr/bin/python + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id$ +import unittest +import psycopg2 +import os +from datetime import datetime, timedelta +from dateutil import parser +import logging + +logger = logging.getLogger(__name__) + +try: + import testing.postgresql +except ImportError as e: + print str(e) + print 'Please install python package testing.postgresql: sudo pip install testing.postgresql' + exit(3) # special lofar test exit code: skipped test + +from lofar.common.dbcredentials import Credentials +from lofar.sas.resourceassignment.database.radb import RADatabase + + +# Create shared test database for better performance +database_credentials = None +Postgresql = None + +def setUpModule(): + global database_credentials, Postgresql + database_credentials = Credentials() + Postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) + + +def tearDownModule(): + # clear cached database at end of tests + logger.info('tearDownModule') + Postgresql.clear_cache() + + +class RADBCommonTest(unittest.TestCase): + + def setUp(self): + logger.info('setting up test RA database...') + # connect to shared test db + self.postgresql = Postgresql() # fresh db instead of shared one: self.postgresql = testing.postgresql.Postgresql() + + # set up fixtures + # Note: In theory, this can be moved to the PostgresqlFactory call as kwarg 'on_initialized=populatedb' + # ...but for some reason that was much slower than keeping it here. + self._setup_database() + + # update credentials (e.g. 
port changes for each test) + database_credentials.host = self.postgresql.dsn()['host'] + database_credentials.database = self.postgresql.dsn()['database'] + database_credentials.port = self.postgresql.dsn()['port'] + + # connect with useradministration role for tests + self.connection = psycopg2.connect(host=database_credentials.host, + user=database_credentials.user, + password=database_credentials.password, + dbname=database_credentials.database, + port=database_credentials.port) + + # set up radb python module + self.radb = RADatabase(database_credentials, log_queries=True) + logger.info('...finished setting up test RA database') + + def tearDown(self): + logger.info('removing test RA database...') + self.connection.close() + # self.Postgresql.clear_cache() # for fresh db during setUp, do instead: + self.postgresql.stop() + + def _setup_database(self): + + # connect to db as root + conn = psycopg2.connect(**self.postgresql.dsn()) + cursor = conn.cursor() + + # set credentials to be used during tests + database_credentials.user = 'resourceassignment' + database_credentials.password = 'blabla' # cannot be empty... + + # create user role + # Note: NOSUPERUSER currently raises "permission denied for schema virtual_instrument" + # Maybe we want to sort out user creation and proper permissions in the sql scripts? 
+ query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % ( + database_credentials.user, + database_credentials.password) + cursor.execute(query) + + # populate db tables + # These are applied in given order to set up test db + # Note: cannot use create_and_populate_database.sql since '\i' is not understood by cursor.execute() + sql_basepath = os.environ['LOFARROOT'] + "/share/radb/sql/" + sql_createdb_paths = [sql_basepath + "create_database.sql", + sql_basepath + "/add_resource_allocation_statics.sql", + sql_basepath + "/add_virtual_instrument.sql", + sql_basepath + "/add_notifications.sql", + sql_basepath + "/add_functions_and_triggers.sql"] + + for sql_path in sql_createdb_paths: + logger.debug("setting up database. applying sql file: %s", sql_path) + with open(sql_path) as sql: + cursor.execute(sql.read()) + + cursor.close() + conn.commit() + conn.close() + + def _execute_query(self, query, fetch=False): + cursor = self.connection.cursor() + cursor.execute(query) + ret = None + if fetch: + ret = cursor.fetchall() + cursor.close() + self.connection.commit() + return ret + + # --- tests start here + + # integrity tests of postgres database itself + # + # Note: These are meant to make sure the setup generally works and all sql scripts were applied. + # I don't see much benefit in full coverage here since it should be all be tested through RADataBase functionality. + # Of course new tests can be added here where db functionality like triggers should be tested separately from the + # Python part of the job. + + # database created? + def test_select_tables_contains_tables_for_each_schema(self): + query = "SELECT table_schema,table_name FROM information_schema.tables" + fetch = self._execute_query(query, fetch=True) + self.assertTrue('resource_allocation' in str(fetch)) + self.assertTrue('resource_monitoring' in str(fetch)) + self.assertTrue('virtual_instrument' in str(fetch)) + + # resource allocation_statics there? 
+ def test_select_task_types_contains_obervation(self): + query = "SELECT * FROM resource_allocation.task_type" + fetch = self._execute_query(query, fetch=True) + self.assertTrue('observation' in str(fetch)) + + # virtual instrument there? + def test_select_virtualinstrument_units_contain_rcuboard(self): + query = "SELECT * FROM virtual_instrument.unit" + fetch = self._execute_query(query, fetch=True) + self.assertTrue('rcu_board' in str(fetch)) + + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + unittest.main() diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb.py index 9a8a714739b1ce7e9b5c7677311a6d3227ad5823..98170ed7993acf3371c59071338484814d4f1259 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb.py @@ -19,7 +19,6 @@ # $Id: $ import unittest -import testing.postgresql import psycopg2 import os from datetime import datetime, timedelta @@ -36,30 +35,15 @@ except ImportError as e: print 'Please install python package mock: sudo pip install mock' exit(3) # special lofar test exit code: skipped test -try: - import testing.postgresql -except ImportError as e: - print str(e) - print 'Please install python package testing.postgresql: sudo pip install testing.postgresql' - exit(3) # special lofar test exit code: skipped test - -from lofar.common.dbcredentials import Credentials -from lofar.sas.resourceassignment.database.radb import RADatabase -from lofar.common.postgres import PostgresListener - - -# Create shared test database for better performance -database_credentials = Credentials() -Postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) +import radb_common_testing +def setUpModule(): + return radb_common_testing.setUpModule() def tearDownModule(): - # 
clear cached database at end of tests - logger.info('tearDownModule') - Postgresql.clear_cache() - + return radb_common_testing.tearDownModule() -class ResourceAssignmentDatabaseTest(unittest.TestCase): +class ResourceAssignmentDatabaseTest(radb_common_testing.RADBCommonTest): class test_task: """ A lot of tests involve manipulation of a task (and its corresponding specification) in the RADB. A test task @@ -72,122 +56,6 @@ class ResourceAssignmentDatabaseTest(unittest.TestCase): content = "" cluster = "CEP4" - def setUp(self): - logger.info('setting up test RA database...') - # connect to shared test db - self.postgresql = Postgresql() # fresh db instead of shared one: self.postgresql = testing.postgresql.Postgresql() - - # set up fixtures - # Note: In theory, this can be moved to the PostgresqlFactory call as kwarg 'on_initialized=populatedb' - # ...but for some reason that was much slower than keeping it here. - self.populate_db() - - # update credentials (e.g. port changes for each test) - database_credentials.host = self.postgresql.dsn()['host'] - database_credentials.database = self.postgresql.dsn()['database'] - database_credentials.port = self.postgresql.dsn()['port'] - - # connect with useradministration role for tests - self.connection = psycopg2.connect(host=database_credentials.host, - user=database_credentials.user, - password=database_credentials.password, - dbname=database_credentials.database, - port=database_credentials.port) - - # set up PostgresListener for notifications: - self.listener = PostgresListener(host=database_credentials.host, - username=database_credentials.user, - password=database_credentials.password, - database=database_credentials.database, - port=database_credentials.port) - - # set up radb python module - self.radb = RADatabase(database_credentials, log_queries=True) - logger.info('...finished setting up test RA database') - - def tearDown(self): - logger.info('removing test RA database...') - self.connection.close() - # 
self.Postgresql.clear_cache() # for fresh db during setUp, do instead: - self.postgresql.stop() - - def populate_db(self): - - # connect to db as root - conn = psycopg2.connect(**self.postgresql.dsn()) - cursor = conn.cursor() - - # set credentials to be used during tests - database_credentials.user = 'resourceassignment' - database_credentials.password = 'blabla' # cannot be empty... - - # create user role - # Note: NOSUPERUSER currently raises "permission denied for schema virtual_instrument" - # Maybe we want to sort out user creation and proper permissions in the sql scripts? - query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % ( - database_credentials.user, - database_credentials.password) - cursor.execute(query) - - # populate db tables - # These are applied in given order to set up test db - # Note: cannot use create_and_populate_database.sql since '\i' is not understood by cursor.execute() - sql_basepath = os.environ['LOFARROOT'] + "/share/radb/sql/" - sql_createdb_paths = [sql_basepath + "create_database.sql", - sql_basepath + "/add_resource_allocation_statics.sql", - sql_basepath + "/add_virtual_instrument.sql", - sql_basepath + "/add_notifications.sql", - sql_basepath + "/add_functions_and_triggers.sql" - ] - - for sql_path in sql_createdb_paths: - with open(sql_path) as sql: - cursor.execute(sql.read()) - - cursor.close() - conn.commit() - conn.close() - - def _execute_query(self, query, fetch=False): - cursor = self.connection.cursor() - cursor.execute(query) - ret = None - if fetch: - ret = cursor.fetchall() - cursor.close() - self.connection.commit() - return ret - - # --- tests start here - - - # integrity tests of postgres database itself - # - # Note: These are meant to make sure the setup generally works and all sql scripts were applied. - # I don't see much benefit in full coverage here since it should be all be tested through RADataBase functionality. 
- # Of course new tests can be added here where db functionality like triggers should be tested separately from the - # Python part of the job. - - # database created? - def test_select_tables_contains_tables_for_each_schema(self): - query = "SELECT table_schema,table_name FROM information_schema.tables" - fetch = self._execute_query(query, fetch=True) - self.assertTrue('resource_allocation' in str(fetch)) - self.assertTrue('resource_monitoring' in str(fetch)) - self.assertTrue('virtual_instrument' in str(fetch)) - - # resource allocation_statics there? - def test_select_task_types_contains_obervation(self): - query = "SELECT * FROM resource_allocation.task_type" - fetch = self._execute_query(query, fetch=True) - self.assertTrue('observation' in str(fetch)) - - # virtual instrument there? - def test_select_virtualinstrument_units_contain_rcuboard(self): - query = "SELECT * FROM virtual_instrument.unit" - fetch = self._execute_query(query, fetch=True) - self.assertTrue('rcu_board' in str(fetch)) - def _insert_test_spec(self, starttime='2017-05-10 13:00:00', endtime='2017-05-10 14:00:00', @@ -650,8 +518,8 @@ class ResourceAssignmentDatabaseTest(unittest.TestCase): 'starttime': parser.parse(sample_starttime), 'endtime': parser.parse(sample_endtime), 'cluster': 'CEP4', - 'status': 'conflict', - 'status_id': 335, + 'status': 'approved', + 'status_id': 300, 'type': 'observation', 'type_id': 0, 'mom_id': 0, @@ -2005,8 +1873,13 @@ class ResourceAssignmentDatabaseTest(unittest.TestCase): 'claim_size': 96 } claim2_id = self.radb.insertResourceClaims(task2_id, [claim2], 'foo', 1, 1)[0] - self.radb.updateResourceClaims(claim2_id, status='claimed') + # task1 is partially in the way, so claim2 and task2 should have conflict status + self.assertEqual('conflict', self.radb.getResourceClaim(claim2_id)['status']) + self.assertEqual('conflict', self.radb.getTask(task2_id)['status']) + # updating claim2's status to claimed should not succeed + self.radb.updateResourceClaims(claim2_id, 
status='claimed') + self.assertEqual('conflict', self.radb.getResourceClaim(claim2_id)['status']) self.assertEqual('conflict', self.radb.getTask(task2_id)['status']) def test_double_claim_should_result_in_conflict_overlap_in_the_past_and_future(self): @@ -2169,6 +2042,441 @@ class ResourceAssignmentDatabaseTest(unittest.TestCase): self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + def test_dwellscheduler_high_low_priority_scenario(self): + """special test case to prove and solve bug: https://support.astron.nl/jira/browse/SW-426 + """ + #start with clean database + for spec in self.radb.getSpecifications(): + self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims + + ###################################################################################### + # setup phase, create tasks and claims. should just work. + # we replay a responsive telescope trigger event, as performed by the dwellscheduler. + # We have two tasks, one with high prio, and one with low. + # the high prio tasks will have a conflict with the low one. + ###################################################################################### + + base_time = datetime.utcnow() + # round to current full hour (for readability in logging) + base_time = base_time - timedelta(minutes=base_time.minute, seconds=base_time.second, microseconds=base_time.microsecond) + + RESOURCE_ID = 252 + resource_max_cap = self.radb.get_resource_claimable_capacity(RESOURCE_ID, base_time, base_time) + + # insert the 'low prio' spec, task... + spec_task_low = self.radb.insertSpecificationAndTask(1, 1, 'prescheduled', 'observation', + base_time + timedelta(minutes=5), + base_time + timedelta(minutes=10), 'foo', 'CEP4') + task_low_id = spec_task_low['task_id'] + task_low = self.radb.getTask(task_low_id) + + + # the dwellscheduler inserts the claim(s)... 
+ self.radb.insertResourceClaims(task_low_id, [{ 'resource_id': RESOURCE_ID, + 'starttime': task_low['starttime'], + 'endtime': task_low['endtime'], + 'status': 'tentative', + 'claim_size': resource_max_cap }], + 'user', 1) + + # ... and then the dwellscheduler sets the claims status to claimed... + self.radb.updateResourceClaims(where_task_ids=[task_low_id], status="claimed") + + logger.info("task_low's claims: %s", self.radb.getResourceClaims(task_ids=task_low_id)) + + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_low_id))) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_low_id, status='claimed'))) + + # ... and updates the spec's start and endtime to the already specified start and endtime + # (why? not needed, but should not do any harm either) + self.radb.updateSpecification(task_low['specification_id'], + starttime=task_low['starttime'], + endtime=task_low['endtime']) + + # finally make the task scheduled. Should still work. + self.radb.updateTask(task_low_id, task_status='scheduled') + + # so fo so good. Everything should be normal and fine. Let's check. + self.assertEqual('scheduled', self.radb.getTask(id=task_low_id)['status']) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_low_id, status='claimed'))) + + # now insert a second task, the so called high priority task, + # overlapping with the beginning of task_low + # so, the dwellscheduler finds task_low in task_high's higway + # so, task_low is aborted by the dwellscheduler (later in the code). + spec_task_high1 = self.radb.insertSpecificationAndTask(2, 2, 'approved', 'observation', + base_time, + base_time + timedelta(minutes=7), 'foo', 'CEP4') + task_high1_id = spec_task_high1['task_id'] + task_high1 = self.radb.getTask(task_high1_id) + + # the dwellscheduler inserts the claim(s)... 
+ self.radb.insertResourceClaims(task_high1_id, [{ 'resource_id': RESOURCE_ID, + 'starttime': task_high1['starttime'], + 'endtime': task_high1['endtime'], + 'status': 'tentative', + 'claim_size': resource_max_cap }], + 'user', 1) + + logger.info("task_high1's claims: %s", self.radb.getResourceClaims(task_ids=task_high1_id)) + + # we expect task_high1 to have on claim in conflict (with the claim of task_low) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_high1_id))) + self.assertEqual(0, len(self.radb.getResourceClaims(task_ids=task_high1_id, status='claimed'))) + self.assertEqual(1, len(self.radb.getResourceClaims(task_ids=task_high1_id, status='conflict'))) + + claim_in_conflict = self.radb.getResourceClaims(task_ids=task_high1_id, status='conflict')[0] + overlapping_claims = self.radb.get_overlapping_claims(claim_id=claim_in_conflict['id']) + logger.info('claim_in_conflict: %s', claim_in_conflict) + logger.info('overlapping_claims: %s', overlapping_claims) + self.assertEqual(1, len(overlapping_claims)) + self.assertEqual(task_low_id, overlapping_claims[0]['task_id']) + + ######################################################################## + # end of setup phase, now let's (try to) reproduce the bug... + # the dwellscheduler tries to abort task_low, to make room for task_high + # this caused an erroneous database exception on the production system + # but strangely enough we cannot repeat it here, + # even though we follow the same code path. + # + # This leads us to the conclusion that there was a strange set of + # circumstances in the data in the resource_usage table causing the bug in production. 
+ # + # While examining the bug we did discover some errors in the sql code, + # for which we added more additional tests: + # - test_task_releases_claims_when_set_to_approved + # - test_task_in_conflict_releases_claimed_claims + # - test_duplicate_full_claims_on_one_resource + # - test_task_and_claim_with_zero_duration + # - test_are_claims_in_conflict_released_by_removing_conclict_causing_claims + # + # Even though this test could not reproduce the error as it happenend on production, + # we'll keep it for future reference, and for future proof the the code still works. + # + ######################################################################## + + with mock.patch('lofar.sas.resourceassignment.database.radb.logger') as mocked_logger: + self.radb.updateTaskAndResourceClaims(task_id=task_low_id, task_status='aborted', + endtime=task_low['starttime']) # yes, the endtime is set to the starttime + + # on production the above call produce the following log line: + # 2018-06-29 09:46:16,240 ERROR Rolling back query='UPDATE resource_allocation.resource_claim SET (endtime) = (2018-06-29 11:59:17) WHERE task_id = 148052' due to error: 'duplicate key value violates unique constraint "usage_unique" + # but unfortunately this error is not reproduced here, + # the only thing we can test for is if a rollback occurs + + # test if there was a log line containing the database log message for 'claim starttime >= endtime' + self.assertTrue(len([ca for ca in mocked_logger.error.call_args_list + if 'Rolling back' in ca[0][0] + and 'claim starttime >= endtime' in ca[0][0]]) > 0) + + + def test_task_releases_claims_when_set_to_approved(self): + now = datetime.utcnow() + now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour + + result = self.radb.insertSpecificationAndTask(0, 0, 'approved', 'observation', + now, now+timedelta(hours=1), 'foo', 'CEP4') + self.assertTrue(result['inserted']) + self.assertIsNotNone(result['task_id']) + 
task_id = result['task_id'] + task = self.radb.getTask(task_id) + self.assertEqual('approved', task['status']) + + # select first (arbitrary) resource + resource = self.radb.getResources(include_availability=True)[0] + + self.radb.insertResourceClaim(resource['id'], task_id, task['starttime'], task['endtime'], + 0.5*resource['available_capacity'], 'foo', 1) + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + self.assertEqual(1, len(tentative_claims)) + + # set status to claimed + self.radb.updateResourceClaims(where_task_ids=task_id, status='claimed') + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + self.assertEqual(0, len(tentative_claims)) + self.assertEqual(1, len(claimed_claims)) + + # when setting the task to prescheduled and back to approved, all claimed claims should be released + self.radb.updateTask(task_id=task_id, task_status='prescheduled') + self.radb.updateTask(task_id=task_id, task_status='approved') + task = self.radb.getTask(task_id) + self.assertEqual('approved', task['status']) + + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + self.assertEqual(1, len(tentative_claims)) + self.assertEqual(0, len(claimed_claims)) + + + def test_task_in_conflict_releases_claimed_claims(self): + """tests whether a task with multiple claims releases the claimed claims when the task goes to conflict. + This is wanted behaviour, because when a single claim goes to conflict, then the task cannot be scheduled. + So, it makes sense to release the other already claimed claims for other tasks. 
+ """ + now = datetime.utcnow() + now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour + + result = self.radb.insertSpecificationAndTask(0, 0, 'approved', 'observation', + now, now+timedelta(hours=1), 'foo', 'CEP4') + self.assertTrue(result['inserted']) + self.assertIsNotNone(result['task_id']) + task_id = result['task_id'] + task = self.radb.getTask(task_id) + self.assertEqual('approved', task['status']) + + # select first two (arbitrary) resources + resources = self.radb.getResources(include_availability=True) + resource1 = resources[0] + resource2 = resources[1] + + # and insert a claim for each resource. + # one claim should fit and be set to claimed... + self.radb.insertResourceClaim(resource1['id'], task_id, task['starttime'], task['endtime'], + 0.5*resource1['available_capacity'], 'foo', 1) + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + self.assertEqual(1, len(tentative_claims)) + + # set status to claimed + self.radb.updateResourceClaims(where_task_ids=task_id, status='claimed') + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + conflict_claims = self.radb.getResourceClaims(task_ids=task_id, status='conflict') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + self.assertEqual(0, len(tentative_claims)) + self.assertEqual(0, len(conflict_claims)) + self.assertEqual(1, len(claimed_claims)) + + # the other claim should not fit and cause a conflict... + self.radb.insertResourceClaim(resource2['id'], task_id, task['starttime'], task['endtime'], + 2.0*resource2['available_capacity'], 'foo', 1) + + # ... result should be that the task also goes to conflict ... + task = self.radb.getTask(task_id) + self.assertEqual('conflict', task['status']) + + # ... 
and that all the task's claimed claims should be released + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + conflict_claims = self.radb.getResourceClaims(task_ids=task_id, status='conflict') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + self.assertEqual(1, len(tentative_claims)) + self.assertEqual(1, len(conflict_claims)) + self.assertEqual(0, len(claimed_claims)) + conflict_claim = conflict_claims[0] + + # a user/operator action could be to set the task back to approved + # all claimed claims which were already set back to tentative should still be tentative + # and claims in conflict should remain in conflict + self.radb.updateTask(task_id=task_id, task_status='approved') + task = self.radb.getTask(task_id) + self.assertEqual('approved', task['status']) + + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + conflict_claims = self.radb.getResourceClaims(task_ids=task_id, status='conflict') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + self.assertEqual(1, len(tentative_claims)) + self.assertEqual(1, len(conflict_claims)) + self.assertEqual(0, len(claimed_claims)) + self.assertEqual(conflict_claim['id'], conflict_claims[0]['id']) + + def test_duplicate_full_claims_on_one_resource(self): + """special test case to prove and solve bug: https://support.astron.nl/jira/browse/SW-426 + We found out that inserting two duplicate claims for one resource does not result in the two claims + having the conflict status, even though at least one of them should have it. 
+ """ + # start with clean database + for spec in self.radb.getSpecifications(): + self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims + + now = datetime.utcnow() + # round to next full hour (for readability in logging) + now = now - timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) + now = now + timedelta(hours=1) + + spec_task = self.radb.insertSpecificationAndTask(0, 0, 'approved', 'observation', + now, now + timedelta(minutes=10), + 'foo', 'CEP4') + + task_id = spec_task['task_id'] + task = self.radb.getTask(task_id) + + RESOURCE_ID = 252 + resource_max_cap = self.radb.get_resource_claimable_capacity(RESOURCE_ID, now, now) + + # create one claim, with claim_size of max capacity + claim = {'resource_id': RESOURCE_ID, + 'starttime': task['starttime'], + 'endtime': task['endtime'], + 'status': 'tentative', + 'claim_size': resource_max_cap} + + # insert the same claim twice, so two times the maxcap should not fit in total, + # but should fit if only one is claimed + self.radb.insertResourceClaims(task_id, [claim, claim], 'user', 1) + + # get the claims from the db, and check if there are 2, and check their status. + # Both should have tentative status, and not conflict status, + # because we did not claim anything yet. + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + conflict_claims = self.radb.getResourceClaims(task_ids=task_id, status='conflict') + self.assertEqual(2, len(tentative_claims)) + self.assertEqual(0, len(conflict_claims)) + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + + # let's try to claim them both in one call. + self.radb.updateResourceClaims(where_task_ids=[task_id], status='claimed') + + # Get the claims again from the db, and check if there are 2 + # one was successfully claimed, but put back to tentative, + # because for the other there was no room, so it should be in conflict. 
+ # As a result of the claim in conflict, the task is in conflict as well. + # And as a result of the task in conflict, all claimed claims are released and put back to tentative. + # And because the claimed claim was put back to tentative, this frees up room for the claim in conflict, + # which should not be in conflict anymore, but also tentative. + # (Yes, this is quite confusing, but correct.) + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + conflict_claims = self.radb.getResourceClaims(task_ids=task_id, status='conflict') + self.assertEqual(2, len(tentative_claims)) + self.assertEqual(0, len(claimed_claims)) + self.assertEqual(0, len(conflict_claims)) + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + + # let's try to claim only one. + # One should fit, but as a result the other won't fit anymore and will go to conflict + # which causes the task to go to conflict, which causes the claimed claim to be released, + # which frees up space to the other which will be put to tentative after being in conflict. + # (Yes, this is also quite confusing, but correct.) + self.radb.updateResourceClaim(tentative_claims[0]['id'], status='claimed') + tentative_claims = self.radb.getResourceClaims(task_ids=task_id, status='tentative') + claimed_claims = self.radb.getResourceClaims(task_ids=task_id, status='claimed') + conflict_claims = self.radb.getResourceClaims(task_ids=task_id, status='conflict') + self.assertEqual(2, len(tentative_claims)) + self.assertEqual(0, len(claimed_claims)) + self.assertEqual(0, len(conflict_claims)) + self.assertEqual('approved', self.radb.getTask(task_id)['status']) + + + def test_task_and_claim_with_zero_duration(self): + """claims which claim a resource and release it at the same moment are now allowed (it's a paradox). 
+ """ + # start with clean database + for spec in self.radb.getSpecifications(): + self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims + + now = datetime.utcnow() + + spec_task = self.radb.insertSpecificationAndTask(0, 0, 'approved', 'observation', + now, now, # tasks can have zero duration + 'foo', 'CEP4') + + task_id = spec_task['task_id'] + task = self.radb.getTask(task_id) + self.assertIsNotNone(task) + self.assertEqual(now, task['starttime']) + self.assertEqual(now, task['endtime']) + + with mock.patch('lofar.sas.resourceassignment.database.radb.logger') as mocked_logger: + RESOURCE_ID = 252 + inserted_claim_id = self.radb.insertResourceClaim(RESOURCE_ID, task_id, + now, now, # claims cannot have zero duration, test that! + 1, 'foo', 1) + self.assertIsNone(inserted_claim_id) + mocked_logger.error.assert_any_call('One or more claims could not be inserted. Rolling back.') + # test if there was a log line containing the database log message for 'claim starttime >= endtime' + self.assertTrue( + len([ca for ca in mocked_logger.error.call_args_list if 'claim starttime >= endtime' in ca[0][0]]) > 0) + + with mock.patch('lofar.sas.resourceassignment.database.radb.logger') as mocked_logger: + # try again, with multi-claim insert + inserted_claim_ids = self.radb.insertResourceClaims(task_id, [{'resource_id': RESOURCE_ID, + 'starttime': now, + 'endtime': now, + 'status': 'tentative', + 'claim_size': 1}], + 'foo', 1) + self.assertEqual([], inserted_claim_ids) + # c for c in mocked_logger.error.calls if c + mocked_logger.error.assert_any_call('One or more claims could not be inserted. 
Rolling back.') + # test if there was a log line containing the database log message for 'claim starttime >= endtime' + self.assertTrue( + len([ca for ca in mocked_logger.error.call_args_list if 'claim starttime >= endtime' in ca[0][0]]) > 0) + + def test_are_claims_in_conflict_released_by_removing_conflict_causing_claims(self): + """test whether a claim which is in conflict is put automatically to tentative when the conflict-causing claim is released. + """ + # start with clean database + for spec in self.radb.getSpecifications(): + self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims + + base_time = datetime.utcnow() + # round to current full hour (for readability in logging) + base_time = base_time - timedelta(minutes=base_time.minute, seconds=base_time.second, + microseconds=base_time.microsecond) + + RESOURCE_ID = 252 + resource_max_cap = self.radb.get_resource_claimable_capacity(RESOURCE_ID, base_time, base_time) + + # insert a first task and full claim on a resource... + spec_task1 = self.radb.insertSpecificationAndTask(0, 0, 'approved', 'observation', + base_time + timedelta(minutes=+0), + base_time + timedelta(minutes=+10), 'foo', 'CEP4') + self.assertTrue(spec_task1['inserted']) + task1_id = spec_task1['task_id'] + task1 = self.radb.getTask(task1_id) + self.assertEqual('approved', task1['status']) + + claim1_id = self.radb.insertResourceClaim(RESOURCE_ID, task1_id, + task1['starttime'], task1['endtime'], + resource_max_cap, 'foo', 1) + # claim it, and check it. Should succeed. + self.radb.updateResourceClaim(claim1_id, status='claimed') + self.assertEqual('claimed', self.radb.getResourceClaim(claim1_id)['status']) + + # insert second (partially overlapping) task and claim on same resource, which we expect to get a conflict status + # because the first claim already claims the resource fully. 
+ spec_task2 = self.radb.insertSpecificationAndTask(1, 1, 'approved', 'observation', + base_time + timedelta(minutes=+5), + base_time + timedelta(minutes=+15), 'foo', 'CEP4') + self.assertTrue(spec_task2['inserted']) + task2_id = spec_task2['task_id'] + task2 = self.radb.getTask(task2_id) + self.assertEqual('approved', task2['status']) + + claim2_id = self.radb.insertResourceClaim(RESOURCE_ID, task2_id, + task2['starttime'], task2['endtime'], + resource_max_cap, 'foo', 1) + self.assertEqual('conflict', self.radb.getResourceClaim(claim2_id)['status']) + self.assertEqual('conflict', self.radb.getTask(task2_id)['status']) + + # now let's see if releasing claim1 results in claim2 not having conflict state anymore + self.radb.updateResourceClaim(claim1_id, status='tentative') + self.assertEqual('tentative', self.radb.getResourceClaim(claim1_id)['status']) + self.assertEqual('tentative', self.radb.getResourceClaim(claim2_id)['status']) + self.assertEqual('approved', self.radb.getTask(task1_id)['status']) + self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + + # claim claim1 again, and check it. Should succeed. + # and claim2 should go to conflict again. 
+ self.radb.updateResourceClaim(claim1_id, status='claimed') + self.assertEqual('claimed', self.radb.getResourceClaim(claim1_id)['status']) + self.assertEqual('conflict', self.radb.getResourceClaim(claim2_id)['status']) + self.assertEqual('approved', self.radb.getTask(task1_id)['status']) + self.assertEqual('conflict', self.radb.getTask(task2_id)['status']) + + # this time, resolve the conflict by shifting the endtime of claim1 + self.radb.updateResourceClaim(claim1_id, endtime=task2['starttime']) + self.assertEqual('claimed', self.radb.getResourceClaim(claim1_id)['status']) + self.assertEqual('tentative', self.radb.getResourceClaim(claim2_id)['status']) + self.assertEqual('approved', self.radb.getTask(task1_id)['status']) + self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + + # and finally, we should be able to claim claim2 as well + self.radb.updateResourceClaim(claim2_id, status='claimed') + self.assertEqual('claimed', self.radb.getResourceClaim(claim1_id)['status']) + self.assertEqual('claimed', self.radb.getResourceClaim(claim2_id)['status']) + self.assertEqual('approved', self.radb.getTask(task1_id)['status']) + self.assertEqual('approved', self.radb.getTask(task2_id)['status']) + if __name__ == "__main__": os.environ['TZ'] = 'UTC' diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/cleanupcontroller.js b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/cleanupcontroller.js index e973db2bd649e778de1b06d5b526ebe0ad28e213..d75abb67db0961c3af0ac15fa9cb8ca3f4795341 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/cleanupcontroller.js +++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/cleanupcontroller.js @@ -274,6 +274,7 @@ cleanupControllerMod.controller('CleanupController', ['$scope', '$uibModal', '$m </p>\ </div>\ <div class="modal-footer">\ + <span style="margin-right:8px">1KB=1000bytes</span>\ <button 
class="btn btn-primary glyphicon glyphicon-level-up" type="button" ng-click="up()" title="Up one level" ng-if="watchedObjectType!=\'projects\'"></button>\ <button class="btn btn-primary" type="button" autofocus ng-click="ok()">OK</button>\ </div>', diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/webservice.py b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/webservice.py index 461f8860c5130c9854abff54e09ac904a6dd4bf0..7be3f99d59fda6da50d64ab33142d99c35197270 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/webservice.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/webservice.py @@ -1051,7 +1051,7 @@ def main(): global curpc curpc = CleanupRPC(busname=options.cleanup_busname, servicename=options.cleanup_servicename, broker=options.broker) global sqrpc - sqrpc = StorageQueryRPC(busname=options.storagequery_busname, servicename=options.storagequery_servicename, broker=options.broker) + sqrpc = StorageQueryRPC(busname=options.storagequery_busname, servicename=options.storagequery_servicename, timeout=10, broker=options.broker) global momqueryrpc momqueryrpc = MoMQueryRPC(busname=options.mom_query_busname, servicename=options.mom_query_servicename, timeout=10, broker=options.broker) global changeshandler diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_pipeline_estimator.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_pipeline_estimator.py index 0c613131b59ca744e813d1b603d245193e4419ba..8a93e42233f734bd2a4ed3282ed936e9a72d000e 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_pipeline_estimator.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_pipeline_estimator.py @@ -1,4 +1,4 @@ -# base_resource_estimator.py +# base_pipeline_estimator.py # # Copyright (C) 2016 # ASTRON (Netherlands Institute for Radio Astronomy) @@ -59,6 +59,16 @@ class 
BasePipelineResourceEstimator(BaseResourceEstimator): logger.error("Cannot have multiple pipeline output identifications. Dropping all but the first in: %s", identifications) # see doc string return identifications[0] + def _getStorageManagerSizeMultiplier(self, parset): + """Tries to read the storagemanager set in MoM, or otherwise from the OTDB key from the 'parset'. + The Specification class puts the right one in a generic storagemanager key. + If the storagemanager is dysco, then return a special multiplier, otherwise return a default multiplier of 1. + """ + storagemanager = parset.getString("storagemanager", "") + if storagemanager == "dysco": #Needs to match with the XML Generator + return 0.5 + return 1 + def verify(self, parset, predecessor_estimates): super(BasePipelineResourceEstimator, self).verify(parset, predecessor_estimates) diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py index cec8bd9a975df1d5dda1abce5432485eaca2e15c..77d0a9ba088c66d490ce9d0de86f84c3cd83b6c9 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py @@ -60,7 +60,8 @@ class BaseResourceEstimator(object): logger.warning("startTime is not before endTime") return 1.0 ##TODO To prevent divide by zero later return totalSeconds(endTime - startTime) - #TODO check if this makes duration = int(parset.get('duration', 0)) as a key reduntant? + # TODO check if this makes duration = int(parset.get('duration', 0)) as a key reduntant? + # TODO Should probably be refactored to use the Specification, maybe when RA needs to change for OTDB replacement def _calculate(self, parset, predecessor_estimates=[]): raise NotImplementedError('calculate() in base class is called. 
Please implement calculate() in your subclass') diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py index 790019962c6344b2b019c3beb18ab60455c21905..99b59fb4f801ea2d7cbb873fe07c1e199a6bfc68 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py @@ -36,7 +36,7 @@ class CalibrationPipelineResourceEstimator(BasePipelineResourceEstimator): """ def __init__(self): logger.info("init CalibrationPipelineResourceEstimator") - BasePipelineResourceEstimator.__init__(self, name='pipeline') #FIXME name='calibration_pipeline' + BasePipelineResourceEstimator.__init__(self, name='calibration_pipeline') self.required_keys = ('Observation.startTime', 'Observation.stopTime', DATAPRODUCTS + 'Input_Correlated.enabled', @@ -215,7 +215,8 @@ class CalibrationPipelineResourceEstimator(BasePipelineResourceEstimator): # With reduction_factor 1, computed output size increases by 53%... Casacore storage managers may change size, but that much?!? # If you can figure out what/how, please fix this calculation. Avoid unnamed magic values and document why! 
logger.debug("calculate correlated data size") - new_size = uv_input_file_size / float(reduction_factor) + size_multiplier = self._getStorageManagerSizeMultiplier(parset) + new_size = uv_input_file_size / float(reduction_factor) * size_multiplier uv_output_file_size = int(new_size + new_size / 64.0 * (1.0 + reduction_factor) + new_size / 2.0) nr_output_files = nr_input_files # pure 'map' (bijective) operation, no split or reduce diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/image_pipeline.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/image_pipeline.py index ea0c31da256f0eae40d566a80314b50433c7c353..abf4e47610d7459b517d927240302f97d0d15a02 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/image_pipeline.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/image_pipeline.py @@ -37,7 +37,7 @@ class ImagePipelineResourceEstimator(BasePipelineResourceEstimator): """ def __init__(self): logger.info("init ImagePipelineResourceEstimator") - BasePipelineResourceEstimator.__init__(self, name='pipeline') #FIXME name='imaging_pipeline' + BasePipelineResourceEstimator.__init__(self, name='imaging_pipeline') self.required_keys = ('Observation.startTime', 'Observation.stopTime', DATAPRODUCTS + 'Input_Correlated.enabled', @@ -105,6 +105,8 @@ class ImagePipelineResourceEstimator(BasePipelineResourceEstimator): nr_images = nr_input_subbands / (subbands_per_image * slices_per_image) logger.debug("calculate sky image data size") + # Not used as the current calculation is bogus any way + # size_multiplier = self._getStorageManagerSizeMultiplier(parset) img_file_size = 1000 # TODO: 1 kB was hardcoded in the Scheduler logger.info("sky_images: {} files {} bytes each".format(nr_images, img_file_size)) diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/longbaseline_pipeline.py 
b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/longbaseline_pipeline.py index 0e17f26a0bba9d512590431c688d821bae206ab3..8275a62e15f7d0d0fe65e719383048d7965d0c70 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/longbaseline_pipeline.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/longbaseline_pipeline.py @@ -36,7 +36,7 @@ class LongBaselinePipelineResourceEstimator(BasePipelineResourceEstimator): """ def __init__(self): logger.info("init LongBaselinePipelineResourceEstimator") - BasePipelineResourceEstimator.__init__(self, name='pipeline') #FIXME name='longbaseline_pipeline' + BasePipelineResourceEstimator.__init__(self, name='longbaseline_pipeline') self.required_keys = ('Observation.startTime', 'Observation.stopTime', DATAPRODUCTS + 'Input_Correlated.enabled', @@ -107,6 +107,8 @@ class LongBaselinePipelineResourceEstimator(BasePipelineResourceEstimator): nr_output_files = nr_input_files / (subbands_per_subbandgroup * subbandgroups_per_ms) logger.debug("calculate correlated data size") + # Not used as the current calculation is bogus any way + # size_multiplier = self._getStorageManagerSizeMultiplier(parset) uv_output_file_size = 1000 # TODO: 1 kB was hardcoded in the Scheduler # Computing start_sbg_nr in the same way as nr_output_files may not always work out as perhaps originally intended, diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py index 01143fc2652b04de2f9fc98175cf8f2662a11932..b4144fc3a10c3e3174b149258aaf5313ffb05689 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py @@ -68,7 +68,6 @@ class ResourceEstimatorHandler(MessageHandlerInterface): #TODO use something else than .values()[0]['estimates'] ?? 
def get_subtree_estimate(self, specification_tree): ''' Returns a dict { 'estimates': estimates, 'errors': [errors] }. ''' - otdb_id = specification_tree['otdb_id'] parset = specification_tree['specification'] predecessors = specification_tree['predecessors'] diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_calibration_pipeline_dysco b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_calibration_pipeline_dysco new file mode 100644 index 0000000000000000000000000000000000000000..ba6c7b6e380588d0efc8f28dae52765360a89b26 --- /dev/null +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.in_calibration_pipeline_dysco @@ -0,0 +1,59 @@ +{ + 'Observation.DataProducts.Output_InstrumentModel.enabled': True, + 'Observation.DataProducts.Output_CoherentStokes.storageClusterName': '', + 'Observation.stopTime': '2016-11-22 11:32:21', + 'Observation.VirtualInstrument.stationList': [ + ], + 'Observation.DataProducts.Input_CoherentStokes.enabled': False, + 'Observation.DataProducts.Output_CoherentStokes.enabled': False, + 'Observation.DataProducts.Output_Correlated.storageClusterName': 'CEP4', + 'Observation.DataProducts.Input_Correlated.identifications': [ + 'mom.G732487.B0.1.C.SAP000.uv.dps' + ], + 'Observation.DataProducts.Output_SkyImage.storageClusterName': 'CEP4', + 'Observation.DataProducts.Output_InstrumentModel.identifications': [ + 'mom.G732487.B0.1.CPC.inst.dps' + ], + 'Observation.antennaSet': 'LBA_INNER', + 'Observation.DataProducts.Output_Pulsar.identifications': [ + ], + 'Observation.nrBitsPerSample': '16', + 'Observation.ObservationControl.PythonControl.LongBaseline.subbandgroups_per_ms': '1', + 'Observation.DataProducts.Output_IncoherentStokes.enabled': False, + 'Observation.DataProducts.Input_IncoherentStokes.enabled': False, + 'Observation.DataProducts.Input_Correlated.enabled': True, + 
'Observation.DataProducts.Output_Pulsar.enabled': False, + 'Observation.DataProducts.Output_Correlated.identifications': [ + 'mom.G732487.B0.1.CPC.uv.dps' + ], + 'Observation.DataProducts.Input_CoherentStokes.identifications': [ + ], + 'Observation.DataProducts.Output_InstrumentModel.storageClusterName': 'CEP4', + 'Observation.DataProducts.Input_InstrumentModel.enabled': False, + 'Observation.DataProducts.Output_SkyImage.enabled': False, + 'Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep': '4', + 'Observation.ObservationControl.PythonControl.DPPP.storagemanager.name': 'dysco', + 'storagemanager' : 'dysco', + 'Version.number': '33385', + 'Observation.DataProducts.Output_Pulsar.storageClusterName': 'CEP4', + 'Observation.momID': '732490', + 'Observation.DataProducts.Input_InstrumentModel.identifications': [ + ], + 'Observation.DataProducts.Output_SkyImage.identifications': [ + ], + 'Observation.startTime': '2016-11-22 10:18:23', + 'Observation.ObservationControl.PythonControl.LongBaseline.subbands_per_subbandgroup': '1', + 'Observation.nrBeams': '0', + 'Observation.Scheduler.taskDuration': '4438', + 'Observation.ObservationControl.PythonControl.DPPP.demixer.timestep': '1', + 'Observation.DataProducts.Output_IncoherentStokes.identifications': [ + ], + 'Observation.DataProducts.Input_IncoherentStokes.identifications': [ + ], + 'Observation.DataProducts.Output_CoherentStokes.identifications': [ + ], + 'Observation.DataProducts.Output_Correlated.enabled': True, + 'Observation.DataProducts.Output_IncoherentStokes.storageClusterName': '', + 'Observation.sampleClock': '200' +} + diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_beam_observation b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_beam_observation index 30041789b6edb99930be604593cc11ad2568bf5d..02fd17e72b64ce1c17a5195e917b8f525ff75244 100644 --- 
a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_beam_observation +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_beam_observation @@ -1 +1 @@ -"{'errors': [], 'estimates': [{'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 73, 'output_files': {'cs': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP0.1296.SAP0.obs.cs', 'sap_nr': 0, 'properties': {'nr_of_cs_stokes': 1, 'cs_otdb_id': 1, 'cs_file_size': 76441190400, 'nr_of_cs_parts': 1, 'is_tab_nr': 12, 'nr_of_cs_files': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 73, 'output_files': {'cs': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP1.1296.SAP1.obs.cs', 'sap_nr': 1, 'properties': {'nr_of_cs_stokes': 1, 'cs_otdb_id': 1, 'cs_file_size': 76441190400, 'nr_of_cs_parts': 1, 'is_tab_nr': 12, 'nr_of_cs_files': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 73, 'output_files': {'cs': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP2.1296.SAP2.obs.cs', 'sap_nr': 2, 'properties': {'nr_of_cs_stokes': 1, 'cs_otdb_id': 1, 'cs_file_size': 76441190400, 'nr_of_cs_parts': 1, 'is_tab_nr': 12, 'nr_of_cs_files': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 1, 'output_files': {'is': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP0.1296.SAP0.obs.is', 'sap_nr': 0, 'properties': {'is_tab_nr': 12, 'is_file_size': 76441190400, 'nr_of_is_files': 1, 'nr_of_is_stokes': 1, 'is_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 1, 'output_files': {'is': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP1.1296.SAP1.obs.is', 'sap_nr': 1, 'properties': 
{'is_tab_nr': 12, 'is_file_size': 76441190400, 'nr_of_is_files': 1, 'nr_of_is_stokes': 1, 'is_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 1, 'output_files': {'is': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP2.1296.SAP2.obs.is', 'sap_nr': 2, 'properties': {'is_tab_nr': 12, 'is_file_size': 76441190400, 'nr_of_is_files': 1, 'nr_of_is_stokes': 1, 'is_otdb_id': 1}}]}}, {'root_resource_group': 'CS004', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS005', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS003', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS002', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 
'resource_count': 1}, {'root_resource_group': 'CS002RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS007', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS006', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}]}" +"{'errors': [], 'estimates': [{'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 73, 'output_files': {'cs': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP0.1296.SAP0.obs.cs', 'sap_nr': 0, 'properties': {'nr_of_cs_stokes': 1, 'cs_otdb_id': 1, 'cs_file_size': 76441190400, 'nr_of_cs_parts': 1, 'is_tab_nr': 12, 'nr_of_cs_files': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 73, 'output_files': {'cs': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP1.1296.SAP1.obs.cs', 'sap_nr': 1, 'properties': {'nr_of_cs_stokes': 1, 'cs_otdb_id': 1, 'cs_file_size': 76441190400, 'nr_of_cs_parts': 1, 'is_tab_nr': 12, 'nr_of_cs_files': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 73, 'output_files': {'cs': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP2.1296.SAP2.obs.cs', 'sap_nr': 2, 'properties': 
{'nr_of_cs_stokes': 1, 'cs_otdb_id': 1, 'cs_file_size': 76441190400, 'nr_of_cs_parts': 1, 'is_tab_nr': 12, 'nr_of_cs_files': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 1, 'output_files': {'is': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP0.1296.SAP0.obs.is', 'sap_nr': 0, 'properties': {'is_tab_nr': 12, 'is_file_size': 76441190400, 'nr_of_is_files': 1, 'nr_of_is_stokes': 1, 'is_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 1, 'output_files': {'is': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP1.1296.SAP1.obs.is', 'sap_nr': 1, 'properties': {'is_tab_nr': 12, 'is_file_size': 76441190400, 'nr_of_is_files': 1, 'nr_of_is_stokes': 1, 'is_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 169869312, 'storage': 76441190400}, 'resource_count': 1, 'output_files': {'is': [{'identification': 'mom.G735371.LOTAAS-P1296B-SAP2.1296.SAP2.obs.is', 'sap_nr': 2, 'properties': {'is_tab_nr': 12, 'is_file_size': 76441190400, 'nr_of_is_files': 1, 'nr_of_is_stokes': 1, 'is_otdb_id': 1}}]}}, {'root_resource_group': 'CS004', 'station': 'CS004', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP0', 'station': 'CS004', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP1', 'station': 'CS004', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS005', 'station': 'CS005', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP0', 'station': 'CS005', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 
'resource_count': 1}, {'root_resource_group': 'CS005RSP1', 'station': 'CS005', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS003', 'station': 'CS003', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP0', 'station': 'CS003', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP1', 'station': 'CS003', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS002', 'station': 'CS002', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP0', 'station': 'CS002', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP1', 'station': 'CS002', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS007', 'station': 'CS007', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP0', 'station': 'CS007', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP1', 'station': 'CS007', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS006', 'station': 'CS006', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP0', 'station': 'CS006', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP1', 'station': 'CS006', 
'resource_types': {'bandwidth': 3000000000, 'rsp': 3888}, 'resource_count': 1}]}" diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_calibration_pipeline_dysco b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_calibration_pipeline_dysco new file mode 100644 index 0000000000000000000000000000000000000000..d026b12605d766df93e9be818d86320df90b26bb --- /dev/null +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_calibration_pipeline_dysco @@ -0,0 +1 @@ +"{'errors': [], 'estimates': [{'root_resource_group': 'CEP4', 'output_files': {'uv': [{'identification': 'mom.G732487.B0.1.CPC.uv.dps', 'properties': {'start_sb_nr': 0, 'nr_of_uv_files': 120, 'uv_file_size': 713704064, 'uv_otdb_id': 1}}], 'im': [{'identification': 'mom.G732487.B0.1.CPC.inst.dps', 'properties': {'im_otdb_id': 1, 'start_sb_nr': 0, 'im_file_size': 1000, 'nr_of_im_files': 120}}]}, 'resource_types': {'bandwidth': 154384151, 'storage': 85644607680}, 'resource_count': 1, 'input_files': {'uv': [{'identification': 'mom.G732487.B0.1.C.SAP000.uv.dps', 'sap_nr': 0, 'properties': {'nr_of_uv_files': 120, 'start_sb_nr': 0, 'uv_file_size': 3617984960, 'uv_otdb_id': 2}}]}}]}" diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_interferometer_observation b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_interferometer_observation index 95c38ba7785b87352cd7c1511e92a6a7bbf7a2d8..4756e716f3cad7d54d08157169720cf87ed5b678 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_interferometer_observation +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/data_sets/t_resource_estimator.out_interferometer_observation @@ -1 +1 @@ -"{'errors': [], 'estimates': [{'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 45688942, 
'storage': 164480190400}, 'resource_count': 1, 'output_files': {'uv': [{'identification': 'mom.G737227.B1.1.T.SAP000.uv.dps', 'sap_nr': 0, 'properties': {'start_sb_nr': 0, 'nr_of_uv_files': 1, 'uv_file_size': 164480190400, 'uv_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 45688942, 'storage': 164480190400}, 'resource_count': 243, 'output_files': {'uv': [{'identification': 'mom.G737227.B1.1.T.SAP001.uv.dps', 'sap_nr': 1, 'properties': {'start_sb_nr': 1, 'nr_of_uv_files': 1, 'uv_file_size': 164480190400, 'uv_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 45688942, 'storage': 164480190400}, 'resource_count': 243, 'output_files': {'uv': [{'identification': 'mom.G737227.B1.1.T.SAP002.uv.dps', 'sap_nr': 2, 'properties': {'start_sb_nr': 244, 'nr_of_uv_files': 1, 'uv_file_size': 164480190400, 'uv_otdb_id': 1}}]}}, {'root_resource_group': 'CS001', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS001RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS001RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS002', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS003', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 
'resource_count': 1}, {'root_resource_group': 'CS003RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS004', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS005', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS006', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS007', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS011', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, 
{'root_resource_group': 'CS011RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS011RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS013', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS013RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS013RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS017', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS017RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS017RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS021', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS021RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS021RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS024', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS024RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS024RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS026', 'resource_types': {'rcu': 
'111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS026RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS026RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS028', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS028RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS028RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS030', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS030RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS030RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS031', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS031RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS031RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS032', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS032RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS032RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 
'resource_count': 1}, {'root_resource_group': 'CS101', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS101RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS101RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS103', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS103RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS103RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS201', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS201RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS201RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS301', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS301RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS301RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS302', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS302RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, 
{'root_resource_group': 'CS302RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS401', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS401RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS401RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS501', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS501RSP0', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS501RSP1', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE602', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE602RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE603', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE603RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE605', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 
'resource_count': 1}, {'root_resource_group': 'DE605RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE609', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE609RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'FR606', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'FR606RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'PL610', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'PL610RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'PL611', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'PL611RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'PL612', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, 
{'root_resource_group': 'PL612RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS106', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS106RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS205', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS205RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS208', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS208RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS210', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS210RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS305', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS305RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS306', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS306RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS307', 'resource_types': {'rcu': 
'111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS307RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS310', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS310RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS406', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS406RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS407', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS407RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS409', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS409RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS503', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS503RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS508', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS508RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 
3896}, 'resource_count': 1}, {'root_resource_group': 'RS509', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS509RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'SE607', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'SE607RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'UK608', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'UK608RSP', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}]}" +"{'errors': [], 'estimates': [{'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 45688942, 'storage': 164480190400}, 'resource_count': 1, 'output_files': {'uv': [{'identification': 'mom.G737227.B1.1.T.SAP000.uv.dps', 'sap_nr': 0, 'properties': {'start_sb_nr': 0, 'nr_of_uv_files': 1, 'uv_file_size': 164480190400, 'uv_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 45688942, 'storage': 164480190400}, 'resource_count': 243, 'output_files': {'uv': [{'identification': 'mom.G737227.B1.1.T.SAP001.uv.dps', 'sap_nr': 1, 'properties': {'start_sb_nr': 1, 'nr_of_uv_files': 1, 'uv_file_size': 164480190400, 'uv_otdb_id': 1}}]}}, {'root_resource_group': 'CEP4', 'resource_types': {'bandwidth': 45688942, 'storage': 164480190400}, 'resource_count': 243, 'output_files': {'uv': [{'identification': 'mom.G737227.B1.1.T.SAP002.uv.dps', 'sap_nr': 
2, 'properties': {'start_sb_nr': 244, 'nr_of_uv_files': 1, 'uv_file_size': 164480190400, 'uv_otdb_id': 1}}]}}, {'root_resource_group': 'CS001', 'station': 'CS001', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS001RSP0', 'station': 'CS001', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS001RSP1', 'station': 'CS001', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS002', 'station': 'CS002', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP0', 'station': 'CS002', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS002RSP1', 'station': 'CS002', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS003', 'station': 'CS003', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP0', 'station': 'CS003', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS003RSP1', 'station': 'CS003', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS004', 'station': 'CS004', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP0', 'station': 'CS004', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS004RSP1', 'station': 'CS004', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 
'resource_count': 1}, {'root_resource_group': 'CS005', 'station': 'CS005', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP0', 'station': 'CS005', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS005RSP1', 'station': 'CS005', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS006', 'station': 'CS006', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP0', 'station': 'CS006', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS006RSP1', 'station': 'CS006', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS007', 'station': 'CS007', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP0', 'station': 'CS007', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS007RSP1', 'station': 'CS007', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS011', 'station': 'CS011', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS011RSP0', 'station': 'CS011', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS011RSP1', 'station': 'CS011', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS013', 'station': 'CS013', 'resource_types': 
{'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS013RSP0', 'station': 'CS013', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS013RSP1', 'station': 'CS013', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS017', 'station': 'CS017', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS017RSP0', 'station': 'CS017', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS017RSP1', 'station': 'CS017', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS021', 'station': 'CS021', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS021RSP0', 'station': 'CS021', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS021RSP1', 'station': 'CS021', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS024', 'station': 'CS024', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS024RSP0', 'station': 'CS024', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS024RSP1', 'station': 'CS024', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS026', 'station': 'CS026', 'resource_types': {'rcu': 
'111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS026RSP0', 'station': 'CS026', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS026RSP1', 'station': 'CS026', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS028', 'station': 'CS028', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS028RSP0', 'station': 'CS028', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS028RSP1', 'station': 'CS028', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS030', 'station': 'CS030', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS030RSP0', 'station': 'CS030', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS030RSP1', 'station': 'CS030', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS031', 'station': 'CS031', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS031RSP0', 'station': 'CS031', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS031RSP1', 'station': 'CS031', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS032', 'station': 'CS032', 'resource_types': {'rcu': 
'111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS032RSP0', 'station': 'CS032', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS032RSP1', 'station': 'CS032', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS101', 'station': 'CS101', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS101RSP0', 'station': 'CS101', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS101RSP1', 'station': 'CS101', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS103', 'station': 'CS103', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS103RSP0', 'station': 'CS103', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS103RSP1', 'station': 'CS103', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS201', 'station': 'CS201', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS201RSP0', 'station': 'CS201', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS201RSP1', 'station': 'CS201', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS301', 'station': 'CS301', 'resource_types': {'rcu': 
'111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS301RSP0', 'station': 'CS301', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS301RSP1', 'station': 'CS301', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS302', 'station': 'CS302', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS302RSP0', 'station': 'CS302', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS302RSP1', 'station': 'CS302', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS401', 'station': 'CS401', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS401RSP0', 'station': 'CS401', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS401RSP1', 'station': 'CS401', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS501', 'station': 'CS501', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'CS501RSP0', 'station': 'CS501', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'CS501RSP1', 'station': 'CS501', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE602', 'station': 'DE602', 'resource_types': {'rcu': 
'111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE602RSP', 'station': 'DE602', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE603', 'station': 'DE603', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE603RSP', 'station': 'DE603', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE605', 'station': 'DE605', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE605RSP', 'station': 'DE605', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'DE609', 'station': 'DE609', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'DE609RSP', 'station': 'DE609', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'FR606', 'station': 'FR606', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'FR606RSP', 
'station': 'FR606', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'PL610', 'station': 'PL610', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'PL610RSP', 'station': 'PL610', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'PL611', 'station': 'PL611', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'PL611RSP', 'station': 'PL611', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'PL612', 'station': 'PL612', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'PL612RSP', 'station': 'PL612', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS106', 'station': 'RS106', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS106RSP', 'station': 'RS106', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS205', 'station': 'RS205', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS205RSP', 'station': 'RS205', 
'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS208', 'station': 'RS208', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS208RSP', 'station': 'RS208', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS210', 'station': 'RS210', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS210RSP', 'station': 'RS210', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS305', 'station': 'RS305', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS305RSP', 'station': 'RS305', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS306', 'station': 'RS306', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS306RSP', 'station': 'RS306', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS307', 'station': 'RS307', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS307RSP', 'station': 'RS307', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS310', 'station': 'RS310', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 
'RS310RSP', 'station': 'RS310', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS406', 'station': 'RS406', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS406RSP', 'station': 'RS406', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS407', 'station': 'RS407', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS407RSP', 'station': 'RS407', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS409', 'station': 'RS409', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS409RSP', 'station': 'RS409', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS503', 'station': 'RS503', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS503RSP', 'station': 'RS503', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS508', 'station': 'RS508', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 1}, {'root_resource_group': 'RS508RSP', 'station': 'RS508', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'RS509', 'station': 'RS509', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111100'}, 'resource_count': 
1}, {'root_resource_group': 'RS509RSP', 'station': 'RS509', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'SE607', 'station': 'SE607', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'SE607RSP', 'station': 'SE607', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}, {'root_resource_group': 'UK608', 'station': 'UK608', 'resource_types': {'rcu': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'}, 'resource_count': 1}, {'root_resource_group': 'UK608RSP', 'station': 'UK608', 'resource_types': {'bandwidth': 3000000000, 'rsp': 3896}, 'resource_count': 1}]}" diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py index 9cc6110d92d89c9b7886dd9e5ab382dc4866dec5..ce10c54328427b0f991939ad64442c0adac93495 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py @@ -7,10 +7,8 @@ import logging from lofar.sas.resourceassignment.resourceassignmentestimator.service import ResourceEstimatorHandler import mock -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) logger = logging.getLogger(__name__) - # Set to True to (re-)generate golden outputs for all data sets in ./data_sets that start with # "t_resource_estimator.in_" # ---------------------------------------------------------------------------------------------------------------------- @@ -201,6 +199,36 @@ class 
TestEstimationsAgainstGoldenOutput(unittest.TestCase): self.assertEqual(len(error_messages), 0, "\nThe uut reported errors:\n" + '\n- '.join(error_messages)) self.assertEqual(self.get_datastructure_as_string(estimation), golden_estimation) + @mock.patch('lofar.sas.resourceassignment.resourceassignmentestimator.resource_estimators.reservation.AntennaSetsParser') + @mock.patch('lofar.sas.resourceassignment.resourceassignmentestimator.resource_estimators.observation.AntennaSetsParser') + def test_estimate_for_calibration_pipeline_dysco(self, mock_asp, mock_notusedhere): + """ Verify estimation for a calibration pipeline specification tree against the golden output """ + # Arrange + data_set_filepath = os.path.join(self.data_sets_dir, 't_resource_estimator.in_calibration_pipeline_dysco') + golden_output_filepath = os.path.join(self.data_sets_dir, 't_resource_estimator.out_calibration_pipeline_dysco') + task_type = 'pipeline' + task_subtype = 'calibration pipeline' + specification_tree = self.get_specification_tree(data_set_filepath, task_type, task_subtype) + + self.add_predecessor_to_specification_tree(os.path.join(self.data_sets_dir, + 't_resource_estimator.in_calibration_pipeline_predecessor_558022'), # predecessor also used for imaging pipeline test + 'observation', + 'bfmeasurement', + specification_tree['predecessors']) + + uut = ResourceEstimatorHandler() + golden_estimation = self.get_golden_estimate(golden_output_filepath, + uut._get_estimated_resources, + specification_tree) + + # Act + estimation = uut.handle_message({'specification_tree': specification_tree}) + + # Assert + error_messages = self.get_uut_errors(estimation) + self.assertEqual(len(error_messages), 0, "\nThe uut reported errors:\n" + '\n- '.join(error_messages)) + self.assertEqual(self.get_datastructure_as_string(estimation), golden_estimation) + @mock.patch('lofar.sas.resourceassignment.resourceassignmentestimator.resource_estimators.reservation.AntennaSetsParser') 
@mock.patch('lofar.sas.resourceassignment.resourceassignmentestimator.resource_estimators.observation.AntennaSetsParser') def test_estimate_for_imaging_pipeline(self, mock_asp, mock_notusedhere): @@ -408,7 +436,7 @@ class TestEstimationsAgainstGoldenOutput(unittest.TestCase): """ return uut_return_value['errors'] - def get_golden_estimate(self, golden_output_filepath, estimator_function=None, *estimator_args): + def get_golden_estimate(self, golden_output_filepath, estimator_function=None, *estimator_args): """ Obtain the golden estimation from file (and create one if DO_GENERATE_GOLDEN_OUTPUTS is True) :param golden_output_filepath: the path to the golden estimate output file @@ -449,4 +477,5 @@ class TestEstimationsAgainstGoldenOutput(unittest.TestCase): f.close() if __name__ == '__main__': + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.WARNING) unittest.main() diff --git a/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py index 348d5357ca33c1f2646c735ab11ca383875b4477..a649842dac9d79439d28075b903d2cf1958607d3 100644 --- a/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py +++ b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py @@ -134,26 +134,14 @@ class TaskPrescheduler(OTDBBusListener): # NOTE: The MoM predecessor Ids to OTDB predecessor Ids conversion is done in RATaskSpecified on the fly # otdb_id = treeId - # #Race condition when asking MoM as the mom-otdb-adapter might not have heard that the - # #task is on approved and might still be on approved pending in MoM. - # #mom_ids = self.momquery.getMoMIdsForOTDBIds([otdb_id]) - # #So we get the parset for all tasks we receive instead of just for the ones with - # #a trigger.
- # try: - # parset = self.otdbrpc.taskGetSpecification( otdb_id=otdb_id )['specification'] - # tasktype, task_subtype = getTaskTypes(parset) - # specification = resourceIndicatorsFromParset(tasktype, task_subtype, parset, INPUT_PREFIX) - # except Exception as e: - # logger.exception(e) - # logger.error("Problem parsing specification for otdb_id=%s", otdb_id) - # self.radbrpc.updateTaskStatusForOtdbId(otdb_id, 'error') # We don't catch an exception if this fails. - # return - # if specification["Observation.momID"]: - # mom_id = specification["Observation.momID"] - # logger.info('Found mom_id %s for otdb_id %s', mom_id, otdb_id) - # else: - # logger.info('Did not find a mom_id for task otdb_id=%s', otdb_id) - # return + # + # Note: Race condition when asking MoM as the mom-otdb-adapter might not have heard that the + # task is on approved and might still be on approved pending in MoM. + # so don't ask the MomQuery: mom_ids = self.momquery.getMoMIdsForOTDBIds([otdb_id]) + # We get the mom_id from the parset + # + # We get the parset for all tasks we receive instead of just for the ones with + # a trigger. status = "approved" spec = Specification(logger, self.otdbrpc, self.momquery, self.radbrpc) spec.set_status(status) diff --git a/SAS/ResourceAssignment/doc/resource_assignment.md b/SAS/ResourceAssignment/doc/resource_assignment.md index 6e49523d2cbe1bf3edabdd1d96c388de96a4389d..30ff88b6fb91b8c492b008b93a95a57825b84a84 100644 --- a/SAS/ResourceAssignment/doc/resource_assignment.md +++ b/SAS/ResourceAssignment/doc/resource_assignment.md @@ -35,6 +35,7 @@ interact with the system in a manner visible to other users. 
### Overview See [Wiki documentation Resource Assigner](https://www.astron.nl/lofarwiki/doku.php?id=rrr:redesign_resource_assignment_system) +Updated diagrams (in graphml/odf format) can be found in the [SVN documentation on SAS redesign for responsive telescope](https://svn.astron.nl/LOFAR/trunk//SAS/doc/SAS_redesign_for_responsive_telescope/) TODO - *Add a diagram* diff --git a/SAS/Scheduler/src/Controller.cpp b/SAS/Scheduler/src/Controller.cpp index 2342513510d8c2cece1b72ef98bf05334b534abf..d3471434a6a213253c43e82a38fb3574db952965 100644 --- a/SAS/Scheduler/src/Controller.cpp +++ b/SAS/Scheduler/src/Controller.cpp @@ -3961,8 +3961,7 @@ std::pair<unscheduled_reasons, QString> Controller::setInputFilesForPipeline(Tas for (dataProductTypes dpType = _BEGIN_DATA_PRODUCTS_ENUM_; dpType < _END_DATA_PRODUCTS_ENUM_-1; dpType = dataProductTypes(dpType + 1)) { if (pPipe->storage()->isInputDataProduktEnabled(dpType)) { // is this input data product type enabled? TaskStorage::inputDataProduct &dp = inputDataProducts[dpType]; // also creates the record in the inputDataProducts map if it doesn't exist yet - resetSkipVector = (dp.skip.empty() /*|| (dp.skip.size() != (unsigned)dp.filenames.size())*/); // the skip vector should only be synchronized with the predecessor skip vector the first time (i.e. when it is not yet set) - storageVector storageVec; + resetSkipVector = (dp.skip.empty() ); // the skip vector should only be synchronized with the predecessor skip vector the first time (i.e. when it is not yet set) for (QStringList::const_iterator identit = dp.identifications.begin(); identit != dp.identifications.end(); ++identit) { foundit = false; @@ -3974,9 +3973,7 @@ std::pair<unscheduled_reasons, QString> Controller::setInputFilesForPipeline(Tas if (pit != pred_output.end()) { idxIt = pit->second.identifications.indexOf(*identit); if (idxIt != -1) { // found? 
- storageVector predecessorStorageVec(predStorage->getStorageLocations(dpType)); - unsigned psz(predecessorStorageVec.size()); - if (psz != 0 || (*predit)->getOutputDataproductCluster() == "CEP4") { + if (true /*Used to contain code for CEP2/CEP4 checks*/) { // copy the filenames and locations pointed to by this identification to the input data product list of this task if (pit->second.filenames.size() == pit->second.locations.size()) { if ((dpType == DP_CORRELATED_UV) || (dpType == DP_COHERENT_STOKES) || (dpType == DP_INCOHERENT_STOKES)) { // for these data product types copy only the files that have the corresponding SAP @@ -3990,9 +3987,6 @@ std::pair<unscheduled_reasons, QString> Controller::setInputFilesForPipeline(Tas if (filename.contains(sapstr)) { filenames.push_back(filename); locations.push_back(pit->second.locations.at(i)); - if ((*predit)->getOutputDataproductCluster() != "CEP4") { - storageVec.push_back(predecessorStorageVec.at(i % psz)); - } if (resetSkipVector) { if (SyncSkipWithPredecessor) { skipVec.push_back(pit->second.skip.at(i)); @@ -4007,9 +4001,6 @@ std::pair<unscheduled_reasons, QString> Controller::setInputFilesForPipeline(Tas else { // no specific SAP specified in identification, just copy all files filenames += pit->second.filenames; locations += pit->second.locations; - if ((*predit)->getOutputDataproductCluster() != "CEP4") { - storageVec.insert(storageVec.end(), predecessorStorageVec.begin(), predecessorStorageVec.end()); - } if (resetSkipVector) { if (pit->second.skip.size() == (unsigned)pit->second.filenames.size()) { skipVec.insert(skipVec.end(), pit->second.skip.begin(), pit->second.skip.end()); @@ -4023,9 +4014,6 @@ std::pair<unscheduled_reasons, QString> Controller::setInputFilesForPipeline(Tas else { // for all other data product types copy all files filenames += pit->second.filenames; locations += pit->second.locations; - if ((*predit)->getOutputDataproductCluster() != "CEP4") { - storageVec.insert(storageVec.end(), 
predecessorStorageVec.begin(), predecessorStorageVec.end()); - } if (resetSkipVector) { if (pit->second.skip.size() == (unsigned)pit->second.filenames.size()) { skipVec.insert(skipVec.end(), pit->second.skip.begin(), pit->second.skip.end()); @@ -4066,8 +4054,6 @@ std::pair<unscheduled_reasons, QString> Controller::setInputFilesForPipeline(Tas return error; // identification not found } } - // set storage location IDs equal to the accumulation of the predecessor output storage vec's - pPipe->storage()->addInputStorageLocations(dpType, storageVec); dp.filenames = filenames; dp.locations = locations; if (!resetSkipVector) { @@ -4544,7 +4530,7 @@ bool Controller::checkEarlyTasksStatus(void) { int treeID; for (std::vector<Task *>::const_iterator it = tasks.begin(); it != tasks.end(); ++it) { if ((*it)->getScheduledStart() <= now()) { - if (((*it)->getOutputDataproductCluster() == "CEP4") && (*it)->isPipeline()) { + if ( (*it)->isPipeline()) { continue; //Pipelines on CEP4: we don't care as SLURM sorts it out. 
} treeID = (*it)->getSASTreeID(); @@ -4587,14 +4573,14 @@ int Controller::assignResources(bool showResult) { } if (retVal == 0) { - int ret = refreshStorageNodesInfo(); + int ret = 0; //Data Monitor does not exist any more refreshStorageNodesInfo(); // ret: // 0: refresh ok // 1: no connection to data monitor don't continue // 2: user clicked cancel when asked to connect to the data monitor if (ret == 0) { // refresh ok - if (!assignStorageResources()) { + if (false /*Used to contain code for CEP2/CEP4 checks*/) { retVal = 3; // storage resource assignment conflicts detected } } @@ -4615,8 +4601,7 @@ int Controller::assignResources(bool showResult) { } break; case 1: -// QMessageBox::critical(0, tr("No connection to Data Monitor"), -// tr("Could not connect to the Data Monitor.\nPlease check Data Monitor connection settings")); + //Used to contain DataMonitor code break; case 2: QMessageBox::warning(gui,tr("Resource assignment conflicts detected"),tr("Some task(s) are scheduled in the past!\nStart time needs to be at least 3 minutes after now")); @@ -4767,7 +4752,7 @@ bool Controller::calculateDataSlots(void) { } -bool Controller::assignManualStorageToTask(Task *pTask) { +/*bool Controller::assignManualStorageToTask(Task *pTask) { if (pTask->hasStorage()) { TaskStorage *taskStorage(pTask->storage()); // check and when possible assign the task's manually requested resources @@ -4807,10 +4792,10 @@ bool Controller::assignManualStorageToTask(Task *pTask) { } } else return true; -} +}*/ -bool Controller::assignStorageToTask(Task *pTask) { +/*bool Controller::assignStorageToTask(Task *pTask) { bool bResult(true); if (pTask->hasStorage()) { TaskStorage *taskStorage(pTask->storage()); @@ -5132,9 +5117,9 @@ bool Controller::assignStorageToTask(Task *pTask) { } } return bResult; -} +}*/ -bool Controller::assignGroupedStorage(void) { // not for manual assignment of storage +/*bool Controller::assignGroupedStorage(void) { // not for manual assignment of storage bool 
bResult(true); std::map<unsigned, std::vector<Task *> > groupedTasks = data.getGroupedTasks(Task::PRESCHEDULED); @@ -5613,10 +5598,10 @@ bool Controller::assignGroupedStorage(void) { // not for manual assignment of st } return bResult; -} +}*/ -bool Controller::assignStorageResources(Task *task) { +/*bool Controller::assignStorageResources(Task *task) { bool bResult(true); // if (refreshStorageNodesInfo()) { if (task) { @@ -5688,5 +5673,5 @@ bool Controller::assignStorageResources(Task *task) { gui->updateTaskDialog(); // update task dialog (needed for the storage tree to show the conflict info) return bResult; -} +}*/ diff --git a/SAS/Scheduler/src/Controller.h b/SAS/Scheduler/src/Controller.h index 52185f026fe1da177a551353aeb271fc3821c1cf..b33812173b73bf5849a4dfe66356712998b4ed4b 100644 --- a/SAS/Scheduler/src/Controller.h +++ b/SAS/Scheduler/src/Controller.h @@ -190,7 +190,7 @@ public: #endif // resource assignment - bool assignStorageResources(Task *task = 0); +// bool assignStorageResources(Task *task = 0); bool calculateDataSlots(void); @@ -223,10 +223,10 @@ private: void updateDeletedTasksUndo(void); bool askOverWriteExistingTask(bool &overwrite, bool &forAll, unsigned taskID, const QString &taskName); // bool dataMonitorInitRequired(void); // checks to see if data monitor init is required - bool assignManualStorageToTask(Task *pTask); - bool assignGroupedStorage(void); +// bool assignManualStorageToTask(Task *pTask); +// bool assignGroupedStorage(void); // bool assignMinimumStorageToTask(Task *pTask); - bool assignStorageToTask(Task *pTask); +// bool assignStorageToTask(Task *pTask); void rescheduleTask(unsigned task_id, AstroDateTime new_start); // checkEarlyTasksStatus: checks the current status of too early tasks in the SAS database and updates the tasks in the scheduler if the status was changed in SAS // returns false if any too early task was found which is still (PRE)SCHEDULED diff --git a/SAS/Scheduler/src/SASConnection.cpp 
b/SAS/Scheduler/src/SASConnection.cpp index 66edc07fc541db8edb5932adbd0f5a806c52c7f9..395f032285d9bce5a3ccdb90982fb15f1c8b50b4 100644 --- a/SAS/Scheduler/src/SASConnection.cpp +++ b/SAS/Scheduler/src/SASConnection.cpp @@ -2576,131 +2576,13 @@ bool SASConnection::saveStationSettings(int treeID, const StationTask &task, con bool SASConnection::saveInputStorageSettings(int treeID, const Task &task) { bool bResult(true); - if (task.getOutputDataproductCluster() == "CEP4") { //For CEP4 we're skipping this. /AR - return bResult; - } - const TaskStorage *task_storage(task.storage()); - if (task_storage) { - const std::map<dataProductTypes, TaskStorage::inputDataProduct> &inputDataProducts(task_storage->getInputDataProducts()); - std::map<dataProductTypes, TaskStorage::inputDataProduct>::const_iterator flit; - QString locationsStr, filenamesStr, skipVectorStr, enabledStr; - - for (dataProductTypes dp = _BEGIN_DATA_PRODUCTS_ENUM_; dp < _END_DATA_PRODUCTS_ENUM_-1; dp = dataProductTypes(dp + 1)) { - flit = inputDataProducts.find(dp); - enabledStr = task_storage->isInputDataProduktEnabled(dp) ? 
"true" : "false"; - if (flit != inputDataProducts.end()) { - locationsStr = "[" + flit->second.locations.join(",") + "]"; - filenamesStr = "[" + flit->second.filenames.join(",") + "]"; - skipVectorStr = boolVector2StringVector(flit->second.skip); - switch (dp) { - case DP_COHERENT_STOKES: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.locations", locationsStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.filenames", filenamesStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.skip", skipVectorStr)) bResult = false; - break; - case DP_INCOHERENT_STOKES: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.locations", locationsStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.filenames", filenamesStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.skip", skipVectorStr)) bResult = false; - break; - case DP_CORRELATED_UV: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.locations", locationsStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.filenames", filenamesStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.skip", skipVectorStr)) bResult = false; - break; - case DP_INSTRUMENT_MODEL: - if (!setNodeValue(treeID, 
"LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.locations", locationsStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.filenames", filenamesStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.skip", skipVectorStr)) bResult = false; - break; - case DP_SKY_IMAGE: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.locations", locationsStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.filenames", filenamesStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.skip", skipVectorStr)) bResult = false; - break; - default: - break; - } - } - else { - switch (dp) { - case DP_COHERENT_STOKES: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.locations", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.filenames", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_CoherentStokes.skip", "[]")) bResult = false; - break; - case DP_INCOHERENT_STOKES: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.locations", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.filenames", "[]")) 
bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_IncoherentStokes.skip", "[]")) bResult = false; - break; - case DP_CORRELATED_UV: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.locations", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.filenames", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_Correlated.skip", "[]")) bResult = false; - break; - case DP_INSTRUMENT_MODEL: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.locations", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.filenames", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_InstrumentModel.skip", "[]")) bResult = false; - break; - case DP_SKY_IMAGE: - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.enabled", enabledStr)) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.locations", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.filenames", "[]")) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Input_SkyImage.skip", "[]")) bResult = false; - break; - default: - break; - } - } - } - } - return bResult; + //For CEP4 we're skipping this. 
/AR + return bResult; } bool SASConnection::saveOutputStorageSettings(int treeID, const Task &task, const task_diff *diff) { bool bResult(true); - if (task.getOutputDataproductCluster() == "CEP4") { //For CEP4 we're skipping this. /AR - return bResult; - } - const TaskStorage *task_storage(task.storage()); - if (task_storage) { - QString trueStr("true"), falseStr("false"); - // which output data to generate - const TaskStorage::enableDataProdukts &odp(task_storage->getOutputDataProductsEnabled()); - if (diff) { - if (diff->output_data_types) { - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_Correlated.enabled", (odp.correlated ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_CoherentStokes.enabled", (odp.coherentStokes ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_IncoherentStokes.enabled", (odp.incoherentStokes ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_InstrumentModel.enabled", (odp.instrumentModel ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_Pulsar.enabled", (odp.pulsar ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_SkyImage.enabled", (odp.skyImage ? trueStr : falseStr))) bResult = false; - } - if (diff->output_data_products) { - bResult &= saveOutputDataProducts(treeID, task); - } - } - else { - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_Correlated.enabled", (odp.correlated ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_CoherentStokes.enabled", (odp.coherentStokes ? 
trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_IncoherentStokes.enabled", (odp.incoherentStokes ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_InstrumentModel.enabled", (odp.instrumentModel ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_Pulsar.enabled", (odp.pulsar ? trueStr : falseStr))) bResult = false; - if (!setNodeValue(treeID, "LOFAR.ObsSW.Observation.DataProducts.Output_SkyImage.enabled", (odp.skyImage ? trueStr : falseStr))) bResult = false; - bResult &= saveOutputDataProducts(treeID, task); - } - } + //For CEP4 we're skipping this. /AR return bResult; } diff --git a/SAS/Scheduler/src/Storage.cpp b/SAS/Scheduler/src/Storage.cpp index b4e3c887a85fb8d8a2d6715861c7f4c00f2cca37..3409b95f0afb2bf3e622b29734c9db9121dfb160 100644 --- a/SAS/Scheduler/src/Storage.cpp +++ b/SAS/Scheduler/src/Storage.cpp @@ -87,186 +87,6 @@ void Storage::initStorage(void) { } } -std::vector<storageResult> Storage::addStorageToTask(Task *pTask, const storageMap &storageLocations) { - if (pTask->hasStorage()) { - TaskStorage *task_storage = pTask->storage(); - storageNodesMap::iterator sit; - const AstroDateTime &start = pTask->getScheduledStart(); - const AstroDateTime &end = pTask->getScheduledEnd(); - unsigned durationSec = pTask->getDuration().totalSeconds(); - const dataFileMap &dataFiles = task_storage->getOutputFileSizes(); // contains the number of files and the size of an individual file for each output data product of the task - double claimSize, bandWidth; - // check node bandwidth requirements (CAUTION: multiple data product types could use the same storage node - // search for dataproducts that use the same storage node - std::map<int, double> totalBWPerNodeMap; - double dpBWPerLocation; - task_conflict res; - for (storageMap::const_iterator it = storageLocations.begin(); it != 
storageLocations.end(); ++it) { - dataFileMap::const_iterator dit = dataFiles.find(it->first); - if (dit != dataFiles.end()) { - // total bandwidth (kbit/sec) required from a storage node to receive the required number of files from the data product - // ceil(total_number_files / number_of_raids_used) * filesize [kbyte] / duration [seconds] * 8 - // std::cout << "task:" << pTask->getID() << ", dataproduct:" << DATA_PRODUCTS[dit->first] << std::endl - // << "nr of files:" << dit->second.second << ", size per file:" << dit->second.first << "number of locations:" << it->second.size() << std::endl - // << "BW per location:" << ceil((double)dit->second.second / it->second.size()) * dit->second.first / durationSec * 8 << std::endl; - dpBWPerLocation = ceil((double)dit->second.second / it->second.size()) * dit->second.first / durationSec * 8; - for (storageVector::const_iterator sit = it->second.begin(); sit != it->second.end(); ++sit) { - if (totalBWPerNodeMap.find(sit->first) == totalBWPerNodeMap.end()) { // node not already in totalSizePerNodeMap? - // for each time this node is used by this data product increase its totalSizePerNodeMap value accordingly - for (storageVector::const_iterator countit = sit; countit != it->second.end(); ++countit) { - if (countit->first == sit->first) { // for each time this storage node is used in storageVector - totalBWPerNodeMap[sit->first] += dpBWPerLocation; // add the amount of bandwidth used by the set of files - } - } - } - } - } - } - itsLastStorageCheckResult.clear(); - if (pTask->getOutputDataproductCluster() == "CEP4") { //Can we just skip this for CEP4 ? 
/AR - debugWarn("sis","Storage::addStorageToTask: Did not check storage for task:", pTask->getID(), " (CEP4 detected)"); - } - else { - // check if the total bandwidths for the nodes used do not exceed the nodes their available bandwidths - for (std::map<int, double>::const_iterator nit = totalBWPerNodeMap.begin(); nit != totalBWPerNodeMap.end(); ++nit) { - storageNodesMap::const_iterator nodeit = itsStorageNodes.find(nit->first); - if (nodeit != itsStorageNodes.end()) { - // std::cout << "Total bandwidth required for node:" << nodeit->second.name() << " = " << nit->second << " kb/s" << std::endl; - res = nodeit->second.checkBandWidth(start, end, nit->second); - if (res != CONFLICT_NO_CONFLICT) { - itsLastStorageCheckResult.push_back(storageResult(_END_DATA_PRODUCTS_ENUM_, nit->first, -1, res)); - } - } - } - if (itsLastStorageCheckResult.empty()) { // if no total bandwidth error for any node then start the rest of the checks - for (dataFileMap::const_iterator dfit = dataFiles.begin(); dfit != dataFiles.end(); ++dfit) { - storageMap::const_iterator stit = storageLocations.find(dfit->first); - if (stit != storageLocations.end()) { - if (!stit->second.empty()) { - claimSize = (double) dfit->second.first * dfit->second.second / stit->second.size(); // size per file * nrFiles / nr of raid arrays assigned - bandWidth = (double) claimSize / 1000 / durationSec; // MByte/sec, the required remaining disk write speed (or bandwidth) for this array - - // check requested resources - for (storageVector::const_iterator it = stit->second.begin(); it != stit->second.end(); ++it) { - sit = itsStorageNodes.find(it->first); - if (sit != itsStorageNodes.end()) { - // check size requirements - res = sit->second.checkSpaceAndWriteSpeed(start, end, claimSize, bandWidth, it->second); // check space and write speed for every raid array - if (res != CONFLICT_NO_CONFLICT) { - itsLastStorageCheckResult.push_back(storageResult(dfit->first, it->first, it->second, res)); - // 
itsLastStorageCheckResult[it->first].push_back(std::pair<int, task_conflict>(it->second, res)); // store the error result - } - else { // add the claim - sit->second.addClaim(pTask->getID(), start, end, dfit->first, claimSize, bandWidth, it->second); - } - } - } - // if there were conflicts then remove the claim again from the storage nodes - if (!itsLastStorageCheckResult.empty()) { - std::vector<int> snd; - for (storageVector::const_iterator it = stit->second.begin(); it != stit->second.end(); ++it) { - sit = itsStorageNodes.find(it->first); - if (sit != itsStorageNodes.end()) { - if (std::find(snd.begin(), snd.end(), stit->first) == snd.end()) { - sit->second.removeClaim(pTask->getID()); // only call removeClaim one time for every storage node (it removes all claims found for the task ID) - snd.push_back(stit->first); - } - } - } - } - } - else { // no storage has been assigned to this data product type - itsLastStorageCheckResult.push_back(storageResult(dfit->first, -1, -1, CONFLICT_NO_STORAGE_ASSIGNED)); - } - } - else { // no storage has been assigned to this data product type - itsLastStorageCheckResult.push_back(storageResult(dfit->first, -1, -1, CONFLICT_NO_STORAGE_ASSIGNED)); - } - } - } - } - if (itsLastStorageCheckResult.empty()) { - task_storage->unAssignStorage(); - task_storage->setStorage(storageLocations); // sets the new locations in the task - for (storageMap::const_iterator tsit = storageLocations.begin(); tsit != storageLocations.end(); ++tsit) { - task_storage->setOutputDataProductAssigned(tsit->first, true); - } - } - } - else { - debugWarn("sis","Storage::addStorageToTask: Cannot add storage to task:", pTask->getID(), " (hint:not an observation or pipeline?)"); - } - - return itsLastStorageCheckResult; -} - -std::vector<storageResult> Storage::addStorageToTask(Task *pTask, dataProductTypes dataProduct, const storageVector &storageLocations, bool noCheck) { - if (pTask->hasStorage()) { - TaskStorage *task_storage = pTask->storage(); - 
storageNodesMap::iterator sit; - const AstroDateTime &start = pTask->getScheduledStart(); - const AstroDateTime &end = pTask->getScheduledEnd(); - unsigned durationSec = pTask->getDuration().totalSeconds(); - unsigned taskID = pTask->getID(); - const dataFileMap &dataFiles = task_storage->getOutputFileSizes(); // contains the number of files and the size of an individual file for each output data product of the task - double claimSize, bandWidth; - // iterate over all required data products for the task - // for (dataFileMap::const_iterator dpit = dataFiles.begin(); dpit != dataFiles.end(); ++dpit) { - dataFileMap::const_iterator dfit = dataFiles.find(dataProduct); - itsLastStorageCheckResult.clear(); - if (dfit != dataFiles.end()) { - // claimsize = size of the claim for this raid array - claimSize = (double) dfit->second.first * dfit->second.second / storageLocations.size(); // size per file * nrFiles / nr of raid arrays assigned - bandWidth = (double) claimSize / 1000 / durationSec; // MByte/sec, the required remaining disk write speed (or bandwidth) for this array - // std::cout << "total size: " << totalStorageSize << std::endl << "nr of storage locations:" << storageLocations.size() << std::endl << "size per node: " << sizePerNode << std::endl - // << "total bandwidth: " << totalBandWidth << std::endl << "per node: " << bandWidthPerNode << std::endl; - task_conflict res(CONFLICT_NO_CONFLICT); - for (storageVector::const_iterator it = storageLocations.begin(); it != storageLocations.end(); ++it) { - res = CONFLICT_NO_CONFLICT; - sit = itsStorageNodes.find(it->first); - if (sit != itsStorageNodes.end()) { - // check size requirements - if (!noCheck && pTask->getOutputDataproductCluster() != "CEP4") { - res = sit->second.checkSpaceAndWriteSpeed(start, end, claimSize, bandWidth, it->second); // check space and bandwidth for every raid array - } - if (res == CONFLICT_NO_CONFLICT) { - sit->second.addClaim(taskID, start, end, dataProduct, claimSize, bandWidth, 
it->second); - if (std::find(itsTaskStorageNodes[taskID].begin(), itsTaskStorageNodes[taskID].end(), it->first) == itsTaskStorageNodes[taskID].end()) { - itsTaskStorageNodes[taskID].push_back(it->first); - } - } - else { - itsLastStorageCheckResult.push_back(storageResult(dataProduct, it->first, it->second, res)); - } - } - } - if (!storageLocations.empty() && res == CONFLICT_NO_CONFLICT) { - task_storage->addStorage(dataProduct, storageLocations); // adds the storage to the task - task_storage->setOutputDataProductAssigned(dataProduct, true); - } - } - else { - // error: dataProduct not found in dataFiles map of the task! - } - } - else { - debugWarn("sis","Storage::addStorageToTask: Cannot add storage to task:", pTask->getID(), " (hint:not an observation or pipeline?)"); - } - return itsLastStorageCheckResult; -} - -void Storage::removeTaskStorage(unsigned taskID) { - std::map<unsigned, std::vector<int> >::iterator it = itsTaskStorageNodes.find(taskID); - if (it != itsTaskStorageNodes.end()) { - for (std::vector<int>::iterator sit = it->second.begin(); sit != it->second.end(); ++sit) { - storageNodesMap::iterator snit = itsStorageNodes.find(*sit); - if (snit != itsStorageNodes.end()) { - snit->second.removeClaim(taskID); - } - } - itsTaskStorageNodes.erase(it); - } -} // function checkAssignedTaskStorage is used for checking if the given task it's claims are registered at the storage nodes assigned to the task // assuming it is not possible to assign storage to a task if a conflict arises from it, the function doesn't check if the size and bandwidth requirements are fulfilled. 
diff --git a/SAS/Scheduler/src/Storage.h b/SAS/Scheduler/src/Storage.h index acef733e60f5e7c82d01cf59864b2c6db1c08b9b..5f1fa89b38181227eb5ed85021b5c862978ff55f 100644 --- a/SAS/Scheduler/src/Storage.h +++ b/SAS/Scheduler/src/Storage.h @@ -32,9 +32,6 @@ public: bool addStorageNode(const std::string &nodeName, int nodeID);// {itsStorageNodes.insert(storageNodesMap::value_type(nodeID, nodeName));} void addStoragePartition(int nodeID, unsigned short partitionID, const std::string &path, const double &capacity, const double &free_space); void clearStorageClaims(void); // removes all claims from all storage nodes - std::vector<storageResult> addStorageToTask(Task *pTask, const storageMap &storageLocations); - std::vector<storageResult> addStorageToTask(Task *pTask, dataProductTypes dataProduct, const storageVector &storageLocations, bool noCheck); // bool addStorageTask(unsigned taskID, const AstroDateTime &startTime, const AstroDateTime &endTime, const double &claimSize, const double &bandWidth, int storageNodeID, int raidID); - void removeTaskStorage(unsigned taskID); std::vector<storageResult> checkAssignedTaskStorage(Task *pTask, dataProductTypes dataProduct); // returns the possible storage locations for the claim.key = node ID, value vector of raidID,free space pairs storageLocationOptions getStorageLocationOptions(dataProductTypes dataProduct, const AstroDateTime &startTime, const AstroDateTime &endTime, diff --git a/SAS/Scheduler/src/schedulerdata.h b/SAS/Scheduler/src/schedulerdata.h index 3762bfd42c862d718f3e5a9db7d2ad1c45643f39..8557890ab97a801af7a1f4380fa052a60e2108fd 100644 --- a/SAS/Scheduler/src/schedulerdata.h +++ b/SAS/Scheduler/src/schedulerdata.h @@ -164,11 +164,6 @@ public: void updateStations(void) {itsData.updateStations();} void initStorage(void) {itsData.initStorage();} void clearStorageClaims(void) {itsData.clearStorageClaims();} - std::vector<storageResult> addStorageToTask(Task *pTask, const storageMap &storageLocations) {return 
itsData.addStorageToTask(pTask, storageLocations);} - std::vector<storageResult> addStorageToTask(Task *pTask, dataProductTypes dataProduct, const storageVector &storageLocations, bool noCheck = false) { - return itsData.addStorageToTask(pTask, dataProduct, storageLocations, noCheck); - } - void removeStorageForTask(unsigned taskID) {itsData.removeStorageForTask(taskID);} std::vector<storageResult> checkAssignedTaskStorage(Task *pTask, dataProductTypes dataProduct) {return itsData.checkAssignedTaskStorage(pTask, dataProduct);} storageLocationOptions getStorageLocationOptions(dataProductTypes dataProduct, const AstroDateTime &startTime, const AstroDateTime &endTime, const double &fileSize, const double &bandWidth, unsigned minNrFiles, sortMode sort_mode = SORT_NONE, const std::vector<int> &nodes = std::vector<int>()) { return itsData.getStorageLocationOptions(dataProduct, startTime, endTime, fileSize, bandWidth, minNrFiles, sort_mode, nodes); diff --git a/SAS/Scheduler/src/schedulerdatablock.h b/SAS/Scheduler/src/schedulerdatablock.h index 84e7ca02816f8001110a2a4d25c91a0ba22032f7..bad28149bd55f38f1aebf4a6e813531ee347d831 100644 --- a/SAS/Scheduler/src/schedulerdatablock.h +++ b/SAS/Scheduler/src/schedulerdatablock.h @@ -211,11 +211,6 @@ public: void initStorage(void) {itsStorage.initStorage();} void clearStorageClaims(void) {itsStorage.clearStorageClaims();} - std::vector<storageResult> addStorageToTask(Task *pTask, const storageMap &storageLocations) {return itsStorage.addStorageToTask(pTask, storageLocations);} - std::vector<storageResult> addStorageToTask(Task *pTask, dataProductTypes dataProduct, const storageVector &storageLocations, bool noCheck = false) { - return itsStorage.addStorageToTask(pTask, dataProduct, storageLocations, noCheck); - } - void removeStorageForTask(unsigned taskID) {itsStorage.removeTaskStorage(taskID);} std::vector<storageResult> checkAssignedTaskStorage(Task *pTask, dataProductTypes dataProduct) {return 
itsStorage.checkAssignedTaskStorage(pTask, dataProduct);} storageLocationOptions getStorageLocationOptions(dataProductTypes dataProduct, const AstroDateTime &startTime, const AstroDateTime &endTime, const double &fileSize, const double &bandWidth, unsigned minNrFiles, sortMode sort_mode = SORT_NONE, const std::vector<int> &nodes = std::vector<int>()) { return itsStorage.getStorageLocationOptions(dataProduct, startTime, endTime, fileSize, bandWidth, minNrFiles, sort_mode, nodes); diff --git a/SAS/TriggerServices/config/dbcredentials_trigger_restinterface.ini b/SAS/TriggerServices/config/dbcredentials_trigger_restinterface.ini new file mode 100644 index 0000000000000000000000000000000000000000..7db6c53863cbcd248656e8804a7179588a8043c8 --- /dev/null +++ b/SAS/TriggerServices/config/dbcredentials_trigger_restinterface.ini @@ -0,0 +1,7 @@ +[database:trigger_restinterface] +type = mysql +host = mysql1.control.lofar +user = <redacted> +password = <redacted> +database = lofar_trigger_rest +port = 3306 diff --git a/SAS/TriggerServices/django_rest/restinterface/settings.py b/SAS/TriggerServices/django_rest/restinterface/settings.py index abd72b9b1d740a12d79e058ff558f5e4f6edf1b9..428cd551c6facb85e003e88eb19262fe60c1ae46 100644 --- a/SAS/TriggerServices/django_rest/restinterface/settings.py +++ b/SAS/TriggerServices/django_rest/restinterface/settings.py @@ -23,7 +23,7 @@ logger.setLevel(logging.INFO) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) dbc = dbcredentials.DBCredentials() -mom_credentials = dbc.get("MoM") +restinterface_credentials = dbc.get("trigger_restinterface") # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ @@ -87,11 +87,11 @@ WSGI_APPLICATION = 'lofar.triggerservices.restinterface.wsgi.application' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', - 'NAME': 'lofar_trigger_rest', - 'USER': mom_credentials.user, - 'PASSWORD': 
mom_credentials.password, - 'HOST': mom_credentials.host, - 'PORT': mom_credentials.port, + 'NAME': restinterface_credentials.database, + 'USER': restinterface_credentials.user, + 'PASSWORD': restinterface_credentials.password, + 'HOST': restinterface_credentials.host, + 'PORT': restinterface_credentials.port, } } diff --git a/SAS/XML_generator/src/xmlgen.py b/SAS/XML_generator/src/xmlgen.py index eaefe5200ea9657a491588ca54a4660790cf5eec..c9329e6be5816d800d052dc4210ed7c6bb40e07b 100755 --- a/SAS/XML_generator/src/xmlgen.py +++ b/SAS/XML_generator/src/xmlgen.py @@ -26,28 +26,31 @@ # Last change by : $Author: renting $ # Change date : $Date: 2016-05-18 11:47:57 +0200 (wo, 18 mei 2016) $ -VERSION = "2.22.4" - +VERSION = "3.1.0" + import sys, getopt, time from xml.sax.saxutils import escape as XMLescape from os import _exit as os_exit from os.path import splitext -from datetime import datetime,timedelta +from datetime import datetime, timedelta from math import pi import re +import json -CLOCK_MODES = ['160 MHz','200 MHz'] +CLOCK_MODES = ['160 MHz', '200 MHz'] INSTRUMENT_FILTERS = ["10-70 MHz", "30-70 MHz", "10-90 MHz", "30-90 MHz", "110-190 MHz", "170-230 MHz", "210-250 MHz"] -ANTENNA_MODES = ["LBA Inner", "LBA Outer", "LBA Sparse Even", "LBA Sparse Odd", "LBA X", "LBA Y", "HBA Zero", "HBA Zero Inner", "HBA One", "HBA One Inner", "HBA Dual", "HBA Dual Inner", "HBA Joined", "HBA Joined Inner"] -NUMBER_OF_BITS_PER_SAMPLE = [4,8,16] -MAX_NR_SUBBANDS = [976,488,244] -WHICH_IS = ['IQUV','I'] -WHICH_CS = ['IQUV','I','XXYY'] +ANTENNA_MODES = ["LBA Inner", "LBA Outer", "LBA Sparse Even", "LBA Sparse Odd", "LBA X", "LBA Y", "HBA Zero", + "HBA Zero Inner", "HBA One", "HBA One Inner", "HBA Dual", "HBA Dual Inner", "HBA Joined", + "HBA Joined Inner"] +NUMBER_OF_BITS_PER_SAMPLE = [4, 8, 16] +MAX_NR_SUBBANDS = [976, 488, 244] +WHICH_IS = ['IQUV', 'I'] +WHICH_CS = ['IQUV', 'I', 'XXYY'] WEIGHTING_SCHEMES = ['uniform', 'superuniform', 'natural', 'briggs', 'briggsabs', 'radial'] 
-IMAGING_PIPELINE_TYPES = ['MSSS','standard','none'] -#MODES = ['Calobs','Calbeam','MultiObs'] -PROCESSING = ['Preprocessing','Calibration','Pulsar','Imaging','LongBaseline','none'] -CALIBRATION_MODE = ['internal','external','none'] +IMAGING_PIPELINE_TYPES = ['MSSS', 'standard', 'none'] +# MODES = ['Calobs','Calbeam','MultiObs'] +PROCESSING = ['Preprocessing', 'Calibration', 'Pulsar', 'Imaging', 'LongBaseline', 'none'] +CALIBRATION_MODE = ['internal', 'external', 'none'] ALL_STATIONS = 'CS001,CS002,CS003,CS004,CS005,CS006,CS007,CS011,CS013,CS017,CS021,CS024,CS026,CS028,CS030,CS031,CS032,CS101,CS103,CS201,CS301,CS302,CS401,CS501,RS106,RS205,RS208,RS210,RS305,RS306,RS307,RS310,RS406,RS407,RS409,RS503,RS508,RS509,DE601,DE602,DE603,DE604,DE605,FR606,SE607,UK608,DE609,PL610,PL611,PL612,IE613' CORE_STATIONS = 'CS001,CS002,CS003,CS004,CS005,CS006,CS007,CS011,CS013,CS017,CS021,CS024,CS026,CS028,CS030,CS031,CS032,CS101,CS103,CS201,CS301,CS302,CS401,CS501' SUPERTERP_STATIONS = 'CS002,CS003,CS004,CS005,CS006,CS007' @@ -57,73 +60,83 @@ NL_STATIONS = 'CS001,CS002,CS003,CS004,CS005,CS006,CS007,CS011,CS013,CS017,CS021 DEFAULT_TASKS_PER_NODE = 11 DEFAULT_CORES_PER_TASK = 2 -RED_COLOR = '\033[91m' -NO_COLOR = '\033[0m' +RED_COLOR = '\033[91m' +NO_COLOR = '\033[0m' YELLOW_COLOR = '\033[93m' -CYAN_COLOR = '\033[96m' -GREEN_COLOR = '\033[92m' -#BLUE_COLOR = '\033[94m' -TRUE = ['y','Y','YES','yes','t','T','True','true'] -FALSE = ['n','N','NO','no','f','F','False','false'] +CYAN_COLOR = '\033[96m' +GREEN_COLOR = '\033[92m' +# BLUE_COLOR = '\033[94m' +TRUE = ['y', 'Y', 'YES', 'yes', 't', 'T', 'True', 'true'] +FALSE = ['n', 'N', 'NO', 'no', 'f', 'F', 'False', 'false'] + class GenException(Exception): - def __init__(self, message): - # Call the base class constructor with the parameters it needs - super(Exception, self).__init__(RED_COLOR + message + NO_COLOR) + def __init__(self, message): + # Call the base class constructor with the parameters it needs + super(Exception, 
self).__init__(RED_COLOR + message + NO_COLOR) + def merge_dicts(*dict_args): - ''' - Given any number of dicts, shallow copy and merge into a new dict, - precedence goes to key value pairs in latter dicts. - ''' - result = {} - for dictionary in dict_args: - result.update(dictionary) - return result + ''' + Given any number of dicts, shallow copy and merge into a new dict, + precedence goes to key value pairs in latter dicts. + ''' + result = {} + for dictionary in dict_args: + result.update(dictionary) + return result + def printMessage(message): - print(GREEN_COLOR + message + NO_COLOR) + print(GREEN_COLOR + message + NO_COLOR) + def printInfo(message): - print(CYAN_COLOR + 'INFO: ' + message + NO_COLOR) + print(CYAN_COLOR + 'INFO: ' + message + NO_COLOR) + def printWarning(message): - print(YELLOW_COLOR + 'WARNING: ' + message + NO_COLOR) + print(YELLOW_COLOR + 'WARNING: ' + message + NO_COLOR) + def dms2deg(dms_str): - arr = re.findall(r'\d+', dms_str) - while len(arr) < 4: # pad DMS string if not all of H,M,S are specified e.g. 20:10 will be 20:10:0.0 - arr.append(0) + arr = re.findall(r'\d+', dms_str) + while len(arr) < 4: # pad DMS string if not all of H,M,S are specified e.g. 20:10 will be 20:10:0.0 + arr.append(0) + + if dms_str[0].strip() == '-': # negative sign can only happen at the start of the string + sign = -1 + else: + sign = 1 + + arr[3] = float(arr[3]) / (10 ** len(str(arr[3]))) - if dms_str[0].strip() == '-': #negative sign can only happen at the start of the string - sign = -1 - else: - sign = 1 - - arr[3] = float(arr[3]) / (10**len(str(arr[3]))) + return sign * (abs(int(arr[0])) + float(arr[1]) / 60 + (float(arr[2]) + arr[3]) / 3600) - return sign * (abs(int(arr[0])) + float(arr[1]) / 60 + (float(arr[2]) + arr[3]) / 3600) def hms2deg(hms_str): - arr = re.findall(r'\d+', hms_str) - while len(arr) < 4: # pad HMS string if not all of H,M,S are specified e.g. 
20:10 will be 20:10:0.0 - arr.append(0) - - #FIXME Probably we shouldn't even allow negatives, hour angles should be between 0 and 24 - if hms_str[0].strip() == '-': #negative sign can only happen at the start of the string - sign = -1 - else: - sign = 1 - - arr[3] = float(arr[3]) / (10**len(str(arr[3]))) - - return sign * (abs(int(arr[0])) + float(arr[1]) / 60 + (float(arr[2]) + arr[3]) / 3600) * 15 + arr = re.findall(r'\d+', hms_str) + while len(arr) < 4: # pad HMS string if not all of H,M,S are specified e.g. 20:10 will be 20:10:0.0 + arr.append(0) + + # FIXME Probably we shouldn't even allow negatives, hour angles should be between 0 and 24 + if hms_str[0].strip() == '-': # negative sign can only happen at the start of the string + sign = -1 + else: + sign = 1 + + arr[3] = float(arr[3]) / (10 ** len(str(arr[3]))) + + return sign * (abs(int(arr[0])) + float(arr[1]) / 60 + (float(arr[2]) + arr[3]) / 3600) * 15 + def deg2rad(degrees): - return float(degrees) * pi / 180 - + return float(degrees) * pi / 180 + + def rad2deg(radian): - return float(radian) * 180 / pi + return float(radian) * 180 / pi + # def convertAngle(number, angle, beamName): #TODO get one convertAngle function # # try converting to degrees else radians else HMS @@ -140,32 +153,34 @@ def rad2deg(radian): # return angle def convertAngle1(angle, beamName): - # try converting to degrees else radians else HMS - if angle.endswith('deg') or angle.endswith('d'): # ra specified with 'deg' ? - angle = angle.rstrip(' deg') - else: - try: # try radian units - ra_deg = rad2deg(angle); - angle = ra_deg - except: # assuming hms - if not (angle.endswith('s') or angle[-1].isdigit()): - raise GenException("unkown coordinate: %s for angle1 of %s" % angle, beamName) - angle = str(hms2deg(angle)) - return angle + # try converting to degrees else radians else HMS + if angle.endswith('deg') or angle.endswith('d'): # ra specified with 'deg' ? 
+ angle = angle.rstrip(' deg') + else: + try: # try radian units + ra_deg = rad2deg(angle) + angle = ra_deg + except: # assuming hms + if not (angle.endswith('s') or angle[-1].isdigit()): # FIXME: makes no sense, angle should be float + raise GenException("unkown coordinate: %s for angle1 of %s" % (angle, beamName)) + angle = str(hms2deg(angle)) + return angle + def convertAngle2(angle, beamName): - # try converting to degrees else radians else HMS - if angle.endswith('deg') or angle.endswith('d'): # ra specified with 'deg' ? - angle = angle.rstrip(' deg') - else: - try: # try radian units - dec_deg = rad2deg(angle); - angle = dec_deg - except: # assuming dms - if not (angle.endswith('s') or angle[-1].isdigit()): - raise GenException("unkown coordinate: %s for angle2 of %s" % angle, beamName) - angle = str(dms2deg(angle)) - return angle + # try converting to degrees else radians else HMS + if angle.endswith('deg') or angle.endswith('d'): # ra specified with 'deg' ? + angle = angle.rstrip(' deg') + else: + try: # try radian units + dec_deg = rad2deg(angle) + angle = dec_deg + except: # assuming dms + if not (angle.endswith('s') or angle[-1].isdigit()): # FIXME: makes no sense, angle should be float + raise GenException("unkown coordinate: %s for angle2 of %s" % (angle, beamName)) + angle = str(dms2deg(angle)) + return angle + def parse_subband_list(parset_subband_list, nr_subbands): r''' @@ -196,116 +211,128 @@ def parse_subband_list(parset_subband_list, nr_subbands): if len(sub_list) == 1: multiplication = sub_list[0].split('*') if len(multiplication) == 2: - subbands += [int(multiplication[1])]*int(multiplication[0]) + subbands += [int(multiplication[1])] * int(multiplication[0]) else: subbands.append(int(sub_list[0])) elif len(sub_list) == 2: - subbands += range(int(sub_list[0]), int(sub_list[1])+1) + subbands += range(int(sub_list[0]), int(sub_list[1]) + 1) else: - raise GenException(str(word) + ' is not a valid sub_range in a subband list') - return [] + raise 
GenException(str(word) + ' is not a valid sub_range in a subband list') # FIXME: word might be undefined? doubles = set([x for x in subbands if subbands.count(x) > 1]) if len(doubles) > 0: - printWarning(parset_subband_list + ' contains the following double specified subbands: %s' % sorted(doubles)) + printWarning(parset_subband_list + ' contains the following double specified subbands: %s' % sorted(doubles)) return subbands + def verifySubbandList(keyname, parset_subband_list, nr_subbands): - subbandListCalculated = parse_subband_list(parset_subband_list, nr_subbands) - calcNrSubbands = len(subbandListCalculated) - if calcNrSubbands != int(nr_subbands): - raise GenException("%s error: calculated number of subbands (%i) is not equal to the specified number of subbands (%s)\nIs the subband list correct?") % (keyname, calcNrSubbands, nr_subbands) + subbandListCalculated = parse_subband_list(parset_subband_list, nr_subbands) + calcNrSubbands = len(subbandListCalculated) + if calcNrSubbands != int(nr_subbands): + raise GenException("%s error: calculated number of subbands (%i) is not equal to the specified number of " + "subbands (%s)\nIs the subband list correct?" % (keyname, calcNrSubbands, nr_subbands)) + def readExtraParms(keyset, lines): - valListEsc = [] - for line in lines: - if line.startswith(keyset + ":") or line.startswith(keyset + "="): - line = re.sub(r"\s+;", ';', line.lstrip(keyset).lstrip(":").lstrip("=").rstrip()) # clear white-space just before ';' - line = re.sub(r";\s+", ';', line) # clear white-space directly after ';' (this method intentionally does not clear white-space in the (string) parameters self!) 
- valList = line.split(';') - for strVal in valList: - valListEsc.append(XMLescape(strVal)) - return valListEsc - -def readTiedArrayBeams(lines): - tabs = [] - stopTABsearch = False - try: + valListEsc = [] for line in lines: - if line.startswith("TAB") or line.startswith("Global_TAB"): - continue - else: - valList = line.lstrip().rstrip().replace(' ', '').split(';') - if valList[0].startswith('c'): - # angle1 - if valList[1].endswith('deg') or valList[1].endswith('d'): # degree units? - valList[1] = deg2rad(valList[1].rstrip(' deg')) - else: #try radian else HMS - try: # if float conversion works assume radian - angle1 = float(valList[1]); - valList[1] = angle1 - except: # float conversion did not work try hms - valList[1] = deg2rad(hms2deg(valList[1])) - # angle2 - if valList[2].endswith('deg') or valList[2].endswith('d'): # degree units? - valList[2] = deg2rad(valList[2].rstrip(' deg')) - else: #try radian else HMS - try: # if float conversion works assume radian - angle2 = float(valList[2]); - valList[2] = angle2 - except: # float conversion did not work try hms - valList[2] = deg2rad(dms2deg(valList[2])) - #if valList[2].endswith('deg') or valList[2].endswith('d'): - #valList[2] = deg2rad(valList[2].rstrip(' deg')) - tabs.append(valList) - elif valList[0].startswith('i'): - valList[1] = float(valList[1]) - tabs.append(valList) - except: - raise GenException("An error occurred reading the TAB specification on line '%s'" % line) - return tabs + if line.startswith(keyset + ":") or line.startswith(keyset + "="): + # clear white-space just before ';' + line = re.sub(r"\s+;", ';', line.lstrip(keyset).lstrip(":").lstrip("=").rstrip()) + # clear white-space directly after ';' (this method intentionally does not clear white-space in the + # (string) parameters self!) 
+ line = re.sub(r";\s+", ';', line) + valList = line.split(';') + for strVal in valList: + valListEsc.append(XMLescape(strVal)) + return valListEsc + + +def readTiedArrayBeams(lines): + tabs = [] + stopTABsearch = False + try: + for line in lines: + if line.startswith("TAB") or line.startswith("Global_TAB"): + continue + else: + valList = line.lstrip().rstrip().replace(' ', '').split(';') + if valList[0].startswith('c'): + # angle1 + if valList[1].endswith('deg') or valList[1].endswith('d'): # degree units? + valList[1] = deg2rad(valList[1].rstrip(' deg')) + else: # try radian else HMS + try: # if float conversion works assume radian + angle1 = float(valList[1]) + valList[1] = angle1 + except: # float conversion did not work try hms + valList[1] = deg2rad(hms2deg(valList[1])) + # angle2 + if valList[2].endswith('deg') or valList[2].endswith('d'): # degree units? + valList[2] = deg2rad(valList[2].rstrip(' deg')) + else: # try radian else HMS + try: # if float conversion works assume radian + angle2 = float(valList[2]) + valList[2] = angle2 + except: # float conversion did not work try hms + valList[2] = deg2rad(dms2deg(valList[2])) + # if valList[2].endswith('deg') or valList[2].endswith('d'): + # valList[2] = deg2rad(valList[2].rstrip(' deg')) + tabs.append(valList) + elif valList[0].startswith('i'): + valList[1] = float(valList[1]) + tabs.append(valList) + except: + raise GenException("An error occurred reading the TAB specification on line '%s'" % line) # FIXME line might be undefined? + return tabs + def hasCoherentTab(TAB): - for i in range(0,len(TAB)): - if TAB[i][0] == 'c': - return True - return False + for i in range(0, len(TAB)): + if TAB[i][0] == 'c': + return True + return False + ##FIXME we will need to fill in actual values. 
Might need to depend on variables def processingCluster(cluster, number_of_tasks, number_of_cores_per_task): - CEP4 = r""" <processingCluster> + CEP4 = r""" <processingCluster> <name>CEP4</name> <partition>cpu</partition> <numberOfTasks>%i</numberOfTasks> <minRAMPerTask unit="byte">1000000000</minRAMPerTask> - <minScratchPerTask unit="byte">100000000</minScratchPerTask> + <minScratchPerTask unit="byte">100000000</minScratchPerTask> <maxDurationPerTask>P7DT0S</maxDurationPerTask> <numberOfCoresPerTask>%i</numberOfCoresPerTask> <runSimultaneous>true</runSimultaneous> </processingCluster>""" - if cluster in ["", "CEP4"]: - result = CEP4 % (number_of_tasks, number_of_cores_per_task) - else: - raise GenException("Unknown processing cluster specified: %s" % cluster) - return result + if cluster in ["", "CEP4"]: + result = CEP4 % (number_of_tasks, number_of_cores_per_task) + else: + raise GenException("Unknown processing cluster specified: %s" % cluster) + return result + def dataProductCluster(cluster): - template = r"""<storageCluster> + template = r"""<storageCluster> <name>%s</name> <partition>%s</partition> </storageCluster>""" - if cluster in ["", "CEP4"]: - result = template % ("CEP4", "/data/projects/") - elif cluster == "DRAGNET": - result = template % ("DRAGNET", "") - else: - raise GenException("Unknown storage cluster specified: %s" % cluster) - return result - -def writeXMLObs(ofile, name, descr, topo, predecessor_topo, attrname, projname, TBBpiggyBack, aartfaacPiggyBack, cordata, cohdata, incohdata, antenna, clock, instrfilt, interval, channels, - cohdedisp, flysEye, subsperfileCS, colapseCS, downstepsCS, whichCS, subsperfileIS, colapseIS, downstepsIS, whichIS, stations, start, stop, duration, bitspersample, status): - print >>ofile, r""" <item index="0"> + if cluster in ["", "CEP4"]: + result = template % ("CEP4", "/data/projects/") + elif cluster == "DRAGNET": + result = template % ("DRAGNET", "") + else: + raise GenException("Unknown storage cluster 
specified: %s" % cluster) + return result + + +def writeXMLObs(ofile, name, descr, topo, predecessor_topo, attrname, projname, TBBpiggyBack, aartfaacPiggyBack, + cordata, cohdata, incohdata, antenna, clock, instrfilt, interval, channels, + cohdedisp, flysEye, subsperfileCS, colapseCS, downstepsCS, whichCS, subsperfileIS, colapseIS, + downstepsIS, whichIS, stations, start, stop, duration, bitspersample, status): + print >> ofile, r""" <item index="0"> <lofar:observation> <name>%s</name> <description>%s</description> @@ -359,13 +386,17 @@ def writeXMLObs(ofile, name, descr, topo, predecessor_topo, attrname, projname, </userSpecification> </lofar:observationAttributes> <children>""" % ( - name, descr, topo, predecessor_topo, status, attrname, projname, writeBoolean(TBBpiggyBack), writeBoolean(aartfaacPiggyBack), - writeBoolean(cordata), writeBoolean(cohdata), writeBoolean(incohdata), antenna, clock, instrfilt, interval, channels, - writeBoolean(cohdedisp), writeBoolean(flysEye), subsperfileCS, colapseCS, downstepsCS, whichCS, - subsperfileIS, colapseIS, downstepsIS, whichIS, stations, start, stop, duration, bitspersample) + name, descr, topo, predecessor_topo, status, attrname, projname, writeBoolean(TBBpiggyBack), + writeBoolean(aartfaacPiggyBack), + writeBoolean(cordata), writeBoolean(cohdata), writeBoolean(incohdata), antenna, clock, instrfilt, interval, + channels, + writeBoolean(cohdedisp), writeBoolean(flysEye), subsperfileCS, colapseCS, downstepsCS, whichCS, + subsperfileIS, colapseIS, downstepsIS, whichIS, stations, start, stop, duration, bitspersample) -def writeXMLBeam(ofile, name, description, topo, beamtype, target, ra, dec, subbands, flyseye, tabrings, tabringsize, tablist, dataproducts, status): - print >>ofile, r"""<item index="0"> + +def writeXMLBeam(ofile, name, description, topo, beamtype, target, ra, dec, subbands, flyseye, tabrings, tabringsize, + tablist, dataproducts, status): + print >> ofile, r"""<item index="0"> <lofar:measurement 
xsi:type="lofar:BFMeasurementType"> <name>%s</name> <description>%s</description> @@ -396,37 +427,52 @@ def writeXMLBeam(ofile, name, description, topo, beamtype, target, ra, dec, subb </lofar:bfMeasurementAttributes> <resultDataProducts> %s - </resultDataProducts> + </resultDataProducts> </lofar:measurement> - </item>""" % ( name, description, topo, status, beamtype, target, ra, dec, subbands, writeBoolean(flyseye), - tabrings, tabringsize, tablist, dataproducts ) - + </item>""" % ( + name, description, topo, status, beamtype, target, ra, dec, subbands, writeBoolean(flyseye), + tabrings, tabringsize, tablist, dataproducts) + + def writeXMLObsEnd(ofile): - print >> ofile, r"""</children> + print >> ofile, r"""</children> </lofar:observation> </item>""" + def writeTABXML(TAB): - strVal = r"" - for i in range(0,len(TAB)): - if TAB[i][0] == 'c': - strVal += r""" <tiedArrayBeam> + strVal = r"" + for i in range(0, len(TAB)): + if TAB[i][0] == 'c': + strVal += r""" <tiedArrayBeam> <coherent>true</coherent> <angle1>%s</angle1> <angle2>%s</angle2> </tiedArrayBeam> - """ % (TAB[i][1],TAB[i][2]) - else: - strVal += r""" <tiedArrayBeam> + """ % (TAB[i][1], TAB[i][2]) + else: + strVal += r""" <tiedArrayBeam> <coherent>false</coherent> <dispersionMeasure>%s</dispersionMeasure> </tiedArrayBeam> """ % (TAB[i][1]) - strVal = strVal.rstrip() # strip off the last newline - return strVal + strVal = strVal.rstrip() # strip off the last newline + return strVal + + +def writeMiscParameters(ofile, miscParameters): + """ + :param ofile: the xml-string mess so far + :param miscParameters: A dict(!) 
with the parameters (not some magically interpreted list like elsewhere) + :return: + """ + if miscParameters is not None and len(miscParameters) > 0: + j = json.dumps(miscParameters) + print >> ofile, r"""<misc>%s</misc>""" % j + def writeBBSParameters(ofile, bbsParameters): - print >> ofile, r""" <bbsParameters> + print >> ofile, r""" <bbsParameters> <baselines>%s</baselines> <correlations>%s</correlations> <beamModelEnable>%s</beamModelEnable> @@ -434,11 +480,14 @@ def writeBBSParameters(ofile, bbsParameters): <solveUVRange>%s</solveUVRange> <strategyBaselines>%s</strategyBaselines> <strategyTimeRange>%s</strategyTimeRange> - </bbsParameters>""" % (bbsParameters[0], bbsParameters[1], writeBoolean(bbsParameters[2]), bbsParameters[3], bbsParameters[4], bbsParameters[5], bbsParameters[6]) - ##TODO % {"baselines":, "correlations":, writeBoolean("beamenable":), "solveparms":, "solveuvrange":, "strategybaselines":, "strategytimerange":} + </bbsParameters>""" % ( + bbsParameters[0], bbsParameters[1], writeBoolean(bbsParameters[2]), bbsParameters[3], bbsParameters[4], + bbsParameters[5], bbsParameters[6]) + ##TODO % {"baselines":, "correlations":, writeBoolean("beamenable":), "solveparms":, "solveuvrange":, "strategybaselines":, "strategytimerange":} + def writeDemixParameters(ofile, demixParameters): - print >> ofile, r""" <demixingParameters> + print >> ofile, r""" <demixingParameters> <averagingFreqStep>%s</averagingFreqStep> <averagingTimeStep>%s</averagingTimeStep> <demixFreqStep>%s</demixFreqStep> @@ -446,15 +495,19 @@ def writeDemixParameters(ofile, demixParameters): <demixAlways>%s</demixAlways> <demixIfNeeded>%s</demixIfNeeded> <ignoreTarget>%s</ignoreTarget> - </demixingParameters>""" % (demixParameters[0], demixParameters[1], demixParameters[2], demixParameters[3], demixParameters[4], demixParameters[5], writeBoolean(demixParameters[6])) ##TODO writeBoolean() Might be reduntant? 
Should do the conversion earlier - ##TODO % {"averagingFreqStep":, "averagingTimeStep":, "demixFreqStep":, "demixTimeStep":, writeBoolean("demixAlways":), writeBoolean("demixIfNeeded":), writeBoolean("ignoreTarget":)} - -def writeXMLTargetPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, flagging, - duration, demixParameters, bbsParameters, uvintopo, uvinname, - instrintopo, instrinname, uvoutname, uvouttopo, storageCluster, status, nr_tasks, nr_cores_per_task): - stor_cluster = dataProductCluster(storageCluster) - proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) - print >> ofile, r"""<item index="0"> + </demixingParameters>""" % ( + demixParameters[0], demixParameters[1], demixParameters[2], demixParameters[3], demixParameters[4], + demixParameters[5], + writeBoolean(demixParameters[6])) ##TODO writeBoolean() Might be reduntant? Should do the conversion earlier + ##TODO % {"averagingFreqStep":, "averagingTimeStep":, "demixFreqStep":, "demixTimeStep":, writeBoolean("demixAlways":), writeBoolean("demixIfNeeded":), writeBoolean("ignoreTarget":)} + + +def writeXMLTargetPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, flagging, duration, demixParameters, + bbsParameters, uvintopo, uvinname, instrintopo, instrinname, uvoutname, uvouttopo, + storageCluster, status, nr_tasks, nr_cores_per_task, miscParameters): + stor_cluster = dataProductCluster(storageCluster) + proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) + print >> ofile, r"""<item index="0"> <lofar:pipeline xsi:type="lofar:CalibrationPipelineType"> <topology>%s</topology> <predecessor_topology>%s</predecessor_topology> @@ -463,16 +516,17 @@ def writeXMLTargetPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, <currentStatus> <mom2:%sStatus/> </currentStatus>""" % (topo, pred_topo, name, name, descr, status) - if proc_cluster: - print >> ofile, proc_cluster - print >> ofile, r""" <pipelineAttributes> + if proc_cluster: 
+ print >> ofile, proc_cluster + print >> ofile, r""" <pipelineAttributes> <defaultTemplate>%s</defaultTemplate> <flaggingStrategy>%s</flaggingStrategy> <duration>%s</duration>""" % (defaulttemplate, flagging, duration) - writeDemixParameters(ofile, demixParameters) - ##TODO if bbsParameters: ?? - writeBBSParameters(ofile, bbsParameters) - print >> ofile, r"""</pipelineAttributes> + writeDemixParameters(ofile, demixParameters) + ##TODO if bbsParameters: ?? + writeBBSParameters(ofile, bbsParameters) + writeMiscParameters(ofile, miscParameters) + print >> ofile, r"""</pipelineAttributes> <usedDataProducts> <item> <lofar:uvDataProduct topology="%s"> @@ -493,16 +547,18 @@ def writeXMLTargetPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, <status>no_data</status> %s </lofar:uvDataProduct> - </item> - </resultDataProducts> + </item> + </resultDataProducts> </lofar:pipeline> - </item>""" % (uvintopo, uvinname, instrintopo, instrinname, uvoutname, uvouttopo, stor_cluster) + </item>""" % (uvintopo, uvinname, instrintopo, instrinname, uvoutname, uvouttopo, stor_cluster) + -def writeXMLCalPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, flagging, duration, skymodel, demixParameters, - bbsParameters, uvintopo, instroutname, instrouttopo, uvouttopo, storageCluster, status, nr_tasks, nr_cores_per_task): - stor_cluster = dataProductCluster(storageCluster) - proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) - print >> ofile, r""" <item index="0"> +def writeXMLCalPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, flagging, duration, skymodel, demixParameters, + bbsParameters, uvintopo, instroutname, instrouttopo, uvouttopo, storageCluster, status, nr_tasks, + nr_cores_per_task, miscParameters): + stor_cluster = dataProductCluster(storageCluster) + proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) + print >> ofile, r""" <item index="0"> <lofar:pipeline xsi:type="lofar:CalibrationPipelineType"> 
<topology>%s</topology> <predecessor_topology>%s</predecessor_topology> @@ -510,18 +566,19 @@ def writeXMLCalPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, flaggi <description>%s (%s)</description> <currentStatus> <mom2:%sStatus/> - </currentStatus>""" %(topo, pred_topo, name, name, descr, status) - if proc_cluster: - print >> ofile, proc_cluster - print >> ofile, r""" <pipelineAttributes> + </currentStatus>""" % (topo, pred_topo, name, name, descr, status) + if proc_cluster: + print >> ofile, proc_cluster + print >> ofile, r""" <pipelineAttributes> <defaultTemplate>%s</defaultTemplate> <flaggingStrategy>%s</flaggingStrategy> <duration>%s</duration> <skyModelDatabase>%s</skyModelDatabase>""" % (defaulttemplate, flagging, duration, skymodel) - writeDemixParameters(ofile, demixParameters) - ##TODO if bbsParameters: ?? - writeBBSParameters(ofile, bbsParameters) - print >> ofile, r"""</pipelineAttributes> + writeDemixParameters(ofile, demixParameters) + ##TODO if bbsParameters: ?? 
+ writeBBSParameters(ofile, bbsParameters) + writeMiscParameters(ofile, miscParameters) + print >> ofile, r"""</pipelineAttributes> <usedDataProducts> <item> <lofar:uvDataProduct topology="%s"> @@ -549,11 +606,13 @@ def writeXMLCalPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, flaggi </lofar:pipeline> </item>""" % (uvintopo, instroutname, instrouttopo, stor_cluster, uvouttopo, uvouttopo, stor_cluster) -def writeXMLAvgPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, flagging, duration, - demixParameters, uvintopo, uvouttopo, storageCluster, status, nr_tasks, nr_cores_per_task): - stor_cluster = dataProductCluster(storageCluster) - proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) - print >> ofile, r""" <item index="0"> + +def writeXMLAvgPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, flagging, duration, + demixParameters, uvintopo, uvouttopo, storageCluster, status, nr_tasks, nr_cores_per_task, + miscParameters): + stor_cluster = dataProductCluster(storageCluster) + proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) + print >> ofile, r""" <item index="0"> <lofar:pipeline xsi:type="lofar:AveragingPipelineType"> <topology>%s</topology> <predecessor_topology>%s</predecessor_topology> @@ -562,14 +621,15 @@ def writeXMLAvgPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, fl <currentStatus> <mom2:%sStatus/> </currentStatus>""" % (topo, pred_topo, name, name, descr, status) - if proc_cluster: - print >> ofile, proc_cluster - print >> ofile, r""" <pipelineAttributes> + if proc_cluster: + print >> ofile, proc_cluster + print >> ofile, r""" <pipelineAttributes> <defaultTemplate>%s</defaultTemplate> <flaggingStrategy>%s</flaggingStrategy> <duration>%s</duration>""" % (defaulttemplate, flagging, duration) - writeDemixParameters(ofile, demixParameters) - print >> ofile, r"""</pipelineAttributes> + writeDemixParameters(ofile, demixParameters) + writeMiscParameters(ofile, 
miscParameters) + print >> ofile, r"""</pipelineAttributes> <usedDataProducts> <item> <lofar:uvDataProduct topology="%s"> @@ -588,15 +648,17 @@ def writeXMLAvgPipeline(ofile, topo, pred_topo, name, descr, defaulttemplate, fl </resultDataProducts> </lofar:pipeline> </item>""" % (uvintopo, uvouttopo, uvouttopo, stor_cluster) - + + def writeXMLPulsarPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, duration, bfintopo, pouttopo, - storageCluster, status, nr_tasks, nr_cores_per_task, _2bf2fitsExtraOpts, _8bitConversionSigma, - decodeNblocks, decodeSigma, digifilExtraOpts, dspsrExtraOpts, dynamicSpectrumTimeAverage, nofold, nopdmp, norfi, - prepdataExtraOpts, prepfoldExtraOpts, prepsubbandExtraOpts, pulsar, rawTo8bit, rfifindExtraOpts, rrats, singlePulse, - skipDsps, skipDynamicSpectrum, skipPrepfold, tsubint): - stor_cluster = dataProductCluster(storageCluster) - proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) - print >> ofile, r""" <item index="0"> + storageCluster, status, nr_tasks, nr_cores_per_task, _2bf2fitsExtraOpts, _8bitConversionSigma, + decodeNblocks, decodeSigma, digifilExtraOpts, dspsrExtraOpts, dynamicSpectrumTimeAverage, nofold, + nopdmp, norfi, prepdataExtraOpts, prepfoldExtraOpts, prepsubbandExtraOpts, pulsar, rawTo8bit, + rfifindExtraOpts, rrats, singlePulse, skipDspsr, skipDynamicSpectrum, skipPrepfold, tsubint, + miscParameters): + stor_cluster = dataProductCluster(storageCluster) + proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) + print >> ofile, r""" <item index="0"> <lofar:pipeline xsi:type="lofar:PulsarPipelineType"> <topology>%s</topology> <predecessor_topology>%s</predecessor_topology> @@ -605,9 +667,9 @@ def writeXMLPulsarPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, dur <currentStatus> <mom2:%sStatus/> </currentStatus>""" % (topo, pred_topo, name, name, descr, status) - if proc_cluster: - print >> ofile, proc_cluster - print >> ofile, r""" <pipelineAttributes> + 
if proc_cluster: + print >> ofile, proc_cluster + print >> ofile, r""" <pipelineAttributes> <defaultTemplate>%s</defaultTemplate> <duration>%s</duration> <_2bf2fitsExtraOpts>%s</_2bf2fitsExtraOpts> @@ -628,10 +690,18 @@ def writeXMLPulsarPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, dur <rfifindExtraOpts>%s</rfifindExtraOpts> <rrats>%s</rrats> <singlePulse>%s</singlePulse> - <skipDsps>%s</skipDsps> + <skipDspsr>%s</skipDspsr> <skipDynamicSpectrum>%s</skipDynamicSpectrum> <skipPrepfold>%s</skipPrepfold> - <tsubint>%s</tsubint> + <tsubint>%s</tsubint>""" % (defaulttemplate, duration, _2bf2fitsExtraOpts, _8bitConversionSigma, + decodeNblocks, decodeSigma, digifilExtraOpts, dspsrExtraOpts, + dynamicSpectrumTimeAverage, writeBoolean(nofold), writeBoolean(nopdmp), + writeBoolean(norfi), prepdataExtraOpts, prepfoldExtraOpts, + prepsubbandExtraOpts, pulsar, writeBoolean(rawTo8bit), rfifindExtraOpts, + writeBoolean(rrats), writeBoolean(singlePulse), writeBoolean(skipDspsr), + writeBoolean(skipDynamicSpectrum), writeBoolean(skipPrepfold), tsubint) + writeMiscParameters(ofile, miscParameters) + print >> ofile, r""" </pipelineAttributes> <usedDataProducts> <item> @@ -650,20 +720,16 @@ def writeXMLPulsarPipe(ofile, topo, pred_topo, name, descr, defaulttemplate, dur </item> </resultDataProducts> </lofar:pipeline> - </item>""" % (defaulttemplate, duration, _2bf2fitsExtraOpts, _8bitConversionSigma, - decodeNblocks, decodeSigma, digifilExtraOpts, dspsrExtraOpts, dynamicSpectrumTimeAverage, - writeBoolean(nofold), writeBoolean(nopdmp), writeBoolean(norfi), - prepdataExtraOpts, prepfoldExtraOpts, prepsubbandExtraOpts, pulsar, writeBoolean(rawTo8bit), - rfifindExtraOpts, writeBoolean(rrats), writeBoolean(singlePulse), - writeBoolean(skipDsps), writeBoolean(skipDynamicSpectrum), writeBoolean(skipPrepfold), tsubint, - bfintopo, pouttopo, pouttopo, stor_cluster) - -#nv 13okt2014: #6716 - Implement Long Baseline Pipeline + </item>""" % (bfintopo, pouttopo, pouttopo, 
stor_cluster) + + +# nv 13okt2014: #6716 - Implement Long Baseline Pipeline def writeXMLLongBaselinePipe(ofile, topo, pred_topo, name, descr, defaulttemplate, duration, - subbands_per_subbandgroup, subbandgroups_per_ms, uvintopo, uvouttopo, storageCluster, status, nr_tasks, nr_cores_per_task): - stor_cluster = dataProductCluster(storageCluster) - proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) - print >> ofile, r""" <item index="0"> + subbands_per_subbandgroup, subbandgroups_per_ms, uvintopo, uvouttopo, storageCluster, + status, nr_tasks, nr_cores_per_task): + stor_cluster = dataProductCluster(storageCluster) + proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) + print >> ofile, r""" <item index="0"> <lofar:pipeline xsi:type="lofar:LongBaselinePipelineType"> <topology>%s</topology> <predecessor_topology>%s</predecessor_topology> @@ -672,9 +738,9 @@ def writeXMLLongBaselinePipe(ofile, topo, pred_topo, name, descr, defaulttemplat <currentStatus> <mom2:%sStatus/> </currentStatus>""" % (topo, pred_topo, name, name, descr, status) - if proc_cluster: - print >> ofile, proc_cluster - print >> ofile, r""" <pipelineAttributes> + if proc_cluster: + print >> ofile, proc_cluster + print >> ofile, r""" <pipelineAttributes> <defaultTemplate>%s</defaultTemplate> <duration>%s</duration> <subbandsPerSubbandGroup>%s</subbandsPerSubbandGroup> @@ -697,14 +763,15 @@ def writeXMLLongBaselinePipe(ofile, topo, pred_topo, name, descr, defaulttemplat </item> </resultDataProducts> </lofar:pipeline> - </item>""" % (defaulttemplate, duration, subbands_per_subbandgroup, subbandgroups_per_ms, - uvintopo, uvouttopo, uvouttopo, stor_cluster) + </item>""" % (defaulttemplate, duration, subbands_per_subbandgroup, subbandgroups_per_ms, + uvintopo, uvouttopo, uvouttopo, stor_cluster) + def writeDataProducts(dataTopo, correlatedData, coherentStokesData, incoherentStokesData, storageCluster): - strVal = r"" - if correlatedData: - dataTopoStr = 
dataTopo + '.uv.dps' - strVal += r""" <item> + strVal = r"" + if correlatedData: + dataTopoStr = dataTopo + '.uv.dps' + strVal += r""" <item> <lofar:uvDataProduct> <name>%s</name> <topology>%s</topology> @@ -713,14 +780,14 @@ def writeDataProducts(dataTopo, correlatedData, coherentStokesData, incoherentSt </lofar:uvDataProduct> </item> """ % (dataTopoStr, dataTopoStr, dataProductCluster(storageCluster)) - if coherentStokesData | incoherentStokesData: - if coherentStokesData & ~incoherentStokesData: - dataTopoStr = dataTopo + '.cs' - elif incoherentStokesData & ~coherentStokesData: - dataTopoStr = dataTopo + '.is' - else: - dataTopoStr = dataTopo + '.csis' - strVal += r""" <item> + if coherentStokesData | incoherentStokesData: + if coherentStokesData & ~incoherentStokesData: + dataTopoStr = dataTopo + '.cs' + elif incoherentStokesData & ~coherentStokesData: + dataTopoStr = dataTopo + '.is' + else: + dataTopoStr = dataTopo + '.csis' + strVal += r""" <item> <lofar:bfDataProduct> <name>%s</name> <topology>%s</topology> @@ -728,23 +795,25 @@ def writeDataProducts(dataTopo, correlatedData, coherentStokesData, incoherentSt %s </lofar:bfDataProduct> </item> - """ % (dataTopoStr, dataTopoStr, dataProductCluster(storageCluster)) - strVal = strVal.rstrip() # strip off the last newline - return strVal + """ % (dataTopoStr, dataTopoStr, dataProductCluster(storageCluster)) + strVal = strVal.rstrip() # strip off the last newline + return strVal + def writeImagingPipelineInputDataproducts(ofile, topologyList): - print >> ofile, r""" <usedDataProducts>""" - for topology in topologyList: - print >> ofile, r""" <item> + print >> ofile, r""" <usedDataProducts>""" + for topology in topologyList: + print >> ofile, r""" <item> <lofar:uvDataProduct topology="%s"> <name>%s</name> </lofar:uvDataProduct> </item>""" % (topology, topology) - print >> ofile, r""" </usedDataProducts>""" + print >> ofile, r""" </usedDataProducts>""" + def writeSkyImageOutputDataproduct(ofile, topology, 
storageCluster): - stor_cluster = dataProductCluster(storageCluster) - print >> ofile, r""" <resultDataProducts> + stor_cluster = dataProductCluster(storageCluster) + print >> ofile, r""" <resultDataProducts> <item> <lofar:skyImageDataProduct> <name>%s</name> @@ -757,41 +826,46 @@ def writeSkyImageOutputDataproduct(ofile, topology, storageCluster): </lofar:pipeline> </item>""" % (topology, topology, stor_cluster) + def writeFolderStart(ofile, folderNr, packageName, packageDescription, processing): - print >>ofile, r""" <item index="0"> + print >> ofile, r""" <item index="0"> <lofar:folder topology_parent="true"> <topology>%s</topology> <name>%s</name> <description>%s (%s)</description> <children>""" % (folderNr, packageName, packageDescription, processing) + def writeFolderEnd(ofile): - print >> ofile, r"""</children> + print >> ofile, r"""</children> </lofar:folder> </item>""" + def writeBoolean(booleanValue): - if booleanValue == '': - return '' - elif booleanValue: - return 'true' - else: - return 'false' - + if booleanValue == '': + return '' + elif booleanValue: + return 'true' + else: + return 'false' + + def toBool(strVal): - strVal = strVal.rstrip().lstrip() - if strVal.rstrip() in TRUE: - return True - elif strVal in FALSE: - return False - #TODO I think we want something else here? - elif strVal == '': - return '' - else: - raise GenException(strVal + " could not be represented as a boolean value") + strVal = strVal.rstrip().lstrip() + if strVal.rstrip() in TRUE: + return True + elif strVal in FALSE: + return False + # TODO I think we want something else here? 
+ elif strVal == '': + return '' + else: + raise GenException(strVal + " could not be represented as a boolean value") + def writeProjectStart(ofile, version, projectName): - print >>ofile, r"""<?xml version="1.0" encoding="UTF-8"?> + print >> ofile, r"""<?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> <version>%s</version> <template version="%s" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> @@ -800,25 +874,30 @@ def writeProjectStart(ofile, version, projectName): <name>%s</name> <children>""" % (version, version, version, projectName) + def writeProjectEnd(ofile): - print >> ofile, r""" </children> + print >> ofile, r""" </children> </lofar:project>""" + def writeMainFolderStart(ofile, mainFolderName, mainFolderDescription): - print >>ofile, r""" <item index="0"> + print >> ofile, r""" <item index="0"> <lofar:folder topology_parent="false"> <name>%s</name> <description>%s</description> <children>""" % (mainFolderName, mainFolderDescription) + def writeMainFolderEnd(ofile): - print >> ofile, r"""</children> + print >> ofile, r"""</children> </lofar:folder> </item>""" -def writeImagingPipelineXML(ofile, input_list, bbsParameters, storageCluster, status, nr_tasks, nr_cores_per_task): - proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) - print >> ofile, r"""<item index="0"> + +def writeImagingPipelineXML(ofile, input_list, bbsParameters, storageCluster, status, nr_tasks, nr_cores_per_task, + miscParameters): + proc_cluster = processingCluster(storageCluster, nr_tasks, nr_cores_per_task) + print >> ofile, r"""<item index="0"> <lofar:pipeline xsi:type="lofar:%(imaging_pipe_type)s"> 
<topology>%(imaging_pipe_topology)s</topology> <predecessor_topology>%(imaging_pipe_predecessors_string)s</predecessor_topology> @@ -827,9 +906,9 @@ def writeImagingPipelineXML(ofile, input_list, bbsParameters, storageCluster, st <currentStatus> <mom2:%(initial_status)sStatus/> </currentStatus>""" % (input_list) - if proc_cluster: - print >> ofile, proc_cluster - print >> ofile, r""" <imagingPipelineAttributes> + if proc_cluster: + print >> ofile, proc_cluster + print >> ofile, r""" <imagingPipelineAttributes> <defaultTemplate>%(imaging_pipe_default_template)s</defaultTemplate> <duration>%(imaging_pipe_duration)s</duration> <nrOfOutputSkyImage>%(nrImages)s</nrOfOutputSkyImage> @@ -846,1486 +925,1949 @@ def writeImagingPipelineXML(ofile, input_list, bbsParameters, storageCluster, st <uvMax>%(uvMax)s</uvMax> <stokes>%(stokesToImage)s</stokes> </imagingParameters>""" % (input_list) - if bbsParameters: - writeBBSParameters(ofile, bbsParameters) - print >> ofile, r""" + if bbsParameters: + writeBBSParameters(ofile, bbsParameters) + writeMiscParameters(ofile, miscParameters) + print >> ofile, r""" </imagingPipelineAttributes>""" + def parseOptions(argv): - inputfile = '' - outputfile = '' - status = "opened" - - try: - opts, args = getopt.getopt(argv,"hi:o:a",["ifile=","ofile="]) - except getopt.GetoptError: - print 'xmlgen.py -i <inputfile> [-o <outputfile>] [-a]' - sys.exit(2) - - if len(opts) == 0: - print 'usage: xmlgen.py -i <inputfile> [-o <outputfile>] [-a]' - sys.exit(2) - - for opt, arg in opts: - if opt == '-h': - print 'usage: xmlgen.py -i <inputfile> [-o <outputfile.xml>] [-a]' - sys.exit() - elif opt in ("-i", "--ifile"): - inputfile = arg - elif opt in ("-o", "--ofile"): - outputfile = arg - elif opt in ("-a"): - status="approved" - - if (outputfile == inputfile): - raise GenException("Output file'" + outputfile + "' has the same name as inputfile") - if len(outputfile): - print "Writing output xml file: " + outputfile - else: - outputfile = 
splitext(inputfile)[0] + '.xml' - print "Output file not specified, writing output xml file:'" + outputfile + "'" - return (inputfile, outputfile, status) + inputfile = '' + outputfile = '' + status = "opened" + + try: + opts, args = getopt.getopt(argv, "hi:o:a", ["ifile=", "ofile="]) + except getopt.GetoptError: + print 'xmlgen.py -i <inputfile> [-o <outputfile>] [-a]' + sys.exit(2) + + if len(opts) == 0: + print 'usage: xmlgen.py -i <inputfile> [-o <outputfile>] [-a]' + sys.exit(2) + + for opt, arg in opts: + if opt == '-h': + print 'usage: xmlgen.py -i <inputfile> [-o <outputfile.xml>] [-a]' + sys.exit() + elif opt in ("-i", "--ifile"): + inputfile = arg + elif opt in ("-o", "--ofile"): + outputfile = arg + elif opt in ("-a"): + status = "approved" + + if (outputfile == inputfile): + raise GenException("Output file'" + outputfile + "' has the same name as inputfile") + if len(outputfile): + print "Writing output xml file: " + outputfile + else: + outputfile = splitext(inputfile)[0] + '.xml' + print "Output file not specified, writing output xml file:'" + outputfile + "'" + return (inputfile, outputfile, status) + def processInput(inputfile): - ifile = open(inputfile, 'r') - lines = ifile.readlines() - header = [] - blocks = [] - block = [] - block_count = 0 - for l in lines: - line = l.strip() - if line: ##skipping empty lines - if not line[0] == "#": #skipping comments - if "BLOCK" in line: - if block_count == 0: - header = block - else: - if len(block) > 1: #We have at least BLOCK - blocks.append(block) - else: - printWarning("BLOCK %i was found to be empty" % block_count) - block = [] - block_count += 1 - stripped_line = line.split('#')[0] - if stripped_line: #Not sure if this can happen? 
- block.append(stripped_line) - if len(block) > 1: #We have at least BLOCK - blocks.append(block) - else: - printWarning("BLOCK %i was found to be empty" % block_count) - ifile.close() - return (header, blocks) - + ifile = open(inputfile, 'r') + lines = ifile.readlines() + header = [] + blocks = [] + block = [] + block_count = 0 + for l in lines: + line = l.strip() + if line: ##skipping empty lines + if not line[0] == "#": # skipping comments + if "BLOCK" in line: + if block_count == 0: + header = block + else: + if len(block) > 1: # We have at least BLOCK + blocks.append(block) + else: + printWarning("BLOCK %i was found to be empty" % block_count) + block = [] + block_count += 1 + stripped_line = line.split('#')[0] + if stripped_line: # Not sure if this can happen? + block.append(stripped_line) + if len(block) > 1: # We have at least BLOCK + blocks.append(block) + else: + printWarning("BLOCK %i was found to be empty" % block_count) + ifile.close() + return (header, blocks) + + def wrongCombiError(): - #TODO check if this list matches the actual code, replace it with a print of the define? - raise GenException("the combination of antennaMode, clock and instrumentFilter is not a valid combination, should be one of:\n \ + # TODO check if this list matches the actual code, replace it with a print of the define? + raise GenException("the combination of antennaMode, clock and instrumentFilter is not a valid combination, should be one of:\n \ LBA - 160 MHz > '10-70 MHz', '30-70 MHz'\n \ LBA - 200 MHz > '10-90 MHz', '30-90 MHz'\n \ HBA - 160 MHz > '170-230 MHz'\n \ HBA - 200 MHz > '110-190 MHz', '210-250 MHz'") + def readProcessing(value): - if value: - processing = value - try: - p = PROCESSING.index(processing)+1 - except ValueError: - raise GenException("the specified processing '" + processing + "' is not recognized. 
It should be one of %s" % ", ".join(PROCESSING)) - print "processing = %s" % processing - else: - processing = '' - return processing + if value: + processing = value + try: + p = PROCESSING.index(processing) + 1 + except ValueError: + raise GenException( + "the specified processing '" + processing + "' is not recognized. It should be one of %s" % ", ".join( + PROCESSING)) + print "processing = %s" % processing + else: + processing = '' + return processing + def readKeyValuePair(line): - if not '=' in line: #TODO print line/linenumber - raise GenException("'=' not found in line that should have one!") - split = line.split('=') - key = split[0].strip() - if not key: #TODO print line/linenumber - raise GenException("Found a line starting with '='!") - if len(split) < 2: - value = '' - else: - value = split[1].strip() - if len(split) > 2: - raise GenException("Found a line with multiple '='s") - return key, value + if not '=' in line: # TODO print line/linenumber + raise GenException("'=' not found in line that should have one!") + split = line.split('=') + key = split[0].strip() + if not key: # TODO print line/linenumber + raise GenException("Found a line starting with '='!") + if len(split) < 2: + value = '' + else: + value = split[1].strip() + if len(split) > 2: + raise GenException("Found a line with multiple '='s") + return key, value + def readBoolKey(keyname, value): - if value: - key = toBool(value) - print "%s = %s" % (keyname, value) - else: - raise GenException("the %s has not been specified" % keyname) - return key + if value: + key = toBool(value) + print "%s = %s" % (keyname, value) + else: + raise GenException("the %s has not been specified" % keyname) + return key + def readStringKey(keyname, value): - if value: - key = value - print "%s = %s" % (keyname, value) - else: - raise GenException("the %s has not been specified" % keyname) - return key + if value: + key = value + print "%s = %s" % (keyname, value) + else: + raise GenException("the %s has not 
been specified" % keyname) + return key + def readIntKey(keyname, value): - if value: - key = int(value) #TODO try: ? - print "%s = %s" % (keyname, key) - else: - raise GenException("the %s has not been specified" % keyname) - return key + if value: + key = int(value) # TODO try: ? + print "%s = %s" % (keyname, key) + else: + raise GenException("the %s has not been specified" % keyname) + return key + def readFloatKey(keyname, value): - if value: - key = float(value) #TODO try: ? - print "%s = %s" % (keyname, key) - else: - raise GenException("the %s has not been specified" % keyname) - return key + if value: + key = float(value) # TODO try: ? + print "%s = %s" % (keyname, key) + else: + raise GenException("the %s has not been specified" % keyname) + return key + def readListKey(keyname, value): - if keyname == "whichIS": keylist = WHICH_IS - if keyname == "whichCS": keylist = WHICH_CS - if keyname == "imagingPipeline": keylist = IMAGING_PIPELINE_TYPES - if keyname == "clock": keylist = CLOCK_MODES - if keyname == "instrumentFilter": keylist = INSTRUMENT_FILTERS - if keyname == "antennaMode": keylist = ANTENNA_MODES - if keyname == "weightingScheme": keylist = WEIGHTING_SCHEMES - if keyname == "calibration": keylist = CALIBRATION_MODE - if value: - key = value - if key not in keylist: - raise GenException("the %s parameter '%s' not correct. Should be one of %s" % (keyname, value, ", ".join(keylist))) - print "%s = %s" % (keyname, key) - else: #TODO added this as it seemed to make sense? 
- raise GenException("the %s has not been specified" % keyname) - return key + if keyname == "whichIS": keylist = WHICH_IS + if keyname == "whichCS": keylist = WHICH_CS + if keyname == "imagingPipeline": keylist = IMAGING_PIPELINE_TYPES + if keyname == "clock": keylist = CLOCK_MODES + if keyname == "instrumentFilter": keylist = INSTRUMENT_FILTERS + if keyname == "antennaMode": keylist = ANTENNA_MODES + if keyname == "weightingScheme": keylist = WEIGHTING_SCHEMES + if keyname == "calibration": keylist = CALIBRATION_MODE + try: + keylist # check if defined + except NameError: + raise GenException("no keylist found for keyname %s." % (keyname)) + if value: + key = value + if key not in keylist: + raise GenException( + "the %s parameter '%s' not correct. Should be one of %s" % (keyname, value, ", ".join(keylist))) + print "%s = %s" % (keyname, key) + else: # TODO added this as it seemed to make sense? + raise GenException("the %s has not been specified" % keyname) + return key + def readIntListKey(keyname, value): - if keyname == "numberOfBitsPerSample": keylist = NUMBER_OF_BITS_PER_SAMPLE - if value: - key = int(value) #TODO try? - if key not in keylist: - raise GenException("the %s parameter '%s' not correct. Should be one of %s" % (keyname, value, str(keylist))) - print "%s = %s" % (keyname, key) - else: #TODO added this as it seemed to make sense? - raise GenException("the %s has not been specified" % keyname) - return key + if keyname == "numberOfBitsPerSample": keylist = NUMBER_OF_BITS_PER_SAMPLE + try: + keylist # check if defined + except NameError: + raise GenException("no keylist found for keyname %s." % (keyname)) + if value: + key = int(value) # TODO try? + if key not in keylist: + raise GenException( + "the %s parameter '%s' not correct. Should be one of %s" % (keyname, value, str(keylist))) + print "%s = %s" % (keyname, key) + else: # TODO added this as it seemed to make sense? 
+ raise GenException("the %s has not been specified" % keyname) + return key + def processHeader(header): - for line in header: - key, value = readKeyValuePair(line) - if key == "projectName": - projectName = readStringKey("projectName", value) - elif key == "mainFolderName": - mainFolderName = readOptionalStringKey("mainFolderName", value) - elif key == "mainFolderDescription": - mainFolderDescription = readOptionalStringKey("mainFolderDescription", value) - return projectName, mainFolderName, mainFolderDescription + for line in header: + key, value = readKeyValuePair(line) + if key == "projectName": + projectName = readStringKey("projectName", value) + elif key == "mainFolderName": + mainFolderName = readOptionalStringKey("mainFolderName", value) + elif key == "mainFolderDescription": + mainFolderDescription = readOptionalStringKey("mainFolderDescription", value) + try: + # check if everything was defined + projectName + mainFolderName + mainFolderDescription + except NameError as ex: + raise GenException("Could not find all expected keys in header: %s" % ex.message) + + return projectName, mainFolderName, mainFolderDescription + def readOptionalStringKey(keyname, value): - if value: - key = value - print "%s = %s" % (keyname, value) - else: - printWarning("The %s has not been specified" % keyname) - key = "" #TODO put in some dummy description? - return key + if value: + key = value + print "%s = %s" % (keyname, value) + else: + printWarning("The %s has not been specified" % keyname) + key = "" # TODO put in some dummy description? + return key + def readPackageTag(value): - if value: - packageTag = value - if len(packageTag) > 8: - raise GenException("the package tag:'" + packageTag + "' is too long. Max 8 characters.") - print "package tag = %s" % packageTag - else: - packageTag = '' - print "no package tag will be used." 
- return packageTag + if value: + packageTag = value + if len(packageTag) > 8: + raise GenException("the package tag:'" + packageTag + "' is too long. Max 8 characters.") + print "package tag = %s" % packageTag + else: + packageTag = '' + print "no package tag will be used." + return packageTag + def readStartTimeUTC(value): - if value: - startTimeUTC = value - startTime = datetime.strptime(startTimeUTC, '%Y-%m-%d %H:%M:%S') - print "start time (UTC) = %s" % startTime.strftime('%b %d %Y %H:%M:%S') - set_starttime = True - return startTime, set_starttime + if value: + startTimeUTC = value + startTime = datetime.strptime(startTimeUTC, '%Y-%m-%d %H:%M:%S') + print "start time (UTC) = %s" % startTime.strftime('%b %d %Y %H:%M:%S') + set_starttime = True + + return startTime, set_starttime + def readTimeStep(number, value): - if value: - timeStep = int(value) - print "time step%i = %s seconds" % (number, timeStep) - else: - timeStep = '' - return timeStep + if value: + timeStep = int(value) + print "time step%i = %s seconds" % (number, timeStep) + else: + timeStep = '' + return timeStep + def readStationList(value): - if value: - stationList = ','.join(sorted(set(value.replace('core',CORE_STATIONS).replace('superterp',SUPERTERP_STATIONS).replace('remote',REMOTE_STATIONS).replace('international',INTERNATIONAL_STATIONS).replace('all',ALL_STATIONS).replace('NL',NL_STATIONS).replace('nl',NL_STATIONS).replace('dutch',NL_STATIONS).split(',')))) - print "stations = %s" % stationList - else: - raise GenException("the stationList has not been specified") - return stationList + if value: + stationList = ','.join(sorted(set( + value.replace( + 'core', CORE_STATIONS).replace( + 'superterp', SUPERTERP_STATIONS).replace( + 'remote', REMOTE_STATIONS).replace( + 'international', INTERNATIONAL_STATIONS).replace( + 'all', ALL_STATIONS).replace( + 'NL', NL_STATIONS).replace( + 'nl', NL_STATIONS).replace( + 'dutch', NL_STATIONS).split(',')))) + print "stations = %s" % stationList + else: + 
raise GenException("the stationList has not been specified") + return stationList + def readCreate_extra_ncp_beam(value): - if value: - create_extra_ncp_beam = toBool(value) #TODO toBool can return True, False or '' - if create_extra_ncp_beam: - print "extra ncp beam will be created" + if value: + create_extra_ncp_beam = toBool(value) # TODO toBool can return True, False or '' + if create_extra_ncp_beam: + print "extra ncp beam will be created" + else: + print "extra ncp beam will not be created" else: - print "extra ncp beam will not be created" - else: - raise GenException("create_extra_ncp_beam has not been specified") - return create_extra_ncp_beam + raise GenException("create_extra_ncp_beam has not been specified") + return create_extra_ncp_beam + def readGlobalBBS(value): - globalBBS = ['','','','true','','','',''] - if value: - valList = value.split(';') - for i in range(0,len(valList)): - globalBBS[i] = XMLescape(valList[i]) - globalBBS[3] = toBool(globalBBS[3]) - return globalBBS + globalBBS = ['', '', '', 'true', '', '', '', ''] + if value: + valList = value.split(';') + for i in range(0, len(valList)): + globalBBS[i] = XMLescape(valList[i]) + globalBBS[3] = toBool(globalBBS[3]) + return globalBBS + def readImagingBBS(value): - imagingBBS = ['','','true','','','',''] - if value: - valList = value.split(';') - for i in range(0,len(valList)): - imagingBBS[i] = XMLescape(valList[i]) - imagingBBS[2] = toBool(imagingBBS[2]) - return imagingBBS + imagingBBS = ['', '', 'true', '', '', '', ''] + if value: + valList = value.split(';') + for i in range(0, len(valList)): + imagingBBS[i] = XMLescape(valList[i]) + imagingBBS[2] = toBool(imagingBBS[2]) + return imagingBBS + def checkDemixMultiples(avg_freq_step, avg_time_step, demix_freq_step, demix_time_step, name): - try: - if avg_freq_step and demix_freq_step: - if int(demix_freq_step) % int(avg_freq_step) <> 0: - raise GenException("demixFreqStep (%s) should be integer multiple of averagingFreqStep (%s) for %s" % 
(demix_freq_step, avg_freq_step, name)) - if avg_time_step and demix_time_step: - if int(demix_time_step) % int(avg_time_step) <> 0: - raise GenException("demixTimeStep (%s) should be integer multiple of averagingTimeStep (%s) for %s" % (demix_time_step, avg_time_step, name)) - except: - raise GenException("I can't read the Demix values for %s" % name) + try: + if avg_freq_step and demix_freq_step: + if int(demix_freq_step) % int(avg_freq_step) <> 0: + raise GenException("demixFreqStep (%s) should be integer multiple of averagingFreqStep (%s) for %s" % ( + demix_freq_step, avg_freq_step, name)) + if avg_time_step and demix_time_step: + if int(demix_time_step) % int(avg_time_step) <> 0: + raise GenException("demixTimeStep (%s) should be integer multiple of averagingTimeStep (%s) for %s" % ( + demix_time_step, avg_time_step, name)) + except: + raise GenException("I can't read the Demix values for %s" % name) + def readGlobalDemix(value): - globalDemix = ['','','','','','',''] - if value: - valList = value.split(';') - for i in range(0,len(valList)): - globalDemix[i] = valList[i] - checkDemixMultiples(globalDemix[0], globalDemix[1], globalDemix[2], globalDemix[3], "globalDemix") - globalDemix[6] = toBool(globalDemix[6]) # convert ignoreTarget to bool - return globalDemix + globalDemix = ['', '', '', '', '', '', ''] + if value: + valList = value.split(';') + for i in range(0, len(valList)): + globalDemix[i] = valList[i] + checkDemixMultiples(globalDemix[0], globalDemix[1], globalDemix[2], globalDemix[3], "globalDemix") + globalDemix[6] = toBool(globalDemix[6]) # convert ignoreTarget to bool + return globalDemix + def readGlobalPulsar(value): - globalPulsar = ['','','','','','','','','','','','','','','','','','','','','',''] - if value: - valList = re.sub(r"\s+;", ';', value) # clear white-space just before ';' - valList = re.sub(r";\s+", ';', valList).split(';') # clear white-space directly after ';' (this method intentionally does not clear white-space in the 
(string) parameters self!) - for i in range(0,len(valList)): - globalPulsar[i] = XMLescape(valList[i]) - globalPulsar[1] = toBool(globalPulsar[1]) # singlePulse - globalPulsar[2] = toBool(globalPulsar[2]) # rawTo8bit - globalPulsar[7] = toBool(globalPulsar[7]) # norfi - globalPulsar[8] = toBool(globalPulsar[8]) # nofold - globalPulsar[9] = toBool(globalPulsar[9]) # nopdmp - globalPulsar[10] = toBool(globalPulsar[10]) # skipDsps - globalPulsar[11] = toBool(globalPulsar[11]) # rrats - globalPulsar[19] = toBool(globalPulsar[19]) # skipDynamicSpectrum - globalPulsar[20] = toBool(globalPulsar[20]) # skipPrepfold - return globalPulsar + globalPulsar = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''] + if value: + valList = re.sub(r"\s+;", ';', value) # clear white-space just before ';' + valList = re.sub(r";\s+", ';', valList).split( + ';') # clear white-space directly after ';' (this method intentionally does not clear white-space in the (string) parameters self!) 
+ for i in range(0, len(valList)): + globalPulsar[i] = XMLescape(valList[i]) + globalPulsar[1] = toBool(globalPulsar[1]) # singlePulse + globalPulsar[2] = toBool(globalPulsar[2]) # rawTo8bit + globalPulsar[7] = toBool(globalPulsar[7]) # norfi + globalPulsar[8] = toBool(globalPulsar[8]) # nofold + globalPulsar[9] = toBool(globalPulsar[9]) # nopdmp + globalPulsar[10] = toBool(globalPulsar[10]) # skipDspsr + globalPulsar[11] = toBool(globalPulsar[11]) # rrats + globalPulsar[19] = toBool(globalPulsar[19]) # skipDynamicSpectrum + globalPulsar[20] = toBool(globalPulsar[20]) # skipPrepfold + return globalPulsar + def readGlobalSubbands(value): - if value: - globalSubbands = value.replace(' ','').split(';') - if (len(globalSubbands) == 2) and (globalSubbands[1].rstrip() != ''): - verifySubbandList("Global_Subbands", globalSubbands[0], globalSubbands[1]) + if value: + globalSubbands = value.replace(' ', '').split(';') + if (len(globalSubbands) == 2) and (globalSubbands[1].rstrip() != ''): + verifySubbandList("Global_Subbands", globalSubbands[0], globalSubbands[1]) + else: + raise GenException( + "Global_Subbands error: not enough parameters specified. Should be: subband list;nr_subbands") else: - raise GenException("Global_Subbands error: not enough parameters specified. 
Should be: subband list;nr_subbands") - else: - raise GenException("Global_Subbands specified incorrectly") - return globalSubbands + raise GenException("Global_Subbands specified incorrectly") + return globalSubbands + def readGlobalTABrings(value): - if value: - globalTABrings = value.split(';') - if (len(globalTABrings) == 2) and (globalTABrings[1].rstrip() != ''): - globalTABrings[0] = int(globalTABrings[0]) # nrTABrings - if globalTABrings[1].endswith('deg') or globalTABrings[1].endswith('d'): - globalTABrings[1] = deg2rad(globalTABrings[1].rstrip(' deg')) - else: - globalTABrings[1] = float(globalTABrings[1]) # TAB ring size - else: - globalTABrings = [] - return globalTABrings + if value: + globalTABrings = value.split(';') + if (len(globalTABrings) == 2) and (globalTABrings[1].rstrip() != ''): + globalTABrings[0] = int(globalTABrings[0]) # nrTABrings + if globalTABrings[1].endswith('deg') or globalTABrings[1].endswith('d'): + globalTABrings[1] = deg2rad(globalTABrings[1].rstrip(' deg')) + else: + globalTABrings[1] = float(globalTABrings[1]) # TAB ring size + else: + globalTABrings = [] + return globalTABrings + def findBeamSpecs(startLine, lines): - beams = [] - beamSpec = '' - for lineNr in range(startLine, len(lines)): - line = lines[lineNr] - if line[:1].isdigit(): #startswith a digit, new beam - if beamSpec: #save previous found one - beam = {'beam': beamSpec, 'pipelines': pipelines, "TABs":TABs} + beams = [] + beamSpec = '' + for lineNr in range(startLine, len(lines)): + line = lines[lineNr] + if line[:1].isdigit(): # startswith a digit, new beam + if beamSpec: # save previous found one + beam = {'beam': beamSpec, 'pipelines': pipelines, "TABs": TABs} + beams.append(beam) + beamSpec = line + pipelines = [] + TABs = [] + if line.startswith(('BBS', 'Demix', 'Pulsar')): # Can contain '=' + pipelines.append(line) # FIXME: pipelines might be undefined + elif '=' in line: # key=value pair, so end of beam spec + break + if line.startswith(('TAB', 'c;', 
'i;')): + TABs.append(line) # FIXME: TABs might be undefined + if beamSpec: # save last found one + beam = {'beam': beamSpec, 'pipelines': pipelines, "TABs": TABs} beams.append(beam) - beamSpec = line - pipelines = [] - TABs = [] - if line.startswith(('BBS','Demix','Pulsar')): #Can contain '=' - pipelines.append(line) - elif '=' in line: #key=value pair, so end of beam spec - break; - if line.startswith(('TAB','c;','i;')): - TABs.append(line) - if beamSpec: # save last found one - beam = {'beam': beamSpec, 'pipelines': pipelines, "TABs":TABs} - beams.append(beam) - return beams - -def readCalibratorBeam(startLine, lines, globalSubbands, globalTABrings, globalBBS, globalDemix, globalTAB, coherentStokesData, flysEye): - printInfo("found a calibrator beam") - beamspecs = findBeamSpecs(startLine, lines) - if len(beamspecs) < 1: - raise GenException("the calibration beam is not specified") - #TODO currently only one Calibrator Beam? - beam = beamspecs[0]["beam"] - pipelines = beamspecs[0]["pipelines"] - TABs = beamspecs[0]["TABs"] - - nr_parms = beam.count(';') + 1 - if nr_parms > 9: - raise GenException("too many parameters for calibrator beam: " + beam) - elif nr_parms < 8: - raise GenException("too few parameters for calibrator beam: " + beam) - else: - calibratorBeam = beam.replace(' ;',';').replace('; ',';').split(';') - if nr_parms == 9: - try: - calibratorBeam[8] = int(calibratorBeam[8]) # the (optionally specified) duration of the pipeline - except: - raise GenException("the specified pipeline duration: " + calibratorBeam[8] + " needs to be an integer value in seconds") - else: - calibratorBeam.append(0) - - # convert coordinated HMS to degrees - # Right Ascension - calibratorBeam[0] = convertAngle1(calibratorBeam[0], "calibratorBeam") - # declination - calibratorBeam[1] = convertAngle2(calibratorBeam[1], "calibratorBeam") - - if not calibratorBeam[3]: - if globalSubbands: - calibratorBeam[3] = globalSubbands[0] - calibratorBeam[4] = globalSubbands[1] - else: - 
raise GenException("No subbands specified for the calibrator beam") - else: - verifySubbandList("calibratorBeam", calibratorBeam[3], calibratorBeam[4]) - - if not calibratorBeam[5]: # TABrings specified? - if globalTABrings: - calibratorBeam[5] = globalTABrings[0] - calibratorBeam[6] = globalTABrings[1] - else: - calibratorBeam[5] = int(calibratorBeam[5]) # nrTABrings - if calibratorBeam[6].endswith('deg') or calibratorBeam[6].endswith('d'): - calibratorBeam[6] = deg2rad(calibratorBeam[6].rstrip(' deg')) - else: - calibratorBeam[6] = float(calibratorBeam[6]) # TAB ring size - - calibratorBeam[7] = toBool(calibratorBeam[7]) # create pipeline? - create_calibrator_pipeline = calibratorBeam[7] - print ("right ascenscion:" + str(calibratorBeam[0]) + " declination:" + str(calibratorBeam[1]) + " target:" + calibratorBeam[2] + " subbands:" + calibratorBeam[3] + " nrSubbands:" + calibratorBeam[4] + " create pipeline:" + str(calibratorBeam[7])) - - if create_calibrator_pipeline: - BBSDefault = ['','','','true','','','',''] - DemixDefault = ['','','','','','',''] - calibratorBBS = [] #Can now be a list of pipelines per beam - calibratorDemix = [] - for pipeline in pipelines: - if pipeline.startswith("BBS"): - calibratorBBS.append(BBSDefault[:]) # [:] is needed to make a deep copy - calBBS = readExtraParms("BBS", [pipeline]) - if len(calBBS) > 0: - for i in range(0,len(calBBS)): - calibratorBBS[-1][i] = calBBS[i] - calibratorBBS[-1][3] = toBool(calibratorBBS[-1][3]) - elif globalBBS != []: - printInfo('Using global BBS settings for Calibrator beam pipeline') - for i in range(0,len(globalBBS)): - calibratorBBS[-1][i] = globalBBS[i] - - if pipeline.startswith("Demix"): - calibratorDemix.append(DemixDefault[:]) # [:] is needed to make a deep copy - calDemix = readExtraParms("Demix", [pipeline]) - if len(calDemix) > 0: - for i in range(0,len(calDemix)): - calibratorDemix[-1][i] = calDemix[i] - checkDemixMultiples(calibratorDemix[-1][0], calibratorDemix[-1][1], 
calibratorDemix[-1][2], calibratorDemix[-1][3], "calibratorDemix") - calibratorDemix[-1][6] = toBool(calibratorDemix[-1][6]) - elif globalDemix != []: - printInfo('Using global demix settings for Calibrator beam pipeline') - for i in range(0,len(globalDemix)): - calibratorDemix[-1][i] = globalDemix[i] - - calibratorTAB = readTiedArrayBeams(TABs) - if not calibratorTAB: - if globalTAB: - printInfo('Using global TABs for calibrator beam') - calibratorTAB = globalTAB #TODO check no possibility for globalTABrings? - if coherentStokesData and not (hasCoherentTab(calibratorTAB) or flysEye): - raise GenException("CalibratorBeam: no coherent TAB specified while coherent Stokes data requested") - - if not calibratorBBS: - calibratorBBS.append(BBSDefault) - if globalBBS: - printInfo('Using global BBS settings for pipeline(s) coupled to Calibrator beam') - for i in range(0,len(globalBBS)): - calibratorBBS[-1][i] = globalBBS[i] - - if not calibratorDemix: - calibratorDemix.append(DemixDefault) - if globalDemix: - printInfo('Using global demix settings for pipeline(s) coupled to Calibrator beam') - for i in range(0,len(globalDemix)): - calibratorDemix[-1][i] = globalDemix[i] - - return calibratorBeam, calibratorBBS, calibratorDemix, calibratorTAB, create_calibrator_pipeline - -def readTargetBeams(startLine, lines, globalSubbands, globalBBS, globalDemix, globalPulsar, globalTAB, globalTABrings, coherentStokesData, flysEye, numberOfBitsPerSample): - printInfo('found the target beams') - beamspecs = findBeamSpecs(startLine, lines) - if len(beamspecs) < 1: - raise GenException("the target beams are not specified") - targetBeams = [] - targetTAB = [] - targetBBS = [] - targetDemix = [] - targetPulsar = [] - nr_beams = 0 - - for beamspec in beamspecs: - beam = beamspec["beam"] - pipelines = beamspec["pipelines"] - TABs = beamspec["TABs"] + return beams + + +def readCalibratorBeam(startLine, lines, globalSubbands, globalTABrings, globalBBS, globalDemix, globalTAB, + 
coherentStokesData, flysEye): + printInfo("found a calibrator beam") + beamspecs = findBeamSpecs(startLine, lines) + if len(beamspecs) < 1: + raise GenException("the calibration beam is not specified") + # TODO currently only one Calibrator Beam? + beam = beamspecs[0]["beam"] + pipelines = beamspecs[0]["pipelines"] + TABs = beamspecs[0]["TABs"] + nr_parms = beam.count(';') + 1 if nr_parms > 9: - raise GenException("too many parameters for target beam: " + beam) + raise GenException("too many parameters for calibrator beam: " + beam) elif nr_parms < 8: - raise GenException("too few parameters for target beam: " + beam) + raise GenException("too few parameters for calibrator beam: " + beam) else: - targetBeams.append(beam.replace(' ;',';').replace('; ',';').split(';')) - - if nr_parms == 9: - try: - targetBeams[nr_beams][8] = int(targetBeams[nr_beams][8]) # the (optionally specified) duration of the pipeline - except: - raise GenException("the specified pipeline duration: " + targetBeams[nr_beams][8] + " needs to be an integer value in seconds") - else: - targetBeams[nr_beams].append(0) - - # convert coordinated HMS to degrees - # right ascension - targetBeams[nr_beams][0] = convertAngle1(targetBeams[nr_beams][0], "targetBeam:" + str(nr_beams)) - # declination - targetBeams[nr_beams][1] = convertAngle2(targetBeams[nr_beams][1], "targetBeam:" + str(nr_beams)) - - if not targetBeams[nr_beams][3]: - if globalSubbands: - targetBeams[nr_beams][3] = globalSubbands[0] - targetBeams[nr_beams][4] = globalSubbands[1] - printInfo('Using Global_Subband settings for target beam: %i' % nr_beams) + calibratorBeam = beam.replace(' ;', ';').replace('; ', ';').split(';') + if nr_parms == 9: + try: + calibratorBeam[8] = int(calibratorBeam[8]) # the (optionally specified) duration of the pipeline + except: + raise GenException("the specified pipeline duration: " + calibratorBeam[8] + + " needs to be an integer value in seconds") + else: + calibratorBeam.append(0) + + # convert 
coordinated HMS to degrees + # Right Ascension + calibratorBeam[0] = convertAngle1(calibratorBeam[0], "calibratorBeam") + # declination + calibratorBeam[1] = convertAngle2(calibratorBeam[1], "calibratorBeam") + + if not calibratorBeam[3]: + if globalSubbands: + calibratorBeam[3] = globalSubbands[0] + calibratorBeam[4] = globalSubbands[1] + else: + raise GenException("No subbands specified for the calibrator beam") else: - raise GenException("No subbands specified for the calibrator beam") - else: - verifySubbandList("targetBeam %i" % (nr_beams+1), targetBeams[nr_beams][3], targetBeams[nr_beams][4]) - - if not targetBeams[nr_beams][5]: # TABrings specified? - if globalTABrings: - targetBeams[nr_beams][5] = globalTABrings[0] - targetBeams[nr_beams][6] = globalTABrings[1] - printInfo('Using Global_TABrings settings for target beam: %i' % nr_beams) + verifySubbandList("calibratorBeam", calibratorBeam[3], calibratorBeam[4]) + + if not calibratorBeam[5]: # TABrings specified? + if globalTABrings: + calibratorBeam[5] = globalTABrings[0] + calibratorBeam[6] = globalTABrings[1] else: - targetBeams[nr_beams][5] = 0 - else: - targetBeams[nr_beams][5] = int(targetBeams[nr_beams][5]) - if targetBeams[nr_beams][5] > 0: - if targetBeams[nr_beams][6].endswith('deg') or targetBeams[nr_beams][6].endswith('d'): - targetBeams[nr_beams][6] = deg2rad(targetBeams[nr_beams][6].rstrip(' deg')) - else: #TODO try? - targetBeams[nr_beams][6] = float(targetBeams[nr_beams][6]) # TAB ring size - - targetBeams[nr_beams][7] = toBool(targetBeams[nr_beams][7]) # create pipeline coupled to target beam? 
- print ("right ascenscion:" + str(targetBeams[nr_beams][0]) + " declination:" + str(targetBeams[nr_beams][1]) + " target:" + targetBeams[nr_beams][2] + " subbands:" + targetBeams[nr_beams][3] + " nrSubbands:" + targetBeams[nr_beams][4] - + " create pipeline:" + str(targetBeams[nr_beams][7])) - - BBSDefault = ['','','','true','','','',''] - DemixDefault = ['','','','','','',''] - PulsarDefault = ['','','','','','','','','','','','','','','','','','','','','',''] - targetBBS.append([]) #Can now be a list of pipelines per beam - targetDemix.append([]) - targetPulsar.append([]) - if targetBeams[nr_beams][7]: # pipeline created? - for pipeline in pipelines: - if pipeline.startswith("BBS"): - targetBBS[nr_beams].append(BBSDefault[:]) # [:] is needed to make a deep copy - tarBBS = readExtraParms("BBS", [pipeline]) - for i in range(0, len(tarBBS)): - targetBBS[nr_beams][-1][i] = tarBBS[i] - targetBBS[nr_beams][-1][3] = toBool(targetBBS[nr_beams][-1][3]) - - if pipeline.startswith("Demix"): - targetDemix[nr_beams].append(DemixDefault[:]) # [:] is needed to make a deep copy - tarDemix = readExtraParms("Demix", [pipeline]) - if len(tarDemix) >= 4: - for i in range(0,len(tarDemix)): - targetDemix[nr_beams][-1][i] = tarDemix[i] - checkDemixMultiples(targetDemix[nr_beams][-1][0], targetDemix[nr_beams][-1][1], targetDemix[nr_beams][-1][2], targetDemix[nr_beams][-1][3], "targetDemix[%i]" % nr_beams) - targetDemix[nr_beams][-1][6] = toBool(targetDemix[nr_beams][-1][6]) # convert ignoreTarget to bool - elif len(tarDemix) > 0: - raise GenException("Demixing parameters should at least have the first four averaging/demixing steps (block %s, targetBeam %s)" % (blockNr, nr_beams)) - - if pipeline.startswith("Pulsar"): - targetPulsar[nr_beams].append(PulsarDefault[:]) # [:] is needed to make a deep copy - tarPulsar = readExtraParms("Pulsar", [pipeline]) - if len(tarPulsar) > 0: - for i in range(0,len(tarPulsar)): - targetPulsar[nr_beams][-1][i] = tarPulsar[i] - 
targetPulsar[nr_beams][-1][1] = toBool(targetPulsar[nr_beams][-1][1]) # singlePulse - targetPulsar[nr_beams][-1][2] = toBool(targetPulsar[nr_beams][-1][2]) # rawTo8bit - targetPulsar[nr_beams][-1][7] = toBool(targetPulsar[nr_beams][-1][7]) # norfi - targetPulsar[nr_beams][-1][8] = toBool(targetPulsar[nr_beams][-1][8]) # nofold - targetPulsar[nr_beams][-1][9] = toBool(targetPulsar[nr_beams][-1][9]) # nopdmp - targetPulsar[nr_beams][-1][10] = toBool(targetPulsar[nr_beams][-1][10]) # skipDsps - targetPulsar[nr_beams][-1][11] = toBool(targetPulsar[nr_beams][-1][11]) # rrats - targetPulsar[nr_beams][-1][19] = toBool(targetPulsar[nr_beams][-1][19]) # skipDynamicSpectrum - targetPulsar[nr_beams][-1][20] = toBool(targetPulsar[nr_beams][-1][20]) # skipPrepfold - - if not targetBBS[nr_beams]: - targetBBS[nr_beams].append(BBSDefault) - if globalBBS: - printInfo('Using global BBS settings for pipeline(s) coupled to target beam:' + str(nr_beams)) - for i in range(0,len(globalBBS)): - targetBBS[nr_beams][-1][i] = globalBBS[i] - - if not targetDemix[nr_beams]: - targetDemix[nr_beams].append(DemixDefault) - if globalDemix: - printInfo('Using global demix settings for pipeline(s) coupled to target beam:' + str(nr_beams)) - for i in range(0,len(globalDemix)): - targetDemix[nr_beams][-1][i] = globalDemix[i] - - if not targetPulsar[nr_beams]: - targetPulsar[nr_beams].append(PulsarDefault) - if globalPulsar: - printInfo('Using global Pulsar settings for pulsar pipeline(s) coupled to target beam:' + str(nr_beams)) - for i in range(0,len(globalPulsar)): - targetPulsar[nr_beams][-1][i] = globalPulsar[i] - - tarTAB = readTiedArrayBeams(TABs) - if tarTAB: - targetTAB.append(tarTAB) - elif globalTAB: - targetTAB.append(globalTAB) - else: - targetTAB.append([]) - if coherentStokesData and not (hasCoherentTab(targetTAB[-1]) or (targetBeams[nr_beams][5] > 0) or flysEye): - raise GenException("Target Beam %i: no coherent TAB specified while coherent Stokes data requested" % nr_beams) - nr_beams 
+= 1 - totSubbands = sum([int(targetBeams[i][4]) for i in range(len(targetBeams))]) - maxSubbands = MAX_NR_SUBBANDS[NUMBER_OF_BITS_PER_SAMPLE.index(numberOfBitsPerSample)] - print "total subbands for all target beams = %s" % totSubbands - if totSubbands > maxSubbands: #TODO this doesn't count the calibrator beam! - raise GenException("the total number of subbands (%s) for all target beams exceeds the maximum number of subbands (%s) for %s bit mode" % (totSubbands, maxSubbands, numberOfBitsPerSample)) - return targetBeams, targetBBS, targetDemix, targetPulsar, targetTAB, nr_beams + calibratorBeam[5] = int(calibratorBeam[5]) # nrTABrings + if calibratorBeam[6].endswith('deg') or calibratorBeam[6].endswith('d'): + calibratorBeam[6] = deg2rad(calibratorBeam[6].rstrip(' deg')) + else: + calibratorBeam[6] = float(calibratorBeam[6]) # TAB ring size + + calibratorBeam[7] = toBool(calibratorBeam[7]) # create pipeline? + create_calibrator_pipeline = calibratorBeam[7] + print ("right ascenscion:" + str(calibratorBeam[0]) + " declination:" + str(calibratorBeam[1]) + " target:" + + calibratorBeam[2] + " subbands:" + calibratorBeam[3] + " nrSubbands:" + calibratorBeam[ + 4] + " create pipeline:" + str(calibratorBeam[7])) + + if create_calibrator_pipeline: + BBSDefault = ['', '', '', 'true', '', '', '', ''] + DemixDefault = ['', '', '', '', '', '', ''] + calibratorBBS = [] # Can now be a list of pipelines per beam + calibratorDemix = [] + for pipeline in pipelines: + if pipeline.startswith("BBS"): + calibratorBBS.append(BBSDefault[:]) # [:] is needed to make a deep copy + calBBS = readExtraParms("BBS", [pipeline]) + if len(calBBS) > 0: + for i in range(0, len(calBBS)): + calibratorBBS[-1][i] = calBBS[i] + calibratorBBS[-1][3] = toBool(calibratorBBS[-1][3]) + elif globalBBS != []: + printInfo('Using global BBS settings for Calibrator beam pipeline') + for i in range(0, len(globalBBS)): + calibratorBBS[-1][i] = globalBBS[i] + + if pipeline.startswith("Demix"): + 
calibratorDemix.append(DemixDefault[:]) # [:] is needed to make a deep copy + calDemix = readExtraParms("Demix", [pipeline]) + if len(calDemix) > 0: + for i in range(0, len(calDemix)): + calibratorDemix[-1][i] = calDemix[i] + checkDemixMultiples(calibratorDemix[-1][0], calibratorDemix[-1][1], calibratorDemix[-1][2], + calibratorDemix[-1][3], "calibratorDemix") + calibratorDemix[-1][6] = toBool(calibratorDemix[-1][6]) + elif globalDemix != []: + printInfo('Using global demix settings for Calibrator beam pipeline') + for i in range(0, len(globalDemix)): + calibratorDemix[-1][i] = globalDemix[i] + + calibratorTAB = readTiedArrayBeams(TABs) + if not calibratorTAB: + if globalTAB: + printInfo('Using global TABs for calibrator beam') + calibratorTAB = globalTAB # TODO check no possibility for globalTABrings? + if coherentStokesData and not (hasCoherentTab(calibratorTAB) or flysEye): + raise GenException("CalibratorBeam: no coherent TAB specified while coherent Stokes data requested") + + if not calibratorBBS: # FIXME: calibratorBBS might be undefined + calibratorBBS.append(BBSDefault) # FIXME: BBSDefault might be undefined + if globalBBS: + printInfo('Using global BBS settings for pipeline(s) coupled to Calibrator beam') + for i in range(0, len(globalBBS)): + calibratorBBS[-1][i] = globalBBS[i] + + if not calibratorDemix: # FIXME: calibratorDemix might be undefined + calibratorDemix.append(DemixDefault) # FIXME: DemixDefault might be undefined + if globalDemix: + printInfo('Using global demix settings for pipeline(s) coupled to Calibrator beam') + for i in range(0, len(globalDemix)): + calibratorDemix[-1][i] = globalDemix[i] + + return calibratorBeam, calibratorBBS, calibratorDemix, calibratorTAB, create_calibrator_pipeline + + +def readTargetBeams(startLine, lines, globalSubbands, globalBBS, globalDemix, globalPulsar, globalTAB, globalTABrings, + coherentStokesData, flysEye, numberOfBitsPerSample): + printInfo('found the target beams') + beamspecs = 
findBeamSpecs(startLine, lines) + if len(beamspecs) < 1: + raise GenException("the target beams are not specified") + targetBeams = [] + targetTAB = [] + targetBBS = [] + targetDemix = [] + targetPulsar = [] + nr_beams = 0 + + for beamspec in beamspecs: + beam = beamspec["beam"] + pipelines = beamspec["pipelines"] + TABs = beamspec["TABs"] + nr_parms = beam.count(';') + 1 + if nr_parms > 9: + raise GenException("too many parameters for target beam: " + beam) + elif nr_parms < 8: + raise GenException("too few parameters for target beam: " + beam) + else: + targetBeams.append(beam.replace(' ;', ';').replace('; ', ';').split(';')) + + if nr_parms == 9: + try: + targetBeams[nr_beams][8] = int( + targetBeams[nr_beams][8]) # the (optionally specified) duration of the pipeline + except: + raise GenException("the specified pipeline duration: " + targetBeams[nr_beams][ + 8] + " needs to be an integer value in seconds") + else: + targetBeams[nr_beams].append(0) + + # convert coordinated HMS to degrees + # right ascension + targetBeams[nr_beams][0] = convertAngle1(targetBeams[nr_beams][0], "targetBeam:" + str(nr_beams)) + # declination + targetBeams[nr_beams][1] = convertAngle2(targetBeams[nr_beams][1], "targetBeam:" + str(nr_beams)) + + if not targetBeams[nr_beams][3]: + if globalSubbands: + targetBeams[nr_beams][3] = globalSubbands[0] + targetBeams[nr_beams][4] = globalSubbands[1] + printInfo('Using Global_Subband settings for target beam: %i' % nr_beams) + else: + raise GenException("No subbands specified for the calibrator beam") + else: + verifySubbandList("targetBeam %i" % (nr_beams + 1), targetBeams[nr_beams][3], targetBeams[nr_beams][4]) + + if not targetBeams[nr_beams][5]: # TABrings specified? 
+ if globalTABrings: + targetBeams[nr_beams][5] = globalTABrings[0] + targetBeams[nr_beams][6] = globalTABrings[1] + printInfo('Using Global_TABrings settings for target beam: %i' % nr_beams) + else: + targetBeams[nr_beams][5] = 0 + else: + targetBeams[nr_beams][5] = int(targetBeams[nr_beams][5]) + if targetBeams[nr_beams][5] > 0: + if targetBeams[nr_beams][6].endswith('deg') or targetBeams[nr_beams][6].endswith('d'): + targetBeams[nr_beams][6] = deg2rad(targetBeams[nr_beams][6].rstrip(' deg')) + else: # TODO try? + targetBeams[nr_beams][6] = float(targetBeams[nr_beams][6]) # TAB ring size + + targetBeams[nr_beams][7] = toBool(targetBeams[nr_beams][7]) # create pipeline coupled to target beam? + print ("right ascenscion:" + str(targetBeams[nr_beams][0]) + " declination:" + + str(targetBeams[nr_beams][1]) + " target:" + targetBeams[nr_beams][2] + " subbands:" + + targetBeams[nr_beams][3] + " nrSubbands:" + targetBeams[nr_beams][4] + " create pipeline:" + + str(targetBeams[nr_beams][7])) + + BBSDefault = ['', '', '', 'true', '', '', '', ''] + DemixDefault = ['', '', '', '', '', '', ''] + PulsarDefault = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''] + targetBBS.append([]) # Can now be a list of pipelines per beam + targetDemix.append([]) + targetPulsar.append([]) + if targetBeams[nr_beams][7]: # pipeline created? 
+ for pipeline in pipelines: + if pipeline.startswith("BBS"): + targetBBS[nr_beams].append(BBSDefault[:]) # [:] is needed to make a deep copy + tarBBS = readExtraParms("BBS", [pipeline]) + for i in range(0, len(tarBBS)): + targetBBS[nr_beams][-1][i] = tarBBS[i] + targetBBS[nr_beams][-1][3] = toBool(targetBBS[nr_beams][-1][3]) + + if pipeline.startswith("Demix"): + targetDemix[nr_beams].append(DemixDefault[:]) # [:] is needed to make a deep copy + tarDemix = readExtraParms("Demix", [pipeline]) + if len(tarDemix) >= 4: + for i in range(0, len(tarDemix)): + targetDemix[nr_beams][-1][i] = tarDemix[i] + checkDemixMultiples(targetDemix[nr_beams][-1][0], targetDemix[nr_beams][-1][1], + targetDemix[nr_beams][-1][2], targetDemix[nr_beams][-1][3], + "targetDemix[%i]" % nr_beams) + targetDemix[nr_beams][-1][6] = toBool( + targetDemix[nr_beams][-1][6]) # convert ignoreTarget to bool + elif len(tarDemix) > 0: + raise GenException( + "Demixing parameters should at least have the first four averaging/demixing steps") + #"(block %s, targetBeam %s)" % (blockNr, nr_beams)) # FIXME: blockNr is undefined!!! 
+ + if pipeline.startswith("Pulsar"): + targetPulsar[nr_beams].append(PulsarDefault[:]) # [:] is needed to make a deep copy + tarPulsar = readExtraParms("Pulsar", [pipeline]) + if len(tarPulsar) > 0: + for i in range(0, len(tarPulsar)): + targetPulsar[nr_beams][-1][i] = tarPulsar[i] + targetPulsar[nr_beams][-1][1] = toBool(targetPulsar[nr_beams][-1][1]) # singlePulse + targetPulsar[nr_beams][-1][2] = toBool(targetPulsar[nr_beams][-1][2]) # rawTo8bit + targetPulsar[nr_beams][-1][7] = toBool(targetPulsar[nr_beams][-1][7]) # norfi + targetPulsar[nr_beams][-1][8] = toBool(targetPulsar[nr_beams][-1][8]) # nofold + targetPulsar[nr_beams][-1][9] = toBool(targetPulsar[nr_beams][-1][9]) # nopdmp + targetPulsar[nr_beams][-1][10] = toBool(targetPulsar[nr_beams][-1][10]) # skipDspsr + targetPulsar[nr_beams][-1][11] = toBool(targetPulsar[nr_beams][-1][11]) # rrats + targetPulsar[nr_beams][-1][19] = toBool( + targetPulsar[nr_beams][-1][19]) # skipDynamicSpectrum + targetPulsar[nr_beams][-1][20] = toBool(targetPulsar[nr_beams][-1][20]) # skipPrepfold + + if not targetBBS[nr_beams]: + targetBBS[nr_beams].append(BBSDefault) + if globalBBS: + printInfo('Using global BBS settings for pipeline(s) coupled to target beam:' + str(nr_beams)) + for i in range(0, len(globalBBS)): + targetBBS[nr_beams][-1][i] = globalBBS[i] + + if not targetDemix[nr_beams]: + targetDemix[nr_beams].append(DemixDefault) + if globalDemix: + printInfo('Using global demix settings for pipeline(s) coupled to target beam:' + str(nr_beams)) + for i in range(0, len(globalDemix)): + targetDemix[nr_beams][-1][i] = globalDemix[i] + + if not targetPulsar[nr_beams]: + targetPulsar[nr_beams].append(PulsarDefault) + if globalPulsar: + printInfo('Using global Pulsar settings for pulsar pipeline(s) coupled to target beam:' + + str(nr_beams)) + for i in range(0, len(globalPulsar)): + targetPulsar[nr_beams][-1][i] = globalPulsar[i] + + tarTAB = readTiedArrayBeams(TABs) + if tarTAB: + targetTAB.append(tarTAB) + elif globalTAB: + 
targetTAB.append(globalTAB) + else: + targetTAB.append([]) + if coherentStokesData and not (hasCoherentTab(targetTAB[-1]) or (targetBeams[nr_beams][5] > 0) or flysEye): + raise GenException( + "Target Beam %i: no coherent TAB specified while coherent Stokes data requested" % nr_beams) + nr_beams += 1 + totSubbands = sum([int(targetBeams[i][4]) for i in range(len(targetBeams))]) + maxSubbands = MAX_NR_SUBBANDS[NUMBER_OF_BITS_PER_SAMPLE.index(numberOfBitsPerSample)] + print "total subbands for all target beams = %s" % totSubbands + if totSubbands > maxSubbands: # TODO this doesn't count the calibrator beam! + raise GenException( + "the total number of subbands (%s) for all target beams exceeds the maximum number of subbands (%s) for %s bit mode" % ( + totSubbands, maxSubbands, numberOfBitsPerSample)) + return targetBeams, targetBBS, targetDemix, targetPulsar, targetTAB, nr_beams + def checkAntennaModeInstrumentFilterAndClockCombination(antennaMode, instrumentFilter, clock): -#TODO hardcoded values, should check against INSTRUMENT_FILTERS - if clock == '160 MHz': - if antennaMode.startswith('HBA'): # 160 MHz, HBA - if instrumentFilter != '170-230 MHz': - wrongCombiError() - else: # 160 MHz, LBA - if instrumentFilter not in ['10-70 MHz', '30-70 MHz']: - wrongCombiError() - else: - if antennaMode.startswith('HBA'): # 200 MHz, HBA - if instrumentFilter not in ['110-190 MHz', '210-250 MHz']: - wrongCombiError() - else: # 200 MHz, LBA - if instrumentFilter not in ['10-90 MHz', '30-90 MHz']: - wrongCombiError() + # TODO hardcoded values, should check against INSTRUMENT_FILTERS + if clock == '160 MHz': + if antennaMode.startswith('HBA'): # 160 MHz, HBA + if instrumentFilter != '170-230 MHz': + wrongCombiError() + else: # 160 MHz, LBA + if instrumentFilter not in ['10-70 MHz', '30-70 MHz']: + wrongCombiError() + else: + if antennaMode.startswith('HBA'): # 200 MHz, HBA + if instrumentFilter not in ['110-190 MHz', '210-250 MHz']: + wrongCombiError() + else: # 200 MHz, LBA + if 
instrumentFilter not in ['10-90 MHz', '30-90 MHz']: + wrongCombiError() + def determineNrImages(targetBeams, nrSubbandsPerImage, variableName): - nrImages = [] - for beam in targetBeams: - if beam[7]: ##Make pipelines - if int(beam[4]) % nrSubbandsPerImage <> 0: - raise GenException("nrSubbands (%s) should be integer dividable by the %s (%s) for target beam %i" % (beam[4], variableName, nrSubbandsPerImage, targetBeams.index(beam)+1)) - nrImages.append(int(beam[4]) / nrSubbandsPerImage) - else: - nrImages.append(0) - return nrImages + nrImages = [] + for beam in targetBeams: + if beam[7]: ##Make pipelines + if int(beam[4]) % nrSubbandsPerImage <> 0: + raise GenException("nrSubbands (%s) should be integer dividable by the %s (%s) for target beam %i" % ( + beam[4], variableName, nrSubbandsPerImage, targetBeams.index(beam) + 1)) + nrImages.append(int(beam[4]) / nrSubbandsPerImage) + else: + nrImages.append(0) + return nrImages + def readBlock(lines, projectName, blockNr): - s = { ##settings - "set_starttime": False, - "nrRepeats": 1, - "globalSubbands": [], - "globalBBS": [], - "globalDemix": [], - "globalPulsar": [], - "globalTAB": [], - "globalTABrings": [], - "coherentStokesData": False, - "flysEye": False, - "numberOfBitsPerSample": 0} - - for lineNr, cline in enumerate(lines): - if "=" in cline and not cline.startswith(('BBS','Demix','Pulsar')): #we skip beam and pipelines lines - key, value = readKeyValuePair(cline) - if key == "processing": - s["processing"] = readProcessing(value) - elif key == "split_targets": - s["split_targets"] = readBoolKey("split_targets", value) - elif key == "packageName": - s["packageName"] = readStringKey("packageName", value) - elif key == "packageDescription": - s["packageDescription"] = readOptionalStringKey("packageDescription", value) - elif key == "packageTag": - s["packageTag"] = readPackageTag(value) - elif key == "startTimeUTC": - s["startTime"], s["set_starttime"] = readStartTimeUTC(value) - elif key == "timeStep1": - 
s["timeStep1"] = readTimeStep(1, value) - elif key == "timeStep2": - s["timeStep2"] = readTimeStep(2, value) - elif key == "stationList": - s["stationList"] = readStationList(value) - elif key == "create_calibrator_observations": - s["create_calibrator_observations"] = readBoolKey("create_calibrator_observations", value) - elif key == "create_target_cal_beam": - s["create_target_cal_beam"] = readBoolKey("create_target_cal_beam", value) - elif key == "calibration": - s["calibration_mode"] = readListKey("calibration", value) - elif key == "create_extra_ncp_beam": - s["create_extra_ncp_beam"] = readCreate_extra_ncp_beam(value) - elif key == "antennaMode": - s["antennaMode"] = readListKey("antennaMode", value) - elif key == "clock": - s["clock"] = readListKey("clock", value) - elif key == "instrumentFilter": - s["instrumentFilter"] = readListKey("instrumentFilter", value) - elif key == "integrationTime": - #TODO should check if it's a valid float? - s["integrationTime"] = readStringKey("integrationTime", value) - elif key == "correlatedData": - s["correlatedData"] = readBoolKey("correlatedData", value) - elif key == "coherentStokesData": - s["coherentStokesData"] = readBoolKey("coherentStokesData", value) - elif key == "incoherentStokesData": - s["incoherentStokesData"] = readBoolKey("incoherentStokesData", value) - elif key == "coherentDedisperseChannels": - s["coherentDedisperseChannels"] = readBoolKey("coherentDedisperseChannels", value) - elif key == "flysEye": - s["flysEye"] = readBoolKey("flysEye", value) - elif key == "calibratorDuration_s": - s["calibratorDuration_s"] = readIntKey("calibratorDuration_s", value) - elif key == "targetDuration_s": - s["targetDuration_s"] = readIntKey("targetDuration_s", value) - elif key == "numberOfBitsPerSample": - s["numberOfBitsPerSample"] = readIntListKey("numberOfBitsPerSample", value) - elif key == "channelsPerSubband": - #TODO should this be Int? 
- s["channelsPerSubband"] = readStringKey("channelsPerSubband", value) - elif key == "subbandsPerFileCS": - s["subbandsPerFileCS"] = readIntKey("subbandsPerFileCS", value) - elif key == "numberCollapsedChannelsCS": - s["numberCollapsedChannelsCS"] = readIntKey("numberCollapsedChannelsCS", value) - elif key == "stokesDownsamplingStepsCS": - s["stokesDownsamplingStepsCS"] = readIntKey("stokesDownsamplingStepsCS", value) - elif key == "whichCS": - s["whichCS"] = readListKey("whichCS", value) - elif key == "subbandsPerFileIS": - s["subbandsPerFileIS"] = readIntKey("subbandsPerFileIS", value) - elif key == "numberCollapsedChannelsIS": - s["numberCollapsedChannelsIS"] = readIntKey("numberCollapsedChannelsIS", value) - elif key == "stokesDownsamplingStepsIS": - s["stokesDownsamplingStepsIS"] = readIntKey("stokesDownsamplingStepsIS", value) - elif key == "whichIS": - s["whichIS"] = readListKey("whichIS", value) - elif key == "nrSubbandsPerImage": - s["nrSubbandsPerImage"] = readIntKey("nrSubbandsPerImage", value) - elif key == "imagingPipeline": - s["imagingPipeline"] = readListKey("imagingPipeline", value) - elif key == "imagingDuration_s": - s["imaging_pipe_duration"] = readIntKey("imaging_pipe_duration", value) - elif key == "maxBaseline_m": - s["maxBaseline"] = readIntKey("maxBaseline", value) - elif key == "fieldOfView_deg": - s["fieldOfView"] = readFloatKey("fieldOfView", value) - elif key == "weightingScheme": - s["weightingScheme"] = readListKey("weightingScheme", value) - elif key == "robustParameter": - s["robustParameter"] = readFloatKey("robustParameter", value) - elif key == "nrOfIterations": - s["nrOfIterations"] = readIntKey("nrOfIterations", value) - elif key == "cleaningThreshold": - s["cleaningThreshold"] = readFloatKey("cleaningThreshold", value) - elif key == "uvMin_klambda": - s["uvMin"] = readFloatKey("uvMin", value) - elif key == "uvMax_klambda": - s["uvMax"] = readFloatKey("uvMax", value) - elif key == "stokesToImage": - s["stokesToImage"] = 
readStringKey("stokesToImage", value) - elif key == "skyModel": - s["skyModel"] = readStringKey("skyModel", value) - elif key == "tbbPiggybackAllowed": - s["tbbPiggybackAllowed"] = readBoolKey("tbbPiggybackAllowed", value) - elif key == "aartfaacPiggybackAllowed": - s["aartfaacPiggybackAllowed"] = readBoolKey("aartfaacPiggybackAllowed", value) - elif key == "flaggingStrategy": - s["flaggingStrategy"] = readStringKey("flaggingStrategy", value) - elif key == "subbandsPerSubbandGroup": - s["subbandsPerSubbandGroup"] = readIntKey("subbandsPerSubbandGroup", value) - elif key == "subbandGroupsPerMS": - s["subbandGroupsPerMS"] = readIntKey("subbandGroupsPerMS", value) - elif key == "Global_BBS": - s["globalBBS"] = readGlobalBBS(value) - elif key == "Imaging_BBS": - s["imagingBBS"] = readImagingBBS(value) - elif key == "Global_Demix": - s["globalDemix"] = readGlobalDemix(value) - elif key == "Global_Pulsar": - s["globalPulsar"] = readGlobalPulsar(value) - elif key == "Global_Subbands": - s["globalSubbands"] = readGlobalSubbands(value) - elif key == "Global_TAB": - s["globalTAB"] = readTiedArrayBeams(lines, lineNr, nr_lines) - elif key == "Global_TABrings": - s["globalTABrings"] = readGlobalTABrings(value) - elif key == "calibratorBeam": - s["calibratorBeam"], s["calibratorBBS"], s["calibratorDemix"], s["calibratorTAB"], s["create_calibrator_pipeline"] = \ - readCalibratorBeam(lineNr+1, lines, s["globalSubbands"], s["globalTABrings"], s["globalBBS"], s["globalDemix"], s["globalTAB"], - s["coherentStokesData"], s["flysEye"]) - elif key == 'targetBeams': - s["targetBeams"], s["targetBBS"], s["targetDemix"], s["targetPulsar"], s["targetTAB"], s["nr_beams"] = \ - readTargetBeams(lineNr+1, lines, s["globalSubbands"], s["globalBBS"], s["globalDemix"], s["globalPulsar"], s["globalTAB"], - s["globalTABrings"], s["coherentStokesData"], s["flysEye"], s["numberOfBitsPerSample"]) - elif key == "repeat": - try: - s["nrRepeats"] = int(value) - print "number of repeats = %s" % 
s["nrRepeats"] - except: - raise GenException("the repeat parameter is not valid for BLOCK: %i" % blockNr) - elif key == "cluster": - s["cluster"] = readStringKey("cluster", value) - elif key == "nr_tasks": - try: - s["nr_tasks"] = int(value) - print "number of tasks = %i" % s["nr_tasks"] - except: - raise GenException("the number of tasks parameter is not valid for BLOCK: %i" % blockNr) - elif key == "nr_cores_per_task": - try: - s["nr_cores_per_task"] = int(value) - print "number of cores per task = %i" % s["nr_cores_per_task"] - except: - raise GenException("the number of cores per task parameter is not valid for BLOCK: %i" % blockNr) - elif key == "nr_nodes": - try: - s["nr_tasks"] = int(value) * DEFAULT_TASKS_PER_NODE - s["nr_cores_per_task"] = DEFAULT_CORES_PER_TASK - print "number of nodes found, converted to number of tasks = %i, number of cores per task = %i" % (s["nr_tasks"], s["nr_cores_per_task"]) - except: - raise GenException("the number of nodes parameter is not valid for BLOCK: %i" % blockNr) - else: - raise GenException("unknown key:'%s' in BLOCK: %i" % (key, blockNr)) - return s ##settings + s = { ##settings + "set_starttime": False, + "nrRepeats": 1, + "globalSubbands": [], + "globalBBS": [], + "globalDemix": [], + "globalPulsar": [], + "globalTAB": [], + "globalTABrings": [], + "coherentStokesData": False, + "flysEye": False, + "numberOfBitsPerSample": 0, + "storagemanager": "dysco"} + + for lineNr, cline in enumerate(lines): + if "=" in cline and not cline.startswith(('BBS', 'Demix', 'Pulsar')): # we skip beam and pipelines lines + key, value = readKeyValuePair(cline) + if key == "processing": + s["processing"] = readProcessing(value) + elif key == "split_targets": + s["split_targets"] = readBoolKey("split_targets", value) + elif key == "packageName": + s["packageName"] = readStringKey("packageName", value) + elif key == "packageDescription": + s["packageDescription"] = readOptionalStringKey("packageDescription", value) + elif key == 
"packageTag": + s["packageTag"] = readPackageTag(value) + elif key == "startTimeUTC": + s["startTime"], s["set_starttime"] = readStartTimeUTC(value) + elif key == "timeStep1": + s["timeStep1"] = readTimeStep(1, value) + elif key == "timeStep2": + s["timeStep2"] = readTimeStep(2, value) + elif key == "stationList": + s["stationList"] = readStationList(value) + elif key == "create_calibrator_observations": + s["create_calibrator_observations"] = readBoolKey("create_calibrator_observations", value) + elif key == "create_target_cal_beam": + s["create_target_cal_beam"] = readBoolKey("create_target_cal_beam", value) + elif key == "calibration": + s["calibration_mode"] = readListKey("calibration", value) + elif key == "create_extra_ncp_beam": + s["create_extra_ncp_beam"] = readCreate_extra_ncp_beam(value) + elif key == "antennaMode": + s["antennaMode"] = readListKey("antennaMode", value) + elif key == "clock": + s["clock"] = readListKey("clock", value) + elif key == "instrumentFilter": + s["instrumentFilter"] = readListKey("instrumentFilter", value) + elif key == "integrationTime": + # TODO should check if it's a valid float? 
+ s["integrationTime"] = readStringKey("integrationTime", value) + elif key == "correlatedData": + s["correlatedData"] = readBoolKey("correlatedData", value) + elif key == "coherentStokesData": + s["coherentStokesData"] = readBoolKey("coherentStokesData", value) + elif key == "incoherentStokesData": + s["incoherentStokesData"] = readBoolKey("incoherentStokesData", value) + elif key == "coherentDedisperseChannels": + s["coherentDedisperseChannels"] = readBoolKey("coherentDedisperseChannels", value) + elif key == "flysEye": + s["flysEye"] = readBoolKey("flysEye", value) + elif key == "calibratorDuration_s": + s["calibratorDuration_s"] = readIntKey("calibratorDuration_s", value) + elif key == "targetDuration_s": + s["targetDuration_s"] = readIntKey("targetDuration_s", value) + elif key == "numberOfBitsPerSample": + s["numberOfBitsPerSample"] = readIntListKey("numberOfBitsPerSample", value) + elif key == "channelsPerSubband": + # TODO should this be Int? + s["channelsPerSubband"] = readStringKey("channelsPerSubband", value) + elif key == "subbandsPerFileCS": + s["subbandsPerFileCS"] = readIntKey("subbandsPerFileCS", value) + elif key == "numberCollapsedChannelsCS": + s["numberCollapsedChannelsCS"] = readIntKey("numberCollapsedChannelsCS", value) + elif key == "stokesDownsamplingStepsCS": + s["stokesDownsamplingStepsCS"] = readIntKey("stokesDownsamplingStepsCS", value) + elif key == "whichCS": + s["whichCS"] = readListKey("whichCS", value) + elif key == "subbandsPerFileIS": + s["subbandsPerFileIS"] = readIntKey("subbandsPerFileIS", value) + elif key == "numberCollapsedChannelsIS": + s["numberCollapsedChannelsIS"] = readIntKey("numberCollapsedChannelsIS", value) + elif key == "stokesDownsamplingStepsIS": + s["stokesDownsamplingStepsIS"] = readIntKey("stokesDownsamplingStepsIS", value) + elif key == "whichIS": + s["whichIS"] = readListKey("whichIS", value) + elif key == "nrSubbandsPerImage": + s["nrSubbandsPerImage"] = readIntKey("nrSubbandsPerImage", value) + elif key == 
"imagingPipeline": + s["imagingPipeline"] = readListKey("imagingPipeline", value) + elif key == "imagingDuration_s": + s["imaging_pipe_duration"] = readIntKey("imaging_pipe_duration", value) + elif key == "maxBaseline_m": + s["maxBaseline"] = readIntKey("maxBaseline", value) + elif key == "fieldOfView_deg": + s["fieldOfView"] = readFloatKey("fieldOfView", value) + elif key == "weightingScheme": + s["weightingScheme"] = readListKey("weightingScheme", value) + elif key == "robustParameter": + s["robustParameter"] = readFloatKey("robustParameter", value) + elif key == "nrOfIterations": + s["nrOfIterations"] = readIntKey("nrOfIterations", value) + elif key == "cleaningThreshold": + s["cleaningThreshold"] = readFloatKey("cleaningThreshold", value) + elif key == "uvMin_klambda": + s["uvMin"] = readFloatKey("uvMin", value) + elif key == "uvMax_klambda": + s["uvMax"] = readFloatKey("uvMax", value) + elif key == "stokesToImage": + s["stokesToImage"] = readStringKey("stokesToImage", value) + elif key == "skyModel": + s["skyModel"] = readStringKey("skyModel", value) + elif key == "tbbPiggybackAllowed": + s["tbbPiggybackAllowed"] = readBoolKey("tbbPiggybackAllowed", value) + elif key == "aartfaacPiggybackAllowed": + s["aartfaacPiggybackAllowed"] = readBoolKey("aartfaacPiggybackAllowed", value) + elif key == "flaggingStrategy": + s["flaggingStrategy"] = readStringKey("flaggingStrategy", value) + elif key == "subbandsPerSubbandGroup": + s["subbandsPerSubbandGroup"] = readIntKey("subbandsPerSubbandGroup", value) + elif key == "subbandGroupsPerMS": + s["subbandGroupsPerMS"] = readIntKey("subbandGroupsPerMS", value) + elif key == "Global_BBS": + s["globalBBS"] = readGlobalBBS(value) + elif key == "Imaging_BBS": + s["imagingBBS"] = readImagingBBS(value) + elif key == "Global_Demix": + s["globalDemix"] = readGlobalDemix(value) + elif key == "Global_Pulsar": + s["globalPulsar"] = readGlobalPulsar(value) + elif key == "Global_Subbands": + s["globalSubbands"] = readGlobalSubbands(value) 
+ elif key == "Global_TAB": + s["globalTAB"] = readTiedArrayBeams(lines) #, lineNr, nr_lines) # FIXME: readTiedArrayBeams takes single arg, also nr_lines is undefined!!! + elif key == "Global_TABrings": + s["globalTABrings"] = readGlobalTABrings(value) + elif key == "calibratorBeam": + s["calibratorBeam"], s["calibratorBBS"], s["calibratorDemix"], s["calibratorTAB"], \ + s["create_calibrator_pipeline"] = \ + readCalibratorBeam(lineNr + 1, lines, s["globalSubbands"], s["globalTABrings"], s["globalBBS"], + s["globalDemix"], s["globalTAB"], s["coherentStokesData"], s["flysEye"]) + elif key == 'targetBeams': + s["targetBeams"], s["targetBBS"], s["targetDemix"], s["targetPulsar"], s["targetTAB"], s["nr_beams"] = \ + readTargetBeams(lineNr + 1, lines, s["globalSubbands"], s["globalBBS"], s["globalDemix"], + s["globalPulsar"], s["globalTAB"], s["globalTABrings"], s["coherentStokesData"], + s["flysEye"], s["numberOfBitsPerSample"]) + elif key == "repeat": + try: + s["nrRepeats"] = int(value) + print "number of repeats = %s" % s["nrRepeats"] + except: + raise GenException("the repeat parameter is not valid for BLOCK: %i" % blockNr) + elif key == "cluster": + s["cluster"] = readStringKey("cluster", value) + elif key == "nr_tasks": + try: + s["nr_tasks"] = int(value) + print "number of tasks = %i" % s["nr_tasks"] + except: + raise GenException("the number of tasks parameter is not valid for BLOCK: %i" % blockNr) + elif key == "nr_cores_per_task": + try: + s["nr_cores_per_task"] = int(value) + print "number of cores per task = %i" % s["nr_cores_per_task"] + except: + raise GenException("the number of cores per task parameter is not valid for BLOCK: %i" % blockNr) + elif key == "nr_nodes": + try: + s["nr_tasks"] = int(value) * DEFAULT_TASKS_PER_NODE + s["nr_cores_per_task"] = DEFAULT_CORES_PER_TASK + print "number of nodes found, converted to number of tasks = %i, number of cores per task = %i" % ( + s["nr_tasks"], s["nr_cores_per_task"]) + except: + raise GenException("the 
number of nodes parameter is not valid for BLOCK: %i" % blockNr) + elif key == "storagemanager": + s["storagemanager"] = value + else: + raise GenException("unknown key:'%s' in BLOCK: %i" % (key, blockNr)) + return s ##settings + def checkSettings(settings, blockNr): - if "calibration_mode" not in settings: - raise GenException("the calibration parameter is not specified for BLOCK: %i" % blockNr) - elif settings["calibration_mode"] == "internal": - settings["create_target_cal_beam"] = True - if not "create_calibrator_observations" in settings: - settings["create_calibrator_observations"] = False - elif settings["calibration_mode"] == "external": - settings["create_calibrator_observations"] = True - elif settings["calibration_mode"] == "none": - settings["create_calibrator_observations"] = False - if "split_targets" not in settings: - raise GenException("the split_targets parameter is not specified for BLOCK: %i" % blockNr) - if "processing" not in settings: - raise GenException("the processing parameter has not been specified. It should be one of %s" % ", ".join(PROCESSING)); - elif settings["processing"] == 'Pulsar': - if not (("coherentStokesData" in settings and settings["coherentStokesData"]) - or ("incoherentStokesData" in settings and settings["incoherentStokesData"])): - raise GenException("Pulsar processing requires one or both of coherentStokesData / incoherentStokesData to be set for BLOCK: %i" % blockNr) - elif settings["processing"] == 'Imaging' and settings["calibration_mode"] == "none": - raise GenException("processing=imaging requires calibration. 
While calibration is set to 'none' for BLOCK: %i" % blockNr) - if settings["nr_beams"] == 0: - raise GenException("no target beams have been specified for BLOCK: %i" % blockNr) - elif settings["calibration_mode"] == "none": - settings["create_target_cal_beam"] = False - if "packageName" not in settings: - raise GenException("the packageName is not specified for BLOCK: %i" % blockNr) - if "stationList" not in settings: - raise GenException("the stationList is not specified for BLOCK: %i" % blockNr) - if "antennaMode" not in settings: - raise GenException("the antennaMode is not specified for BLOCK: %i" % blockNr) - if "instrumentFilter" not in settings: - raise GenException("the instrumentFilter is not specified for BLOCK: %i" % blockNr) - if "integrationTime" not in settings and ("correlatedData" in settings and settings["correlatedData"]): #TODO can it be false? - raise GenException("the integrationTime is not specified for BLOCK: %i" % blockNr) - if settings["create_calibrator_observations"] or settings["calibration_mode"] == "external": - if settings["calibratorDuration_s"] == 0: - raise GenException("the calibratorDuration_s is not specified for BLOCK: %i" % blockNr) - if (settings["calibration_mode"] != "none") and not settings["calibratorBeam"]: - raise GenException("the calibratorBeam is not specified while calibration parameter is not set to 'none' for BLOCK: %i" % blockNr) - if (not "calibratorBeam" in settings and settings["calibration_mode"] != "none"): # calibration_mode is no calibrator beam - raise GenException("the calibratorBeam is not specified for BLOCK: %i" % blockNr) - if ("targetDuration_s" not in settings): - raise GenException("the targetDuration_s is not specified for BLOCK: %i" % blockNr) - if "numberOfBitsPerSample" not in settings: - raise GenException("the numberOfBitsPerSample is not specified for BLOCK: %i" % blockNr) - if "channelsPerSubband" not in settings: - raise GenException("the channelsPerSubband is not specified for BLOCK: %i" 
% blockNr) - if "flysEye" in settings and settings["flysEye"] and not "coherentStokesData" in settings: - raise GenException("FlysEye cannot be switched on when coherentStokesData is switched off, specified in BLOCK: %i" % blockNr) - - if settings["processing"] == 'Imaging': - if "imagingPipeline" in settings: - if settings["imagingPipeline"] == 'none': - settings["do_imaging"] = False - else: - if not "nrSubbandsPerImage" in settings: - raise GenException("the nrSubbandsPerImage is not specified for BLOCK: %i" % blockNr) - - settings["do_imaging"] = True - if settings["imagingPipeline"] == 'standard': - settings["imaging_pipe_type"] = 'ImagingPipelineType' - if settings["antennaMode"].startswith("HBA"): - settings["imaging_pipe_default_template"] = "Imaging Pipeline HBA" - else: - settings["imaging_pipe_default_template"] = "Imaging Pipeline LBA" - elif settings["imagingPipeline"] == 'MSSS': - settings["imaging_pipe_type"] = 'ImagingPipelineMSSSType' - settings["imaging_pipe_default_template"] = "MSSS Imaging Pipeline" - settings["imaging_pipe_duration"] = 0 # img pipeline duration placeholder, MoM rejects <duration></duration> - # determine nrImages - settings["nrImages"] = determineNrImages(settings["targetBeams"], settings["nrSubbandsPerImage"], "nrSubbandsPerImage") - else: - raise GenException("the 'imagingPipeline' type parameter has not been specified while processing is set to Imaging. 
imagingPipeline should be one of: MSSS, standard or none"); - else: - settings["do_imaging"] = False - - if settings["processing"] == "LongBaseline": #TODO issue 8357, needs better function name - if (settings["calibration_mode"] == "none"): - raise GenException("LongBaseline does not work with calibration=none for BLOCK: %i" % blockNr) - determineNrImages(settings["targetBeams"], settings["subbandsPerSubbandGroup"], "subbandsPerSubbandGroup") - determineNrImages(settings["targetBeams"], settings["subbandGroupsPerMS"], "subbandGroupsPerMS") - - if not "flaggingStrategy" in settings: - if settings["antennaMode"].startswith("LBA"): - settings["flaggingStrategy"] = "LBAdefault" + if "calibration_mode" not in settings: + raise GenException("the calibration parameter is not specified for BLOCK: %i" % blockNr) + elif settings["calibration_mode"] == "internal": + settings["create_target_cal_beam"] = True + if not "create_calibrator_observations" in settings: + settings["create_calibrator_observations"] = False + elif settings["calibration_mode"] == "external": + settings["create_calibrator_observations"] = True + elif settings["calibration_mode"] == "none": + settings["create_calibrator_observations"] = False + if "split_targets" not in settings: + raise GenException("the split_targets parameter is not specified for BLOCK: %i" % blockNr) + if "processing" not in settings: + raise GenException( + "the processing parameter has not been specified. 
It should be one of %s" % ", ".join(PROCESSING)); + elif settings["processing"] == 'Pulsar': + if not (("coherentStokesData" in settings and settings["coherentStokesData"]) + or ("incoherentStokesData" in settings and settings["incoherentStokesData"])): + raise GenException( + "Pulsar processing requires one or both of coherentStokesData / incoherentStokesData to be set for BLOCK: %i" % blockNr) + elif settings["processing"] == 'Imaging' and settings["calibration_mode"] == "none": + raise GenException( + "processing=imaging requires calibration. While calibration is set to 'none' for BLOCK: %i" % blockNr) + if settings["nr_beams"] == 0: + raise GenException("no target beams have been specified for BLOCK: %i" % blockNr) + elif settings["calibration_mode"] == "none": + settings["create_target_cal_beam"] = False + if "packageName" not in settings: + raise GenException("the packageName is not specified for BLOCK: %i" % blockNr) + if "stationList" not in settings: + raise GenException("the stationList is not specified for BLOCK: %i" % blockNr) + if "antennaMode" not in settings: + raise GenException("the antennaMode is not specified for BLOCK: %i" % blockNr) + if "instrumentFilter" not in settings: + raise GenException("the instrumentFilter is not specified for BLOCK: %i" % blockNr) + if "integrationTime" not in settings and ( + "correlatedData" in settings and settings["correlatedData"]): # TODO can it be false? 
+ raise GenException("the integrationTime is not specified for BLOCK: %i" % blockNr) + if settings["create_calibrator_observations"] or settings["calibration_mode"] == "external": + if settings["calibratorDuration_s"] == 0: + raise GenException("the calibratorDuration_s is not specified for BLOCK: %i" % blockNr) + if (settings["calibration_mode"] != "none") and not settings["calibratorBeam"]: + raise GenException( + "the calibratorBeam is not specified while calibration parameter is not set to 'none' for BLOCK: %i" % blockNr) + if (not "calibratorBeam" in settings and settings[ + "calibration_mode"] != "none"): # calibration_mode is no calibrator beam + raise GenException("the calibratorBeam is not specified for BLOCK: %i" % blockNr) + if ("targetDuration_s" not in settings): + raise GenException("the targetDuration_s is not specified for BLOCK: %i" % blockNr) + if "numberOfBitsPerSample" not in settings: + raise GenException("the numberOfBitsPerSample is not specified for BLOCK: %i" % blockNr) + if "channelsPerSubband" not in settings: + raise GenException("the channelsPerSubband is not specified for BLOCK: %i" % blockNr) + if "flysEye" in settings and settings["flysEye"] and not "coherentStokesData" in settings: + raise GenException( + "FlysEye cannot be switched on when coherentStokesData is switched off, specified in BLOCK: %i" % blockNr) + + if settings["processing"] == 'Imaging': + if "imagingPipeline" in settings: + if settings["imagingPipeline"] == 'none': + settings["do_imaging"] = False + else: + if not "nrSubbandsPerImage" in settings: + raise GenException("the nrSubbandsPerImage is not specified for BLOCK: %i" % blockNr) + + settings["do_imaging"] = True + if settings["imagingPipeline"] == 'standard': + settings["imaging_pipe_type"] = 'ImagingPipelineType' + if settings["antennaMode"].startswith("HBA"): + settings["imaging_pipe_default_template"] = "Imaging Pipeline HBA" + else: + settings["imaging_pipe_default_template"] = "Imaging Pipeline LBA" + elif 
settings["imagingPipeline"] == 'MSSS': + settings["imaging_pipe_type"] = 'ImagingPipelineMSSSType' + settings["imaging_pipe_default_template"] = "MSSS Imaging Pipeline" + settings[ + "imaging_pipe_duration"] = 0 # img pipeline duration placeholder, MoM rejects <duration></duration> + # determine nrImages + settings["nrImages"] = determineNrImages(settings["targetBeams"], settings["nrSubbandsPerImage"], + "nrSubbandsPerImage") + else: + raise GenException( + "the 'imagingPipeline' type parameter has not been specified while processing is set to Imaging. imagingPipeline should be one of: MSSS, standard or none"); else: - settings["flaggingStrategy"] = "HBAdefault" - - checkAntennaModeInstrumentFilterAndClockCombination(settings["antennaMode"], settings["instrumentFilter"], settings["clock"]) - - settings["writePackageTag"] = "packageTag" in settings and settings["packageTag"] - - if "nr_tasks" in settings: - if "nr_cores_per_task" not in settings: - printInfo("number of tasks found, but no number of cores, taking default number of cores per task = %i" % (DEFAULT_CORES_PER_TASK,)) - - return settings - -def writeImagingPipeline(ofile, nr_beams, targetBeams, blockTopo, nrRepeats, - imaging_pipe_inputs, imaging_pipe_predecessors, writePackageTag, packageTag, - nrImages, imagingPipelineSettings, imagingBBS, cluster, status, nr_tasks, nr_cores_per_task): - for key,val in imagingPipelineSettings.items(): #TODO somewhat dirty hack, to be solved better later. 
- exec(key + '=val') - for beamNr in range (0, nr_beams): - create_pipeline = targetBeams[beamNr][7] - if create_pipeline: - imaging_pipe_topology = blockTopo + 'PI' + str(beamNr) # 1.PI - imaging_pipe_output_topology = imaging_pipe_topology + '.dps' # 1.PI.dps - imaging_pipe_predecessors_string = ','.join(imaging_pipe_predecessors[beamNr]) #creates nrRepeats long comma separated list - - #for repeatNr in range (1, nrRepeats+1): - # ****** ADD AN IMAGING PIPELINE FOR EVERY TARGET BEAM ****** - - if writePackageTag: - imaging_pipe_name = packageTag + "/" + targetBeams[beamNr][2] + "/IM" - else: - imaging_pipe_name = targetBeams[beamNr][2] + "/IM" - - temp = {"imaging_pipe_topology":imaging_pipe_topology, - "imaging_pipe_predecessors_string":imaging_pipe_predecessors_string, - "imaging_pipe_name":imaging_pipe_name, "beamNr":beamNr, - "nrImages":nrImages[beamNr], "nrRepeats":nrRepeats, "initial_status": status} - - writeImagingPipelineXML(ofile, merge_dicts(temp, imagingPipelineSettings), imagingBBS, cluster, status, nr_tasks, nr_cores_per_task) - writeImagingPipelineInputDataproducts(ofile, imaging_pipe_inputs[beamNr]) - writeSkyImageOutputDataproduct(ofile, imaging_pipe_output_topology, cluster) + settings["do_imaging"] = False + + if settings["processing"] == "LongBaseline": # TODO issue 8357, needs better function name + if (settings["calibration_mode"] == "none"): + raise GenException("LongBaseline does not work with calibration=none for BLOCK: %i" % blockNr) + determineNrImages(settings["targetBeams"], settings["subbandsPerSubbandGroup"], "subbandsPerSubbandGroup") + determineNrImages(settings["targetBeams"], settings["subbandGroupsPerMS"], "subbandGroupsPerMS") + + if not "flaggingStrategy" in settings: + if settings["antennaMode"].startswith("LBA"): + settings["flaggingStrategy"] = "LBAdefault" + else: + settings["flaggingStrategy"] = "HBAdefault" + + checkAntennaModeInstrumentFilterAndClockCombination(settings["antennaMode"], settings["instrumentFilter"], + 
settings["clock"]) + + settings["writePackageTag"] = "packageTag" in settings and settings["packageTag"] + + if "nr_tasks" in settings: + if "nr_cores_per_task" not in settings: + printInfo("number of tasks found, but no number of cores, taking default number of cores per task = %i" % ( + DEFAULT_CORES_PER_TASK,)) + + if "storagemanager" in settings: + if settings["storagemanager"] not in ["", "dysco"]: + raise GenException("The storagemanager was set to an invalid value of: %s for BLOCK: %i" % + (settings["storagemanager"], blockNr)) + + return settings + + +def writeImagingPipeline(ofile, nr_beams, targetBeams, blockTopo, nrRepeats, + imaging_pipe_inputs, imaging_pipe_predecessors, writePackageTag, packageTag, + nrImages, imagingPipelineSettings, imagingBBS, cluster, status, nr_tasks, nr_cores_per_task, + miscParameters): + for beamNr in range(0, nr_beams): + create_pipeline = targetBeams[beamNr][7] + if create_pipeline: + imaging_pipe_topology = blockTopo + 'PI' + str(beamNr) # 1.PI + imaging_pipe_output_topology = imaging_pipe_topology + '.dps' # 1.PI.dps + imaging_pipe_predecessors_string = ','.join( + imaging_pipe_predecessors[beamNr]) # creates nrRepeats long comma separated list + + # for repeatNr in range (1, nrRepeats+1): + # ****** ADD AN IMAGING PIPELINE FOR EVERY TARGET BEAM ****** + + if writePackageTag: + imaging_pipe_name = packageTag + "/" + targetBeams[beamNr][2] + "/IM" + else: + imaging_pipe_name = targetBeams[beamNr][2] + "/IM" + + temp = {"imaging_pipe_topology": imaging_pipe_topology, + "imaging_pipe_predecessors_string": imaging_pipe_predecessors_string, + "imaging_pipe_name": imaging_pipe_name, "beamNr": beamNr, + "nrImages": nrImages[beamNr], "nrRepeats": nrRepeats, "initial_status": status} + + writeImagingPipelineXML(ofile, merge_dicts(temp, imagingPipelineSettings), imagingBBS, cluster, status, + nr_tasks, nr_cores_per_task, miscParameters) + writeImagingPipelineInputDataproducts(ofile, imaging_pipe_inputs[beamNr]) + 
writeSkyImageOutputDataproduct(ofile, imaging_pipe_output_topology, cluster) + def determineBfDataExtension(coherentStokesData, incoherentStokesData): - bfDataExtension = '' - if coherentStokesData | incoherentStokesData: - if coherentStokesData & ~incoherentStokesData: - bfDataExtension = '.cs' - elif incoherentStokesData & ~coherentStokesData: - bfDataExtension = '.is' - else: - bfDataExtension = '.csis' - return bfDataExtension + bfDataExtension = '' + if coherentStokesData | incoherentStokesData: + if coherentStokesData & ~incoherentStokesData: + bfDataExtension = '.cs' + elif incoherentStokesData & ~coherentStokesData: + bfDataExtension = '.is' + else: + bfDataExtension = '.csis' + return bfDataExtension + def writeRepeat(ofile, projectName, blockTopo, repeatNr, settings, imaging_pipe_inputs, - imaging_pipe_predecessors, status, nr_tasks, nr_cores_per_task): - for key,val in settings.items(): #TODO somewhat dirty hack, to be solved better later. - exec(key + '=val') - repeatTopo = blockTopo + str(repeatNr) - - tar_obs_beam_topologies = [] - tar_obs_uv_data_topologies = [] - tar_obs_bf_data_topologies = [] - tar_pipe_topologies = [] - LB_preproc_pipe_predecessor = [] - LB_preproc_pipe_topologies = [] - LB_preproc_pipe_output_MS_topologies = [] - - #nv 13okt2014: #6716 - Implement Long Baseline Pipeline - LB_pipeline_topologies =[] - LB_pipeline_predecessor =[] - LB_pipeline_input_uv_topologies =[] - LB_pipeline_output_uv_topologies =[] - - cal_obs_topology = repeatTopo + '.C' # 1.C - cal_obs_beam0_topology = cal_obs_topology + '.SAP000' # 1.C.SAP000 - tar_obs_topology = repeatTopo + '.T' # 1.T - cal_pipe_calibrator_topology = repeatTopo + '.CPC' # 1.CPC - cal_pipe_target_topology = repeatTopo + '.CPT' # 1.CPT - - if processing == 'Imaging': - if calibration_mode == "internal": - cal_obs_pipe_default_template = "Calibrator Pipeline (export)" - cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" - cal_pipe_calibrator_description = "Cal Pipe 
Calibrator" - cal_pipe_target_description = "Cal Pipe Target" - tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_target_topology # 1.T,1.CPT - tar_pipe_input_INST_topo = cal_pipe_target_topology + '.inst.dps' # 1.P1.dps - elif calibration_mode == "external": - cal_obs_pipe_default_template = "Calibrator Pipeline (export)" - cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" - cal_pipe_calibrator_description = "Cal Pipe Calibrator" - cal_pipe_target_description = "Cal Pipe Target" - tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_calibrator_topology # 1.T,1.CPC - tar_pipe_input_INST_topo = cal_pipe_calibrator_topology + '.inst.dps' # 1.CPC.inst.dps - elif processing == 'Preprocessing': - tar_pipe_predecessor = tar_obs_topology # 1.T - tar_pipe_input_INST_topo = '' # no input instrument models for these modes - cal_obs_pipe_default_template = "Preprocessing Pipeline" - cal_tar_pipe_default_template = "Preprocessing Pipeline" - cal_pipe_calibrator_description = "Preprocessing" - cal_pipe_target_description = "Preprocessing" - elif processing == 'Calibration': - tar_pipe_predecessor = tar_obs_topology # 1.T - tar_pipe_input_INST_topo = '' # no input instrument models for these modes - cal_obs_pipe_default_template = "Calibration Pipeline" - cal_tar_pipe_default_template = "Calibration Pipeline" - cal_pipe_calibrator_description = "Calibration" - cal_pipe_target_description = "Calibration" - elif processing == 'Pulsar': - #pulsar_pipe_predecessor = tar_obs_topology - pulsar_pipe_default_template = "Pulsar Pipeline" - elif processing == 'LongBaseline': - if calibration_mode == "internal": # internal calibration (previously Calbeam) - cal_obs_pipe_default_template = "Calibrator Pipeline (export)" - cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" - cal_pipe_calibrator_description = "Cal Pipe Calibrator" - cal_pipe_target_description = "Cal Pipe Target" - tar_pipe_predecessor = tar_obs_topology + ',' + 
cal_pipe_target_topology # 1.T,1.CPT - tar_pipe_input_INST_topo = cal_pipe_target_topology + '.inst.dps' # 1.P1.dps - elif calibration_mode == "external": # external calibration (previously calObs) - cal_obs_pipe_default_template = "Calibrator Pipeline (export)" - cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" - cal_pipe_calibrator_description = "Cal Pipe Calibrator" - cal_pipe_target_description = "Cal Pipe Target" - tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_calibrator_topology # 1.T,1.CPC - tar_pipe_input_INST_topo = cal_pipe_calibrator_topology + '.inst.dps' # 1.CPC.inst.dps - - bfDataExtension = determineBfDataExtension(coherentStokesData, incoherentStokesData) - - for beamNr in range (0, nr_beams): - beam_nr_str = str(beamNr) - if create_calibrator_observations: - if writePackageTag: - cal_obs_name = packageTag + "/" + calibratorBeam[2] + "/" + str(repeatNr) + "/CO" - else: - cal_obs_name = calibratorBeam[2] + "/" + str(repeatNr) + "/CO" - - # TODO: for multiObs this is not ok. The SAP numbers should start from scratch again with every new target observation - # and there should be a .beamnr added before the .SAP in the topology - # this work has to be done when multiObs with multiple SAPs per target observation is implemented. 
- tar_obs_beam_topologies.append(tar_obs_topology + ".SAP" + beam_nr_str.rjust(3,'0')) - - tar_obs_bf_data_topologies.append(tar_obs_beam_topologies[beamNr] + bfDataExtension) - tar_obs_uv_data_topologies.append(tar_obs_beam_topologies[beamNr] + ".uv.dps") - - tar_pipe_topologies.append(repeatTopo + ".PT" + beam_nr_str) + imaging_pipe_predecessors, status, nr_tasks, nr_cores_per_task, miscParameters): - if processing == 'LongBaseline': - LB_preproc_pipe_topologies.append(repeatTopo + ".PTLB" + beam_nr_str) - LB_preproc_pipe_output_MS_topologies.append(LB_preproc_pipe_topologies[beamNr] + ".uv.dps") - LB_preproc_pipe_predecessor.append(tar_pipe_topologies[beamNr]) - - #nv 13okt2014: #6716 - Implement Long Baseline Pipeline - LB_pipeline_topologies.append(repeatTopo + ".LBP" + beam_nr_str) - LB_pipeline_predecessor.append(LB_preproc_pipe_topologies[beamNr]) - LB_pipeline_input_uv_topologies.append(LB_preproc_pipe_output_MS_topologies[beamNr]) - LB_pipeline_output_uv_topologies.append(LB_pipeline_topologies[beamNr] + ".uv.dps") - - if do_imaging: - imaging_pipe_inputs[beamNr].append(tar_pipe_topologies[beamNr] + ".uv.dps") - imaging_pipe_predecessors[beamNr].append(tar_pipe_topologies[beamNr]) - - if "create_extra_ncp_beam" in settings and settings["create_extra_ncp_beam"]: - tarObsCalBeamDataTopoStr = tar_obs_topology + ".SAP%03i" % (nr_beams+1,) - else: - tarObsCalBeamDataTopoStr = tar_obs_topology + ".SAP%03i" % (nr_beams,) - tar_obs_beam_topologies.append(tarObsCalBeamDataTopoStr) - tar_obs_bf_data_topologies.append(tarObsCalBeamDataTopoStr + bfDataExtension) - tar_obs_uv_data_topologies.append(tarObsCalBeamDataTopoStr + ".uv.dps") - - tar_obs_predecessor = '' - if create_calibrator_observations: - tar_obs_predecessor = cal_obs_topology # 1.C - if set_starttime: - startTimeStr = startTimeObs.strftime('%Y-%m-%dT%H:%M:%S') - endTimeStr = (startTimeObs + timedelta(seconds=calibratorDuration_s)).strftime('%Y-%m-%dT%H:%M:%S') - else: - startTimeStr = '' - endTimeStr = 
'' - - writeXMLObs(ofile, cal_obs_name, cal_obs_name + ' (Calibration Observation)', - cal_obs_topology, '', cal_obs_name, projectName, tbbPiggybackAllowed, - aartfaacPiggybackAllowed, correlatedData, coherentStokesData, incoherentStokesData, - antennaMode, clock, instrumentFilter, integrationTime, channelsPerSubband, - coherentDedisperseChannels, flysEye, subbandsPerFileCS, numberCollapsedChannelsCS, - stokesDownsamplingStepsCS, whichCS, subbandsPerFileIS, numberCollapsedChannelsIS, - stokesDownsamplingStepsIS, whichIS, stationList, startTimeStr, endTimeStr, - calibratorDuration_s, numberOfBitsPerSample, status) - writeXMLBeam(ofile, calibratorBeam[2], calibratorBeam[2], cal_obs_beam0_topology, - 'Calibration', calibratorBeam[2], calibratorBeam[0], calibratorBeam[1], - calibratorBeam[3], flysEye, str(calibratorBeam[5]), str(calibratorBeam[6]), - writeTABXML(calibratorTAB), writeDataProducts(cal_obs_beam0_topology, correlatedData, - coherentStokesData, incoherentStokesData, cluster), status) - writeXMLObsEnd(ofile) - - # target start and end time: - if set_starttime: - if create_calibrator_observations: - startTimeObs = startTimeObs + timedelta(seconds=timeStep1+calibratorDuration_s) - startTimeStr = startTimeObs.strftime('%Y-%m-%dT%H:%M:%S') - endTimeStr = (startTimeObs + timedelta(seconds=targetDuration_s)).strftime('%Y-%m-%dT%H:%M:%S') - else: - startTimeStr = '' - endTimeStr = '' - - if create_calibrator_observations and create_calibrator_pipeline: - - if writePackageTag: - cal_pipe_name = packageTag + "/" + calibratorBeam[2] + "/" + str(repeatNr) + "/CPC" - else: - cal_pipe_name = calibratorBeam[2] + "/" + str(repeatNr) + "/CPC" - - if processing == 'Imaging' or processing == 'LongBaseline': - if not calibratorBBS: - raise GenException("BBS SkyModel is not specified for pipeline coupled to calibrator beam") - - writeXMLCalPipe(ofile, cal_pipe_calibrator_topology, cal_obs_topology, cal_pipe_name, - cal_pipe_calibrator_description, cal_obs_pipe_default_template, 
- flaggingStrategy, calibratorBeam[8], calibratorBBS[0][0], calibratorDemix[0], calibratorBBS[0][1:], - cal_obs_beam0_topology + '.uv.dps', cal_pipe_calibrator_topology + '.inst.dps', - cal_pipe_calibrator_topology + '.inst.dps', cal_pipe_calibrator_topology + '.uv.dps', cluster, status, nr_tasks, nr_cores_per_task) + # This is to get rid of the crazy hack that declared all keys in settings as a local variable. I first tried to + # read all the variables that are required in the following and raise an error if something is missing. + # Dependencies are an issue, though, and resolving the ones that pop up with the regression tests might not be + # sufficient. After consulting Adriaan, we came to the conclusion that it is best to go the nasty way of just + # accessing things on demand in the code paths where they are actually required, to have no impact on behavior. + # These items from settings are used on top level, so they must be there: + try: + processing = settings['processing'] + coherentStokesData = settings['coherentStokesData'] + incoherentStokesData = settings['incoherentStokesData'] + nr_beams = settings['nr_beams'] + except KeyError as ex: + print ex + raise GenException("Could not read required setting! 
(%s)" % ex.message) + + repeatTopo = blockTopo + str(repeatNr) + + tar_obs_beam_topologies = [] + tar_obs_uv_data_topologies = [] + tar_obs_bf_data_topologies = [] + tar_pipe_topologies = [] + LB_preproc_pipe_predecessor = [] + LB_preproc_pipe_topologies = [] + LB_preproc_pipe_output_MS_topologies = [] + + # nv 13okt2014: #6716 - Implement Long Baseline Pipeline + LB_pipeline_topologies = [] + LB_pipeline_predecessor = [] + LB_pipeline_input_uv_topologies = [] + LB_pipeline_output_uv_topologies = [] + + cal_obs_topology = repeatTopo + '.C' # 1.C + cal_obs_beam0_topology = cal_obs_topology + '.SAP000' # 1.C.SAP000 + tar_obs_topology = repeatTopo + '.T' # 1.T + cal_pipe_calibrator_topology = repeatTopo + '.CPC' # 1.CPC + cal_pipe_target_topology = repeatTopo + '.CPT' # 1.CPT + + if processing == 'Imaging': + if settings['calibration_mode'] == "internal": + cal_obs_pipe_default_template = "Calibrator Pipeline (export)" + cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" + cal_pipe_calibrator_description = "Cal Pipe Calibrator" + cal_pipe_target_description = "Cal Pipe Target" + tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_target_topology # 1.T,1.CPT + tar_pipe_input_INST_topo = cal_pipe_target_topology + '.inst.dps' # 1.P1.dps + elif settings['calibration_mode'] == "external": + cal_obs_pipe_default_template = "Calibrator Pipeline (export)" + cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" + cal_pipe_calibrator_description = "Cal Pipe Calibrator" + cal_pipe_target_description = "Cal Pipe Target" + tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_calibrator_topology # 1.T,1.CPC + tar_pipe_input_INST_topo = cal_pipe_calibrator_topology + '.inst.dps' # 1.CPC.inst.dps elif processing == 'Preprocessing': - for i in range(0,len(calibratorDemix)): - if len(calibratorDemix) > 1: #TODO a cludge right now, but want to refactor how to call the writeXML soon - cal_pipe_calibrator_topology_tmp = cal_pipe_calibrator_topology + 
".%i" % i - cal_pipe_name_tmp = cal_pipe_name + ".%i" % i - else: - cal_pipe_calibrator_topology_tmp = cal_pipe_calibrator_topology - cal_pipe_name_tmp = cal_pipe_name - writeXMLAvgPipeline(ofile, cal_pipe_calibrator_topology_tmp, cal_obs_topology, - cal_pipe_name_tmp, cal_pipe_calibrator_description, cal_obs_pipe_default_template, - flaggingStrategy, calibratorBeam[8], calibratorDemix[i], - cal_obs_beam0_topology + '.uv.dps', cal_pipe_calibrator_topology_tmp + '.uv.dps', - cluster, status, nr_tasks, nr_cores_per_task) - + tar_pipe_predecessor = tar_obs_topology # 1.T + tar_pipe_input_INST_topo = '' # no input instrument models for these modes + cal_obs_pipe_default_template = "Preprocessing Pipeline" + cal_tar_pipe_default_template = "Preprocessing Pipeline" + cal_pipe_calibrator_description = "Preprocessing" + cal_pipe_target_description = "Preprocessing" elif processing == 'Calibration': - - if not calibratorBBS: - raise GenException("BBS SkyModel is not specified for pipeline coupled to calibrator beam") - - #TODO ['', '', '', '', '', '', ''] is really ugly, this will break the regression test - writeXMLCalPipe(ofile, cal_pipe_calibrator_topology, cal_obs_topology, cal_pipe_name, - cal_pipe_calibrator_description, cal_obs_pipe_default_template, flaggingStrategy, - calibratorBeam[8], calibratorBBS[0][0], calibratorDemix[0], ['', '', '', '', '', '', ''], - cal_obs_beam0_topology + '.uv.dps', cal_pipe_calibrator_topology + '.inst.dps', - cal_pipe_calibrator_topology + '.inst.dps', cal_pipe_calibrator_topology + '.uv.dps', - cluster, status, nr_tasks, nr_cores_per_task) - - if not split_targets: - if writePackageTag: - tar_obs_name = packageTag + "/" + targetBeams[0][2] + "/" + str(repeatNr) + "/TO" - else: - tar_obs_name = targetBeams[0][2] + "/" + str(repeatNr) + "/TO" - - writeXMLObs(ofile, tar_obs_name, tar_obs_name + ' (Target Observation)', tar_obs_topology, - tar_obs_predecessor, tar_obs_name, projectName, tbbPiggybackAllowed, - aartfaacPiggybackAllowed, 
correlatedData, coherentStokesData, incoherentStokesData, - antennaMode, clock, instrumentFilter, integrationTime, channelsPerSubband, - coherentDedisperseChannels, flysEye, subbandsPerFileCS, numberCollapsedChannelsCS, - stokesDownsamplingStepsCS, whichCS, subbandsPerFileIS, numberCollapsedChannelsIS, - stokesDownsamplingStepsIS, whichIS, stationList, startTimeStr, endTimeStr, - targetDuration_s, numberOfBitsPerSample, status) - - if set_starttime: - if create_calibrator_observations: - startTimeObs = startTimeObs + timedelta(seconds=timeStep2+targetDuration_s) - else: - startTimeObs = startTimeObs + timedelta(seconds=timeStep1+targetDuration_s) + tar_pipe_predecessor = tar_obs_topology # 1.T + tar_pipe_input_INST_topo = '' # no input instrument models for these modes + cal_obs_pipe_default_template = "Calibration Pipeline" + cal_tar_pipe_default_template = "Calibration Pipeline" + cal_pipe_calibrator_description = "Calibration" + cal_pipe_target_description = "Calibration" + elif processing == 'Pulsar': + # pulsar_pipe_predecessor = tar_obs_topology + pulsar_pipe_default_template = "Pulsar Pipeline" + elif processing == 'LongBaseline': + if settings['calibration_mode'] == "internal": # internal calibration (previously Calbeam) + cal_obs_pipe_default_template = "Calibrator Pipeline (export)" + cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" + cal_pipe_calibrator_description = "Cal Pipe Calibrator" + cal_pipe_target_description = "Cal Pipe Target" + tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_target_topology # 1.T,1.CPT + tar_pipe_input_INST_topo = cal_pipe_target_topology + '.inst.dps' # 1.P1.dps + elif settings['calibration_mode'] == "external": # external calibration (previously calObs) + cal_obs_pipe_default_template = "Calibrator Pipeline (export)" + cal_tar_pipe_default_template = "Calibrator Pipeline (no export)" + cal_pipe_calibrator_description = "Cal Pipe Calibrator" + cal_pipe_target_description = "Cal Pipe Target" + 
tar_pipe_predecessor = tar_obs_topology + ',' + cal_pipe_calibrator_topology # 1.T,1.CPC + tar_pipe_input_INST_topo = cal_pipe_calibrator_topology + '.inst.dps' # 1.CPC.inst.dps + + bfDataExtension = determineBfDataExtension(coherentStokesData, incoherentStokesData) for beamNr in range(0, nr_beams): - writeXMLBeam(ofile, targetBeams[beamNr][2], targetBeams[beamNr][2], - tar_obs_beam_topologies[beamNr], 'Target', targetBeams[beamNr][2], - targetBeams[beamNr][0], targetBeams[beamNr][1], targetBeams[beamNr][3], flysEye, - targetBeams[beamNr][5], targetBeams[beamNr][6], writeTABXML(targetTAB[beamNr]), - writeDataProducts(tar_obs_beam_topologies[beamNr], correlatedData, - coherentStokesData, incoherentStokesData, cluster), status) - - # create the extra polarization beam? + beam_nr_str = str(beamNr) + if settings['create_calibrator_observations']: + if settings['writePackageTag']: + cal_obs_name = settings['packageTag'] + "/" + settings['calibratorBeam'][2] + "/" + str(repeatNr) + "/CO" + else: + cal_obs_name = settings['calibratorBeam'][2] + "/" + str(repeatNr) + "/CO" + + # TODO: for multiObs this is not ok. The SAP numbers should start from scratch again with every new target observation + # and there should be a .beamnr added before the .SAP in the topology + # this work has to be done when multiObs with multiple SAPs per target observation is implemented. 
+ tar_obs_beam_topologies.append(tar_obs_topology + ".SAP" + beam_nr_str.rjust(3, '0')) + + tar_obs_bf_data_topologies.append(tar_obs_beam_topologies[beamNr] + bfDataExtension) + tar_obs_uv_data_topologies.append(tar_obs_beam_topologies[beamNr] + ".uv.dps") + + tar_pipe_topologies.append(repeatTopo + ".PT" + beam_nr_str) + + if processing == 'LongBaseline': + LB_preproc_pipe_topologies.append(repeatTopo + ".PTLB" + beam_nr_str) + LB_preproc_pipe_output_MS_topologies.append(LB_preproc_pipe_topologies[beamNr] + ".uv.dps") + LB_preproc_pipe_predecessor.append(tar_pipe_topologies[beamNr]) + + # nv 13okt2014: #6716 - Implement Long Baseline Pipeline + LB_pipeline_topologies.append(repeatTopo + ".LBP" + beam_nr_str) + LB_pipeline_predecessor.append(LB_preproc_pipe_topologies[beamNr]) + LB_pipeline_input_uv_topologies.append(LB_preproc_pipe_output_MS_topologies[beamNr]) + LB_pipeline_output_uv_topologies.append(LB_pipeline_topologies[beamNr] + ".uv.dps") + + if settings['do_imaging']: + imaging_pipe_inputs[beamNr].append(tar_pipe_topologies[beamNr] + ".uv.dps") + imaging_pipe_predecessors[beamNr].append(tar_pipe_topologies[beamNr]) + if "create_extra_ncp_beam" in settings and settings["create_extra_ncp_beam"]: - polBeamTopo = tar_obs_topology + ".SAP" + str(beamNr+1).rjust(3,'0') - writeXMLBeam(ofile, targetBeams[0][2], targetBeams[0][2], targetBeams[0][2], 'Target', - targetBeams[0][0], flysEye, targetBeams[0][5], targetBeams[0][6], - writeTABXML(targetTAB[0]), writeDataProducts(polBeamTopo, correlatedData, - coherentStokesData, incoherentStokesData, cluster), status) - - # create a calibrator beam in the target observation? 
- if create_target_cal_beam: - if "create_extra_ncp_beam" in settings and settings["create_extra_ncp_beam"]: - calBeamTopo = tar_obs_topology + ".SAP" + str(beamNr+2).rjust(3,'0') - else: - calBeamTopo = tar_obs_topology + ".SAP" + str(beamNr+1).rjust(3,'0') - - writeXMLBeam(ofile, calibratorBeam[2], calibratorBeam[2], calBeamTopo, 'Calibration', - calibratorBeam[2], calibratorBeam[0], calibratorBeam[1], calibratorBeam[3], flysEye, - calibratorBeam[5], calibratorBeam[6], writeTABXML(calibratorTAB), - writeDataProducts(tar_obs_beam_topologies[nr_beams], correlatedData, - coherentStokesData, incoherentStokesData, cluster), status) - - writeXMLObsEnd(ofile) - - if writePackageTag: - cal_pipe_target_name = packageTag + "/" + calibratorBeam[2] + "/" + str(repeatNr) + "/CPT" - else: - cal_pipe_target_name = calibratorBeam[2] + "/" + str(repeatNr) + "/CPT" - - create_pipeline = calibratorBeam[7] - if create_pipeline: + tarObsCalBeamDataTopoStr = tar_obs_topology + ".SAP%03i" % (nr_beams + 1,) + else: + tarObsCalBeamDataTopoStr = tar_obs_topology + ".SAP%03i" % (nr_beams,) + tar_obs_beam_topologies.append(tarObsCalBeamDataTopoStr) + tar_obs_bf_data_topologies.append(tarObsCalBeamDataTopoStr + bfDataExtension) + tar_obs_uv_data_topologies.append(tarObsCalBeamDataTopoStr + ".uv.dps") + + tar_obs_predecessor = '' + if settings['create_calibrator_observations']: + tar_obs_predecessor = cal_obs_topology # 1.C + if settings['set_starttime']: + startTimeStr = settings['startTimeObs'].strftime('%Y-%m-%dT%H:%M:%S') + endTimeStr = (settings['startTimeObs'] + timedelta(seconds=settings['calibratorDuration_s'])).strftime('%Y-%m-%dT%H:%M:%S') + else: + startTimeStr = '' + endTimeStr = '' + + writeXMLObs(ofile, + cal_obs_name, # FIXME: Might be undefined + cal_obs_name + ' (Calibration Observation)', + cal_obs_topology, + '', + cal_obs_name, + projectName, + settings['tbbPiggybackAllowed'], + settings['aartfaacPiggybackAllowed'], + settings['correlatedData'], + coherentStokesData, + 
incoherentStokesData, + settings['antennaMode'], + settings['clock'], + settings['instrumentFilter'], + settings['integrationTime'], + settings['channelsPerSubband'], + settings['coherentDedisperseChannels'], + settings['flysEye'], + settings['subbandsPerFileCS'], + settings['numberCollapsedChannelsCS'], + settings['stokesDownsamplingStepsCS'], + settings['whichCS'], + settings['subbandsPerFileIS'], + settings['numberCollapsedChannelsIS'], + settings['stokesDownsamplingStepsIS'], + settings['whichIS'], + settings['stationList'], + startTimeStr, + endTimeStr, + settings['calibratorDuration_s'], + settings['numberOfBitsPerSample'], + status) + + calibratorBeam = settings['calibratorBeam'] + writeXMLBeam(ofile, + calibratorBeam[2], + calibratorBeam[2], + cal_obs_beam0_topology, + 'Calibration', + calibratorBeam[2], + calibratorBeam[0], + calibratorBeam[1], + calibratorBeam[3], + settings['flysEye'], + str(calibratorBeam[5]), + str(calibratorBeam[6]), + writeTABXML(settings['calibratorTAB']), + writeDataProducts(cal_obs_beam0_topology, + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['cluster']), + status) + writeXMLObsEnd(ofile) + + # target start and end time: + if settings['set_starttime']: + if settings['create_calibrator_observations']: + settings['startTimeObs'] = settings['startTimeObs'] + timedelta(seconds=settings['timeStep1'] + settings['calibratorDuration_s']) + startTimeStr = settings['startTimeObs'].strftime('%Y-%m-%dT%H:%M:%S') + endTimeStr = (settings['startTimeObs'] + timedelta(seconds=settings['targetDuration_s'])).strftime('%Y-%m-%dT%H:%M:%S') + else: + startTimeStr = '' + endTimeStr = '' + + if settings['create_calibrator_observations'] and settings['create_calibrator_pipeline']: + + if settings['writePackageTag']: + cal_pipe_name = settings['packageTag'] + "/" + settings['calibratorBeam'][2] + "/" + str(repeatNr) + "/CPC" + else: + cal_pipe_name = settings['calibratorBeam'][2] + "/" + str(repeatNr) + "/CPC" + if 
processing == 'Imaging' or processing == 'LongBaseline': - if not calibratorBBS: - raise GenException("BBS SkyModel is not specified for pipeline coupled to calibration beam") - - writeXMLCalPipe(ofile, cal_pipe_target_topology, tar_obs_topology, cal_pipe_target_name, - cal_pipe_target_description, cal_tar_pipe_default_template, flaggingStrategy, - calibratorBeam[8], calibratorBBS[0][0], calibratorDemix[0], calibratorBBS[0][1:], - tar_obs_uv_data_topologies[nr_beams], cal_pipe_target_topology + '.inst.dps', - cal_pipe_target_topology + '.inst.dps', cal_pipe_target_topology + '.uv.dps', - cluster, status, nr_tasks, nr_cores_per_task) - + calibratorBBS = settings['calibratorBBS'] + if not calibratorBBS: + raise GenException("BBS SkyModel is not specified for pipeline coupled to calibrator beam") + + writeXMLCalPipe(ofile, + cal_pipe_calibrator_topology, + cal_obs_topology, + cal_pipe_name, + cal_pipe_calibrator_description, + cal_obs_pipe_default_template, + settings['flaggingStrategy'], + settings['calibratorBeam'][8], + calibratorBBS[0][0], + settings['calibratorDemix'][0], + calibratorBBS[0][1:], + cal_obs_beam0_topology + '.uv.dps', + cal_pipe_calibrator_topology + '.inst.dps', + cal_pipe_calibrator_topology + '.inst.dps', + cal_pipe_calibrator_topology + '.uv.dps', + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + elif processing == 'Preprocessing': - for i in range(0, len(calibratorDemix)): - if len(calibratorDemix) > 1: #TODO a cludge right now, but want to refactor how to call the writeXML soon - cal_pipe_target_topology_tmp = cal_pipe_target_topology + ".%i" % i - cal_pipe_target_name_tmp = cal_pipe_target_name + ".%i" % i - else: - cal_pipe_target_topology_tmp = cal_pipe_target_topology - cal_pipe_target_name_tmp = cal_pipe_target_name - writeXMLAvgPipeline(ofile, cal_pipe_target_topology_tmp, tar_obs_topology, - cal_pipe_target_name_tmp, cal_pipe_target_description, - cal_tar_pipe_default_template, flaggingStrategy, 
calibratorBeam[8], - calibratorDemix[i], tar_obs_uv_data_topologies[nr_beams], - cal_pipe_target_topology_tmp + '.uv.dps', cluster, status, nr_tasks, nr_cores_per_task) - + calibratorDemix = settings['calibratorDemix'] + for i in range(0, len(calibratorDemix)): + if len(calibratorDemix) > 1: # TODO a cludge right now, but want to refactor how to call the writeXML soon + cal_pipe_calibrator_topology_tmp = cal_pipe_calibrator_topology + ".%i" % i + cal_pipe_name_tmp = cal_pipe_name + ".%i" % i + else: + cal_pipe_calibrator_topology_tmp = cal_pipe_calibrator_topology + cal_pipe_name_tmp = cal_pipe_name + + writeXMLAvgPipeline(ofile, + cal_pipe_calibrator_topology_tmp, + cal_obs_topology, + cal_pipe_name_tmp, + cal_pipe_calibrator_description, + cal_obs_pipe_default_template, + settings['flaggingStrategy'], + settings['calibratorBeam'][8], + calibratorDemix[i], + cal_obs_beam0_topology + '.uv.dps', + cal_pipe_calibrator_topology_tmp + '.uv.dps', + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + elif processing == 'Calibration': - - if not calibratorBBS: - raise GenException("BBS SkyModel is not specified for pipeline coupled to calibration beam") - - writeXMLCalPipe(ofile, cal_pipe_target_topology, tar_obs_topology, - cal_pipe_target_name, cal_pipe_target_description, cal_tar_pipe_default_template, - flaggingStrategy, calibratorBeam[8], calibratorBBS[0][0], calibratorDemix[0], - calibratorBBS[0][1:], tar_obs_uv_data_topologies[nr_beams], - cal_pipe_target_topology + '.inst.dps', cal_pipe_target_topology + '.inst.dps', - cal_pipe_target_topology + '.uv.dps', cluster, status, nr_tasks, nr_cores_per_task) - else: - writeXMLObsEnd(ofile) - - else: # split target sources into separate observations - for beamNr in range(0, nr_beams): - if writePackageTag: - tar_obs_name = packageTag + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "/TO" - else: - tar_obs_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "/TO" - - 
tar_obs_topology_MultiObs = tar_obs_topology + '.' + str(beamNr) - writeXMLObs(ofile, tar_obs_name, tar_obs_name + ' (Target Observation)', - tar_obs_topology_MultiObs, '', tar_obs_name, projectName, tbbPiggybackAllowed, - aartfaacPiggybackAllowed, correlatedData, coherentStokesData, incoherentStokesData, - antennaMode, clock, instrumentFilter, integrationTime, channelsPerSubband, - coherentDedisperseChannels, flysEye, subbandsPerFileCS, numberCollapsedChannelsCS, - stokesDownsamplingStepsCS, whichCS, subbandsPerFileIS, numberCollapsedChannelsIS, - stokesDownsamplingStepsIS, whichIS, stationList, startTimeStr, endTimeStr, - targetDuration_s, numberOfBitsPerSample, status) - - writeXMLBeam(ofile, targetBeams[beamNr][2], targetBeams[beamNr][2], - tar_obs_beam_topologies[beamNr], 'Target', targetBeams[beamNr][2], - targetBeams[beamNr][0], targetBeams[beamNr][1], targetBeams[beamNr][3], flysEye, - targetBeams[beamNr][5], targetBeams[beamNr][6], writeTABXML(targetTAB[beamNr]), - writeDataProducts(tar_obs_beam_topologies[beamNr], correlatedData, - coherentStokesData, incoherentStokesData, cluster), status) - - writeXMLObsEnd(ofile) - - if set_starttime: - startTimeObs = startTimeObs + timedelta(seconds=timeStep1+targetDuration_s) - - - # Target PIPELINES generation from here on - - for beamNr in range(0, nr_beams): - create_pipeline = targetBeams[beamNr][7] - if create_pipeline: - tar_pipe_ID = "/TP" - if processing == 'Imaging': # imaging modes - tar_pipe_default_template = "Calibration Pipeline Target" - tar_pipe_description = "Target Pipeline" - elif processing == 'Preprocessing': - tar_pipe_default_template = "Preprocessing Pipeline" - tar_pipe_description = "Preprocessing" - elif processing == 'Calibration': - tar_pipe_default_template = "Calibration Pipeline" - tar_pipe_description = "Calibration" - elif processing == 'Pulsar': - tar_pipe_default_template = "Pulsar Pipeline" - tar_pipe_description = "Pulsar Pipeline" - tar_pipe_ID = "/PP" - elif processing == 
'LongBaseline': - tar_pipe_default_template = "Calibration Pipeline Target" - tar_pipe_description = "Target Pipeline" - - if writePackageTag: - tar_pipe_name = packageTag + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + tar_pipe_ID - else: - tar_pipe_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + tar_pipe_ID - - if processing == 'Imaging' or processing == 'LongBaseline': - writeXMLTargetPipeline(ofile, tar_pipe_topologies[beamNr], tar_pipe_predecessor, - tar_pipe_name, tar_pipe_description, tar_pipe_default_template, flaggingStrategy, - targetBeams[beamNr][8], targetDemix[beamNr][0], targetBBS[beamNr][0][1:], - tar_obs_uv_data_topologies[beamNr], tar_obs_uv_data_topologies[beamNr], - tar_pipe_input_INST_topo, tar_pipe_input_INST_topo, - tar_pipe_topologies[beamNr] + ".uv.dps", tar_pipe_topologies[beamNr] + ".uv.dps", - cluster, status, nr_tasks, nr_cores_per_task) - - elif processing == 'Preprocessing': - for i in range(0,len(targetDemix[beamNr])): - if len(targetDemix[beamNr]) > 1: #TODO a cludge right now, but want to refactor how to call the writeXML soon - tar_pipe_topology_tmp = tar_pipe_topologies[beamNr] + ".%i" % i - tar_pipe_name_tmp = tar_pipe_name + ".%i" % i - tar_pipe_topology_output_MS_tmp= tar_pipe_topologies[beamNr] + ".%i" % i + ".uv.dps" - else: - tar_pipe_topology_tmp = tar_pipe_topologies[beamNr] - tar_pipe_name_tmp = tar_pipe_name - tar_pipe_topology_output_MS_tmp= tar_pipe_topologies[beamNr] + ".uv.dps" - writeXMLAvgPipeline(ofile, tar_pipe_topology_tmp, tar_pipe_predecessor, - tar_pipe_name_tmp, tar_pipe_description, tar_pipe_default_template, - flaggingStrategy, targetBeams[beamNr][8], targetDemix[beamNr][i], - tar_obs_uv_data_topologies[beamNr], tar_pipe_topology_output_MS_tmp, - cluster, status, nr_tasks, nr_cores_per_task) - - elif processing == 'Calibration': #TODO currently doesn't work according to Alwin's wiki, why? 
- if targetBBS[beamNr][0][0] == '': - raise GenException("BBS SkyModel is not specified for pipeline coupled to target beam " + str(beamNr)) - - writeXMLCalPipe(ofile, tar_pipe_topologies[beamNr], tar_pipe_predecessor, - tar_pipe_name, tar_pipe_description, tar_pipe_default_template, flaggingStrategy, - targetBeams[beamNr][8], targetBBS[beamNr][0][0], targetDemix[beamNr][0], - targetBBS[beamNr][0][1:], tar_obs_uv_data_topologies[beamNr], - tar_pipe_topologies[beamNr] + ".inst.dps", tar_pipe_topologies[beamNr] + ".inst.dps", - tar_pipe_topologies[beamNr] + ".uv.dps", cluster, status, nr_tasks, nr_cores_per_task) - - elif processing == 'Pulsar': - #tar_obs_topology_MultiObs = tar_obs_topology + '.' + str(beamNr) - tar_pipe_predecessor = tar_obs_topology - - writeXMLPulsarPipe(ofile, tar_pipe_topologies[beamNr], tar_obs_topology, - tar_pipe_name, tar_pipe_description, tar_pipe_default_template, - targetBeams[beamNr][8], tar_obs_bf_data_topologies[beamNr], - tar_pipe_topologies[beamNr] + ".pu.dps", cluster, status, nr_tasks, nr_cores_per_task, - pulsar = targetPulsar[beamNr][0][0], - singlePulse = targetPulsar[beamNr][0][1], - rawTo8bit = targetPulsar[beamNr][0][2], - dspsrExtraOpts = targetPulsar[beamNr][0][3], - prepdataExtraOpts = targetPulsar[beamNr][0][4], - _8bitConversionSigma = targetPulsar[beamNr][0][5], - tsubint = targetPulsar[beamNr][0][6], - norfi = targetPulsar[beamNr][0][7], - nofold = targetPulsar[beamNr][0][8], - nopdmp = targetPulsar[beamNr][0][9], - skipDsps = targetPulsar[beamNr][0][10], - rrats = targetPulsar[beamNr][0][11], - _2bf2fitsExtraOpts = targetPulsar[beamNr][0][12], - decodeSigma = targetPulsar[beamNr][0][13], - decodeNblocks = targetPulsar[beamNr][0][14], - rfifindExtraOpts = targetPulsar[beamNr][0][15], - prepfoldExtraOpts = targetPulsar[beamNr][0][16], - prepsubbandExtraOpts = targetPulsar[beamNr][0][17], - dynamicSpectrumTimeAverage = targetPulsar[beamNr][0][18], - skipDynamicSpectrum = targetPulsar[beamNr][0][19], - skipPrepfold = 
targetPulsar[beamNr][0][20], - digifilExtraOpts = targetPulsar[beamNr][0][21]) - - # for long baseline processsing an additional (special purpose adapted) preprocessing pipeline is necessary - if processing == 'LongBaseline': - LB_preproc_pipe_template = 'Preprocessing LB' - LB_preproc_pipe_description = 'Phaseshift + adding CS stations' - - LB_pipeline_default_template = "Long-Baseline Pipeline" - LB_pipeline_description = "Long-Baseline Concat" - - for beamNr in range(0, nr_beams): - if targetBeams[beamNr][7]: #create pipelines for this beam - if writePackageTag: - LB_preproc_pipe_name = packageTag + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + "/PP" - LB_pipeline_name = packageTag + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + "/LBP" + + calibratorBBS = settings['calibratorBBS'] + if not calibratorBBS: + raise GenException("BBS SkyModel is not specified for pipeline coupled to calibrator beam") + + # TODO ['', '', '', '', '', '', ''] is really ugly, this will break the regression test + writeXMLCalPipe(ofile, + cal_pipe_calibrator_topology, + cal_obs_topology, + cal_pipe_name, + cal_pipe_calibrator_description, + cal_obs_pipe_default_template, + settings['flaggingStrategy'], + settings['calibratorBeam'][8], + calibratorBBS[0][0], + settings['calibratorDemix'][0], + ['', '', '', '', '', '', ''], + cal_obs_beam0_topology + '.uv.dps', + cal_pipe_calibrator_topology + '.inst.dps', + cal_pipe_calibrator_topology + '.inst.dps', + cal_pipe_calibrator_topology + '.uv.dps', + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + if not settings['split_targets']: + if settings['writePackageTag']: + tar_obs_name = settings['packageTag'] + "/" + settings['targetBeams'][0][2] + "/" + str(repeatNr) + "/TO" else: - LB_preproc_pipe_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + "/PP" - LB_pipeline_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "." 
+ str(beamNr) + "/LBP" - - writeXMLAvgPipeline(ofile, LB_preproc_pipe_topologies[beamNr], - LB_preproc_pipe_predecessor[beamNr], LB_preproc_pipe_name, - LB_preproc_pipe_description, LB_preproc_pipe_template, flaggingStrategy, - targetBeams[beamNr][8], targetDemix[beamNr][0], - tar_pipe_topologies[beamNr] + ".uv.dps", LB_preproc_pipe_output_MS_topologies[beamNr], - cluster, status, nr_tasks, nr_cores_per_task) - - #nv 13okt2014: #6716 - Implement Long Baseline Pipeline - writeXMLLongBaselinePipe(ofile, LB_pipeline_topologies[beamNr], - LB_pipeline_predecessor[beamNr], LB_pipeline_name, LB_pipeline_description, - LB_pipeline_default_template, targetBeams[beamNr][8], subbandsPerSubbandGroup, - subbandGroupsPerMS, LB_pipeline_input_uv_topologies[beamNr], - LB_pipeline_output_uv_topologies[beamNr], cluster, status, nr_tasks, nr_cores_per_task) - - return imaging_pipe_inputs, imaging_pipe_predecessors, startTimeObs + tar_obs_name = settings['targetBeams'][0][2] + "/" + str(repeatNr) + "/TO" + + writeXMLObs(ofile, + tar_obs_name, + tar_obs_name + ' (Target Observation)', + tar_obs_topology, + tar_obs_predecessor, + tar_obs_name, + projectName, + settings['tbbPiggybackAllowed'], + settings['aartfaacPiggybackAllowed'], + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['antennaMode'], + settings['clock'], + settings['instrumentFilter'], + settings['integrationTime'], + settings['channelsPerSubband'], + settings['coherentDedisperseChannels'], + settings['flysEye'], + settings['subbandsPerFileCS'], + settings['numberCollapsedChannelsCS'], + settings['stokesDownsamplingStepsCS'], + settings['whichCS'], + settings['subbandsPerFileIS'], + settings['numberCollapsedChannelsIS'], + settings['stokesDownsamplingStepsIS'], + settings['whichIS'], + settings['stationList'], + startTimeStr, + endTimeStr, + settings['targetDuration_s'], + settings['numberOfBitsPerSample'], + status) + + if settings['set_starttime']: + if 
settings['create_calibrator_observations']: + settings['startTimeObs'] = settings['startTimeObs'] + timedelta(seconds=settings['timeStep2'] + settings['targetDuration_s']) + else: + settings['startTimeObs'] = settings['startTimeObs'] + timedelta(seconds=settings['timeStep1'] + settings['targetDuration_s']) + + for beamNr in range(0, nr_beams): + targetBeams = settings['targetBeams'] + writeXMLBeam(ofile, + targetBeams[beamNr][2], + targetBeams[beamNr][2], + tar_obs_beam_topologies[beamNr], + 'Target', + targetBeams[beamNr][2], + targetBeams[beamNr][0], + targetBeams[beamNr][1], + targetBeams[beamNr][3], + settings['flysEye'], + targetBeams[beamNr][5], + targetBeams[beamNr][6], + writeTABXML(settings['targetTAB'][beamNr]), + writeDataProducts(tar_obs_beam_topologies[beamNr], + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['cluster']), + status) + + # create the extra polarization beam? + if "create_extra_ncp_beam" in settings and settings["create_extra_ncp_beam"]: + polBeamTopo = tar_obs_topology + ".SAP" + str(beamNr + 1).rjust(3, '0') # FIXME beamNr referenced outside loop?! + targetBeams = settings['targetBeams'] + writeXMLBeam(ofile, + targetBeams[0][2], + targetBeams[0][2], + targetBeams[0][2], + 'Target', + targetBeams[0][0], + settings['flysEye'], + targetBeams[0][5], + targetBeams[0][6], + writeTABXML(settings['targetTAB'][0]), + writeDataProducts(polBeamTopo, + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['cluster']), + status) # FIXME missing parameters + + # create a calibrator beam in the target observation? 
+ if settings['create_target_cal_beam']: + if "create_extra_ncp_beam" in settings and settings["create_extra_ncp_beam"]: + calBeamTopo = tar_obs_topology + ".SAP" + str(beamNr + 2).rjust(3, '0') + else: + calBeamTopo = tar_obs_topology + ".SAP" + str(beamNr + 1).rjust(3, '0') + + calibratorBeam = settings['calibratorBeam'] + writeXMLBeam(ofile, + calibratorBeam[2], + calibratorBeam[2], + calBeamTopo, + 'Calibration', + calibratorBeam[2], + calibratorBeam[0], + calibratorBeam[1], + calibratorBeam[3], + settings['flysEye'], + calibratorBeam[5], + calibratorBeam[6], + writeTABXML(settings['calibratorTAB']), + writeDataProducts(tar_obs_beam_topologies[nr_beams], + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['cluster']), + status) + + writeXMLObsEnd(ofile) + + if settings['writePackageTag']: + cal_pipe_target_name = settings['packageTag'] + "/" + calibratorBeam[2] + "/" + str(repeatNr) + "/CPT" + else: + cal_pipe_target_name = calibratorBeam[2] + "/" + str(repeatNr) + "/CPT" + + create_pipeline = calibratorBeam[7] + if create_pipeline: + if processing == 'Imaging' or processing == 'LongBaseline': + calibratorBBS = settings['calibratorBBS'] + if not calibratorBBS: + raise GenException("BBS SkyModel is not specified for pipeline coupled to calibration beam") + + writeXMLCalPipe(ofile, + cal_pipe_target_topology, + tar_obs_topology, + cal_pipe_target_name, + cal_pipe_target_description, + cal_tar_pipe_default_template, + settings['flaggingStrategy'], + calibratorBeam[8], + calibratorBBS[0][0], + settings['calibratorDemix'][0], + calibratorBBS[0][1:], + tar_obs_uv_data_topologies[nr_beams], + cal_pipe_target_topology + '.inst.dps', + cal_pipe_target_topology + '.inst.dps', + cal_pipe_target_topology + '.uv.dps', + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + elif processing == 'Preprocessing': + calibratorDemix = settings['calibratorDemix'] + for i in range(0, len(calibratorDemix)): + if 
len(calibratorDemix) > 1: # TODO a cludge right now, but want to refactor how to call the writeXML soon + cal_pipe_target_topology_tmp = cal_pipe_target_topology + ".%i" % i + cal_pipe_target_name_tmp = cal_pipe_target_name + ".%i" % i + else: + cal_pipe_target_topology_tmp = cal_pipe_target_topology + cal_pipe_target_name_tmp = cal_pipe_target_name + + writeXMLAvgPipeline(ofile, + cal_pipe_target_topology_tmp, + tar_obs_topology, + cal_pipe_target_name_tmp, + cal_pipe_target_description, # FIXME: Might be undefined + cal_tar_pipe_default_template, # FIXME: Might be undefined + settings['flaggingStrategy'], + calibratorBeam[8], + calibratorDemix[i], + tar_obs_uv_data_topologies[nr_beams], + cal_pipe_target_topology_tmp + '.uv.dps', + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + elif processing == 'Calibration': + + calibratorBBS = settings['calibratorBBS'] + if not calibratorBBS: + raise GenException("BBS SkyModel is not specified for pipeline coupled to calibration beam") + + writeXMLCalPipe(ofile, + cal_pipe_target_topology, + tar_obs_topology, + cal_pipe_target_name, + cal_pipe_target_description, # FIXME: Might be undefined + cal_tar_pipe_default_template, # FIXME: Might be undefined + settings['flaggingStrategy'], + calibratorBeam[8], + calibratorBBS[0][0], + settings['calibratorDemix'][0], + calibratorBBS[0][1:], + tar_obs_uv_data_topologies[nr_beams], + cal_pipe_target_topology + '.inst.dps', + cal_pipe_target_topology + '.inst.dps', + cal_pipe_target_topology + '.uv.dps', + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + else: + writeXMLObsEnd(ofile) + + else: # split target sources into separate observations + for beamNr in range(0, nr_beams): + if settings['writePackageTag']: + tar_obs_name = settings['packageTag'] + "/" + settings['targetBeams'][beamNr][2] + "/" + str(repeatNr) + "/TO" + else: + tar_obs_name = settings['targetBeams'][beamNr][2] + "/" + str(repeatNr) + "/TO" + + 
tar_obs_topology_MultiObs = tar_obs_topology + '.' + str(beamNr) + writeXMLObs(ofile, + tar_obs_name, + tar_obs_name + ' (Target Observation)', + tar_obs_topology_MultiObs, + '', + tar_obs_name, + projectName, + settings['tbbPiggybackAllowed'], + settings['aartfaacPiggybackAllowed'], + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['antennaMode'], + settings['clock'], + settings['instrumentFilter'], + settings['integrationTime'], + settings['channelsPerSubband'], + settings['coherentDedisperseChannels'], + settings['flysEye'], + settings['subbandsPerFileCS'], + settings['numberCollapsedChannelsCS'], + settings['stokesDownsamplingStepsCS'], + settings['whichCS'], + settings['subbandsPerFileIS'], + settings['numberCollapsedChannelsIS'], + settings['stokesDownsamplingStepsIS'], + settings['whichIS'], + settings['stationList'], + startTimeStr, + endTimeStr, + settings['targetDuration_s'], + settings['numberOfBitsPerSample'], + status) + + targetBeams = settings['targetBeams'] + writeXMLBeam(ofile, + targetBeams[beamNr][2], + targetBeams[beamNr][2], + tar_obs_beam_topologies[beamNr], + 'Target', + targetBeams[beamNr][2], + targetBeams[beamNr][0], + targetBeams[beamNr][1], + targetBeams[beamNr][3], + settings['flysEye'], + targetBeams[beamNr][5], + targetBeams[beamNr][6], + writeTABXML(settings['targetTAB'][beamNr]), + writeDataProducts(tar_obs_beam_topologies[beamNr], + settings['correlatedData'], + coherentStokesData, + incoherentStokesData, + settings['cluster']), + status) + + writeXMLObsEnd(ofile) + + if settings['set_starttime']: + settings['startTimeObs'] = settings['startTimeObs'] + timedelta(seconds=settings['timeStep1'] + settings['targetDuration_s']) + + # Target PIPELINES generation from here on + + for beamNr in range(0, nr_beams): + targetBeams = settings['targetBeams'] + create_pipeline = targetBeams[beamNr][7] + if create_pipeline: + tar_pipe_ID = "/TP" + if processing == 'Imaging': # imaging modes + 
tar_pipe_default_template = "Calibration Pipeline Target" + tar_pipe_description = "Target Pipeline" + elif processing == 'Preprocessing': + tar_pipe_default_template = "Preprocessing Pipeline" + tar_pipe_description = "Preprocessing" + elif processing == 'Calibration': + tar_pipe_default_template = "Calibration Pipeline" + tar_pipe_description = "Calibration" + elif processing == 'Pulsar': + tar_pipe_default_template = "Pulsar Pipeline" + tar_pipe_description = "Pulsar Pipeline" + tar_pipe_ID = "/PP" + elif processing == 'LongBaseline': + tar_pipe_default_template = "Calibration Pipeline Target" + tar_pipe_description = "Target Pipeline" + + if settings['writePackageTag']: + tar_pipe_name = settings['packageTag'] + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + \ + str(beamNr) + tar_pipe_ID + else: + tar_pipe_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + tar_pipe_ID + + if processing == 'Imaging' or processing == 'LongBaseline': + writeXMLTargetPipeline(ofile, + tar_pipe_topologies[beamNr], + tar_pipe_predecessor, + tar_pipe_name, + tar_pipe_description, + tar_pipe_default_template, + settings['flaggingStrategy'], + targetBeams[beamNr][8], + settings['targetDemix'][beamNr][0], + settings['targetBBS'][beamNr][0][1:], + tar_obs_uv_data_topologies[beamNr], + tar_obs_uv_data_topologies[beamNr], + tar_pipe_input_INST_topo, + tar_pipe_input_INST_topo, + tar_pipe_topologies[beamNr] + ".uv.dps", + tar_pipe_topologies[beamNr] + ".uv.dps", + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + elif processing == 'Preprocessing': + targetDemix = settings['targetDemix'] + for i in range(0, len(targetDemix[beamNr])): + if len(targetDemix[ + beamNr]) > 1: # TODO a cludge right now, but want to refactor how to call the writeXML soon + tar_pipe_topology_tmp = tar_pipe_topologies[beamNr] + ".%i" % i + tar_pipe_name_tmp = tar_pipe_name + ".%i" % i + tar_pipe_topology_output_MS_tmp = tar_pipe_topologies[beamNr] 
+ ".%i" % i + ".uv.dps" + else: + tar_pipe_topology_tmp = tar_pipe_topologies[beamNr] + tar_pipe_name_tmp = tar_pipe_name + tar_pipe_topology_output_MS_tmp = tar_pipe_topologies[beamNr] + ".uv.dps" + writeXMLAvgPipeline(ofile, + tar_pipe_topology_tmp, + tar_pipe_predecessor, + tar_pipe_name_tmp, + tar_pipe_description, + tar_pipe_default_template, + settings['flaggingStrategy'], + targetBeams[beamNr][8], + targetDemix[beamNr][i], + tar_obs_uv_data_topologies[beamNr], + tar_pipe_topology_output_MS_tmp, + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + elif processing == 'Calibration': # TODO currently doesn't work according to Alwin's wiki, why? + targetBBS = settings['targetBBS'] + if targetBBS[beamNr][0][0] == '': + raise GenException( + "BBS SkyModel is not specified for pipeline coupled to target beam " + str(beamNr)) + + writeXMLCalPipe(ofile, + tar_pipe_topologies[beamNr], + tar_pipe_predecessor, + tar_pipe_name, + tar_pipe_description, + tar_pipe_default_template, + settings['flaggingStrategy'], + targetBeams[beamNr][8], + targetBBS[beamNr][0][0], + settings['targetDemix'][beamNr][0], + targetBBS[beamNr][0][1:], + tar_obs_uv_data_topologies[beamNr], + tar_pipe_topologies[beamNr] + ".inst.dps", + tar_pipe_topologies[beamNr] + ".inst.dps", + tar_pipe_topologies[beamNr] + ".uv.dps", + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + elif processing == 'Pulsar': + # tar_obs_topology_MultiObs = tar_obs_topology + '.' 
+ str(beamNr) + tar_pipe_predecessor = tar_obs_topology + targetPulsar = settings['targetPulsar'] + writeXMLPulsarPipe(ofile, + tar_pipe_topologies[beamNr], + tar_obs_topology, + tar_pipe_name, + tar_pipe_description, + tar_pipe_default_template, + targetBeams[beamNr][8], + tar_obs_bf_data_topologies[beamNr], + tar_pipe_topologies[beamNr] + ".pu.dps", + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + pulsar=targetPulsar[beamNr][0][0], + singlePulse=targetPulsar[beamNr][0][1], + rawTo8bit=targetPulsar[beamNr][0][2], + dspsrExtraOpts=targetPulsar[beamNr][0][3], + prepdataExtraOpts=targetPulsar[beamNr][0][4], + _8bitConversionSigma=targetPulsar[beamNr][0][5], + tsubint=targetPulsar[beamNr][0][6], + norfi=targetPulsar[beamNr][0][7], + nofold=targetPulsar[beamNr][0][8], + nopdmp=targetPulsar[beamNr][0][9], + skipDspsr=targetPulsar[beamNr][0][10], + rrats=targetPulsar[beamNr][0][11], + _2bf2fitsExtraOpts=targetPulsar[beamNr][0][12], + decodeSigma=targetPulsar[beamNr][0][13], + decodeNblocks=targetPulsar[beamNr][0][14], + rfifindExtraOpts=targetPulsar[beamNr][0][15], + prepfoldExtraOpts=targetPulsar[beamNr][0][16], + prepsubbandExtraOpts=targetPulsar[beamNr][0][17], + dynamicSpectrumTimeAverage=targetPulsar[beamNr][0][18], + skipDynamicSpectrum=targetPulsar[beamNr][0][19], + skipPrepfold=targetPulsar[beamNr][0][20], + digifilExtraOpts=targetPulsar[beamNr][0][21], + miscParameters=miscParameters) + + # for long baseline processsing an additional (special purpose adapted) preprocessing pipeline is necessary + if processing == 'LongBaseline': + LB_preproc_pipe_template = 'Preprocessing LB' + LB_preproc_pipe_description = 'Phaseshift + adding CS stations' + + LB_pipeline_default_template = "Long-Baseline Pipeline" + LB_pipeline_description = "Long-Baseline Concat" + + for beamNr in range(0, nr_beams): + targetBeams = settings['targetBeams'] + if targetBeams[beamNr][7]: # create pipelines for this beam + if settings['writePackageTag']: + LB_preproc_pipe_name 
= settings['packageTag'] + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str( + beamNr) + "/PP" + LB_pipeline_name = settings['packageTag'] + "/" + targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str( + beamNr) + "/LBP" + else: + LB_preproc_pipe_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + "/PP" + LB_pipeline_name = targetBeams[beamNr][2] + "/" + str(repeatNr) + "." + str(beamNr) + "/LBP" + + writeXMLAvgPipeline(ofile, + LB_preproc_pipe_topologies[beamNr], + LB_preproc_pipe_predecessor[beamNr], + LB_preproc_pipe_name, + LB_preproc_pipe_description, + LB_preproc_pipe_template, + settings['flaggingStrategy'], + targetBeams[beamNr][8], + settings['targetDemix'][beamNr][0], + tar_pipe_topologies[beamNr] + ".uv.dps", + LB_preproc_pipe_output_MS_topologies[beamNr], + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task, + miscParameters) + + # nv 13okt2014: #6716 - Implement Long Baseline Pipeline + writeXMLLongBaselinePipe(ofile, + LB_pipeline_topologies[beamNr], + LB_pipeline_predecessor[beamNr], + LB_pipeline_name, + LB_pipeline_description, + LB_pipeline_default_template, + targetBeams[beamNr][8], + settings['subbandsPerSubbandGroup'], + settings['subbandGroupsPerMS'], + LB_pipeline_input_uv_topologies[beamNr], + LB_pipeline_output_uv_topologies[beamNr], + settings['cluster'], + status, + nr_tasks, + nr_cores_per_task) + + return imaging_pipe_inputs, imaging_pipe_predecessors, settings['startTimeObs'] + def writeBlock(ofile, settings, projectName, blockNr, status): - defaults = { - "subbandsPerFileCS": '', - "numberCollapsedChannelsCS": '', - "stokesDownsamplingStepsCS": '', - "whichCS": '', - "subbandsPerFileIS": '', - "numberCollapsedChannelsIS": '', - "stokesDownsamplingStepsIS": '', - "whichIS": '', - "tbbPiggybackAllowed":True, - "aartfaacPiggybackAllowed":True, - "imagingBBS": '', - "cluster":'CEP4'} - defaults.update(settings) #FIXME somewhat dirty hack, to be solved better later. 
- settings = defaults - if 'nr_tasks' in settings: # We can set a (different) number per BLOCK - nr_tasks = settings['nr_tasks'] - nr_cores_per_task = settings['nr_cores_per_task'] - else: - nr_tasks = 10 * DEFAULT_TASKS_PER_NODE - nr_cores_per_task = DEFAULT_CORES_PER_TASK - - #There's a lot of stuff in settings that's only relevant to the imaging pipelines - #otherSettings = { key: settings[key] for key not in imagingPipelineKeys } - - writeFolderStart(ofile, blockNr-1, settings["packageName"], settings["packageDescription"], settings["processing"]) - - if settings["set_starttime"]: - settings["startTimeObs"] = settings["startTime"] - else: - settings["startTimeObs"] = 0 - - imaging_pipe_inputs = [[] for i in range(settings["nr_beams"])] - imaging_pipe_predecessors = [[] for i in range(settings["nr_beams"])] - - blockTopo = "B%i." % (blockNr-1,) - for repeatNr in range (1, settings["nrRepeats"]+1): - imaging_pipe_inputs, imaging_pipe_predecessors, settings["startTimeObs"] = writeRepeat(ofile, - projectName, blockTopo, repeatNr, settings, imaging_pipe_inputs, imaging_pipe_predecessors, status, nr_tasks, nr_cores_per_task) - - if settings["do_imaging"]: - imagingPipelineKeys = ["imaging_pipe_type", "imaging_pipe_default_template", "imaging_pipe_duration", - "nrSubbandsPerImage", "maxBaseline", "fieldOfView", "weightingScheme", - "robustParameter", "nrOfIterations", "cleaningThreshold", - "uvMin", "uvMax", "stokesToImage"] - for key in imagingPipelineKeys: #Can this be done with list comprehension as well? 
- if key not in settings.keys(): - settings[key] = '' - ##imagingPipelineSettings = { key: settings[key] for key in imagingPipelineKeys } - imagingPipelineSettings = {} - for key in imagingPipelineKeys: - imagingPipelineSettings[key] = settings[key] - writeImagingPipeline(ofile, settings["nr_beams"], settings["targetBeams"], blockTopo, - settings["nrRepeats"], imaging_pipe_inputs, imaging_pipe_predecessors, - settings["writePackageTag"], settings["packageTag"], settings["nrImages"], - imagingPipelineSettings, settings["imagingBBS"], settings["cluster"], status, nr_tasks, nr_cores_per_task) - - writeFolderEnd(ofile) - + defaults = { + "subbandsPerFileCS": '', + "numberCollapsedChannelsCS": '', + "stokesDownsamplingStepsCS": '', + "whichCS": '', + "subbandsPerFileIS": '', + "numberCollapsedChannelsIS": '', + "stokesDownsamplingStepsIS": '', + "whichIS": '', + "tbbPiggybackAllowed": True, + "aartfaacPiggybackAllowed": True, + "imagingBBS": '', + "cluster": 'CEP4'} + defaults.update(settings) # FIXME somewhat dirty hack, to be solved better later. 
+ settings = defaults + if 'nr_tasks' in settings: # We can set a (different) number per BLOCK + nr_tasks = settings['nr_tasks'] + nr_cores_per_task = settings['nr_cores_per_task'] + else: + nr_tasks = 10 * DEFAULT_TASKS_PER_NODE + nr_cores_per_task = DEFAULT_CORES_PER_TASK + + # There's a lot of stuff in settings that's only relevant to the imaging pipelines + # otherSettings = { key: settings[key] for key not in imagingPipelineKeys } + + writeFolderStart(ofile, blockNr - 1, settings["packageName"], settings["packageDescription"], + settings["processing"]) + + if settings["set_starttime"]: + settings["startTimeObs"] = settings["startTime"] + else: + settings["startTimeObs"] = 0 + + imaging_pipe_inputs = [[] for i in range(settings["nr_beams"])] + imaging_pipe_predecessors = [[] for i in range(settings["nr_beams"])] + + miscParametersKeys = ["storagemanager"] + miscParameters = {key: value for (key, value) in settings.iteritems() if key in miscParametersKeys} + + blockTopo = "B%i." % (blockNr - 1,) + for repeatNr in range(1, settings["nrRepeats"] + 1): + imaging_pipe_inputs, imaging_pipe_predecessors, settings["startTimeObs"] = writeRepeat(ofile, + projectName, blockTopo, + repeatNr, settings, + imaging_pipe_inputs, + imaging_pipe_predecessors, + status, nr_tasks, + nr_cores_per_task, + miscParameters) + + if settings["do_imaging"]: + imagingPipelineKeys = ["imaging_pipe_type", "imaging_pipe_default_template", "imaging_pipe_duration", + "nrSubbandsPerImage", "maxBaseline", "fieldOfView", "weightingScheme", + "robustParameter", "nrOfIterations", "cleaningThreshold", + "uvMin", "uvMax", "stokesToImage"] + for key in imagingPipelineKeys: # Can this be done with list comprehension as well? 
+ if key not in settings.keys(): + settings[key] = '' + ##imagingPipelineSettings = { key: settings[key] for key in imagingPipelineKeys } + imagingPipelineSettings = {} + for key in imagingPipelineKeys: + imagingPipelineSettings[key] = settings[key] + writeImagingPipeline(ofile, + settings["nr_beams"], + settings["targetBeams"], + blockTopo, + settings["nrRepeats"], + imaging_pipe_inputs, + imaging_pipe_predecessors, + settings["writePackageTag"], + settings["packageTag"], + settings["nrImages"], + imagingPipelineSettings, + settings["imagingBBS"], + settings["cluster"], + status, nr_tasks, + nr_cores_per_task, + miscParameters) + writeFolderEnd(ofile) + + def main(argv): - try: - inputfile, outputfile, status = parseOptions(argv) - ofile = open(outputfile, 'w') - - header, blocks = processInput(inputfile) - - projectName, mainFolderName, mainFolderDescription = processHeader(header) - writeProjectStart(ofile, VERSION, projectName) - if mainFolderName: - writeMainFolderStart(ofile, mainFolderName, mainFolderDescription) - for index, block in enumerate(blocks): - printMessage("\nProcessing BLOCK %i" % (index+1)) - settings = readBlock(block, projectName, index+1) - settings = checkSettings(settings, index+1) - writeBlock(ofile, settings, projectName, index+1, status) - if mainFolderName: - writeMainFolderEnd(ofile) - writeProjectEnd(ofile) - #TODO make things not write to the ofile directly - # for b in block: - # output += generateBlock() - # ofile.write(output) - ofile.close() - except: - import traceback - traceback.print_exc(file=sys.stdout) - print "something went wrong here, now aborting" - exit(1) - - + try: + inputfile, outputfile, status = parseOptions(argv) + ofile = open(outputfile, 'w') + + header, blocks = processInput(inputfile) + + projectName, mainFolderName, mainFolderDescription = processHeader(header) + writeProjectStart(ofile, VERSION, projectName) + if mainFolderName: + writeMainFolderStart(ofile, mainFolderName, mainFolderDescription) + for 
index, block in enumerate(blocks): + printMessage("\nProcessing BLOCK %i" % (index + 1)) + settings = readBlock(block, projectName, index + 1) + settings = checkSettings(settings, index + 1) + writeBlock(ofile, settings, projectName, index + 1, status) + if mainFolderName: + writeMainFolderEnd(ofile) + writeProjectEnd(ofile) + # TODO make things not write to the ofile directly + # for b in block: + # output += generateBlock() + # ofile.write(output) + ofile.close() + except: + import traceback + traceback.print_exc(file=sys.stdout) + print "something went wrong here, now aborting" + exit(1) + + if __name__ == "__main__": - main(sys.argv[1:]) - + main(sys.argv[1:]) diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/test_dysco.txt b/SAS/XML_generator/test/test_regression.in_data/txt/test_dysco.txt new file mode 100644 index 0000000000000000000000000000000000000000..9600e27f0f736c437f9c52ec42e3433dc1c399ae --- /dev/null +++ b/SAS/XML_generator/test/test_regression.in_data/txt/test_dysco.txt @@ -0,0 +1,184 @@ +## PARAMETER FILE SETUP +projectName=LC4_022 +mainFolderName=20151018_3C58 +mainFolderDescription=HBA_DUAL_INNER, 110-190 MHz, av: 16/2, imaging, 20SB/chk + +BLOCK + +storagemanager= # turn dysco off in first BLOCK + +split_targets = F # T/F +calibration = external # internal / external / none +create_calibrator_observations = T # create cal-observations before target (T/F ignored if calibration is none) +create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none) +processing=Imaging # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none + +imagingPipeline=standard # can be one of MSSS, standard, none +nrSubbandsPerImage=20 +fieldOfView_deg=5.0 + +repeat=1 # the number of repeats of this block + +packageName=3C196/3C58 # name of package folder containing observation/pipelines +packageDescription=3C196/3C58 Scan +packageTag= # optional tag to be prepended before every obs/pipeline 
name/description (max 8 chars) + +antennaMode=HBA Dual Inner +clock=200 MHz +instrumentFilter=110-190 MHz +numberOfBitsPerSample=8 # Change to 16 if requested +integrationTime=2.0 +channelsPerSubband=64 +stationList=nl +tbbPiggybackAllowed=T +aartfaacPiggybackAllowed=T + +###### Which data types should be produced: ###### + +correlatedData=T +coherentStokesData=F +incoherentStokesData=F +flysEye=F +coherentDedisperseChannels=False + +###### Coherent Stokes parameters ###### +#subbandsPerFileCS= +#numberCollapsedChannelsCS= +#stokesDownsamplingStepsCS= +#whichCS= +###### Coherent Stokes parameters ###### +#subbandsPerFileIS= +#numberCollapsedChannelsIS= +#stokesDownsamplingStepsIS= +#whichIS= + +flaggingStrategy=HBAdefault +calibratorDuration_s=600 # duration of calibration observations in seconds +targetDuration_s=20400 # duration of target observations in seconds + +###### Pipeline settings ###### +## GENERAL INFO +## Processing mode: Imaging + + +Global_Demix=16;2;16;2;;; +Global_Subbands=100..339;240 + +# startTimeUTC, the start time of the first observation. 
format: yyyy-MM-dd hh:mm:ss +# un-comment the startTimeUTC to have the observation start times generated +startTimeUTC=2015-10-18 21:00:00 +# timeStep's in seconds +timeStep1=60 +timeStep2=60 + +calibratorBeam= +08:13:36.07;+48:13:02.6;3C196;;;;;T;1000 +BBS=3C196;;;T +#Demix=8;2;64;10;[CasA,CygA];; +#TAB: +#c;05:34:51.94;+22:00:52.2 + +# target beams and target pipelines +# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds] +# optionally followed by BBS and/or demixing settings +# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange +# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target +# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold + +targetBeams= +02:05:38.00;+64:49:42.0;3C58;;;;;T;50000 +#Pulsar=B0531+21;;T;;;;;;;;;;;;;;;;;; +#0.0417300951946;0.00558069028325;Sun;54..297;244;;;T;8100 +#Demix=8;2;64;10;[CasA,CygA];; + +#subbandsPerSubbandGroup = 16 # the number of subbands that will be concatenated in a subband-group +#subbandGroupsPerMS = 1 # the number of subband-groups that will be (virually) concatenated in each measurement set + +BLOCK1 + +split_targets = F # T/F +calibration = none # internal / external / none +create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none) +create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none) +processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none + 
+#imagingPipeline=standard # can be one of MSSS, standard, none +#nrSubbandsPerImage=20 +#fieldOfView_deg=5.0 + +repeat=1 # the number of repeats of this block + +packageName=3C196 # name of package folder containing observation/pipelines +packageDescription=3C196 Bookend Scan +packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars) + +antennaMode=HBA Dual Inner +clock=200 MHz +instrumentFilter=110-190 MHz +numberOfBitsPerSample=8 # Change to 16 if requested +integrationTime=2.0 +channelsPerSubband=64 +stationList=nl +tbbPiggybackAllowed=T +aartfaacPiggybackAllowed=T + +###### Which data types should be produced: ###### + +correlatedData=T +coherentStokesData=F +incoherentStokesData=F +flysEye=F +coherentDedisperseChannels=False + +###### Coherent Stokes parameters ###### +#subbandsPerFileCS= +#numberCollapsedChannelsCS= +#stokesDownsamplingStepsCS= +#whichCS= +###### Coherent Stokes parameters ###### +#subbandsPerFileIS= +#numberCollapsedChannelsIS= +#stokesDownsamplingStepsIS= +#whichIS= + +flaggingStrategy=HBAdefault +#calibratorDuration_s=3600 # duration of calibration observations in seconds +targetDuration_s=600 # duration of target observations in seconds + +###### Pipeline settings ###### +## GENERAL INFO +## Processing mode: Imaging + + +Global_Demix=16;2;16;2;;; +Global_Subbands=100..339;240 + +# startTimeUTC, the start time of the first observation. 
format: yyyy-MM-dd hh:mm:ss +# un-comment the startTimeUTC to have the observation start times generated +startTimeUTC=2015-10-19 02:52:00 +# timeStep's in seconds +timeStep1=60 +timeStep2=60 + +#calibratorBeam= +#08:13:36.07;+48:13:02.6;3C196;;;;;T;50000 +#BBS=3C196;;;T +#Demix=8;2;64;10;[CasA,CygA];; +#TAB: +#c;05:34:51.94;+22:00:52.2 + +# target beams and target pipelines +# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds] +# optionally followed by BBS and/or demixing settings +# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange +# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target +# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold + +targetBeams= +08:13:36.07;+48:13:02.6;3C196;;;;;T;1000 +#Pulsar=B0531+21;;T;;;;;;;;;;;;;;;;;; +#0.0417300951946;0.00558069028325;Sun;54..297;244;;;T;8100 +#Demix=8;2;64;10;[CasA,CygA];; + +#subbandsPerSubbandGroup = 16 # the number of subbands that will be concatenated in a subband-group +#subbandGroupsPerMS = 1 # the number of subband-groups that will be (virually) concatenated in each measurement set \ No newline at end of file diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/20150713_4C17.31.xml b/SAS/XML_generator/test/test_regression.in_data/xml/20150713_4C17.31.xml index fb10b81a7aacae26681cd305d5d5f60c00532f1f..ea7f02e7bbd1ec71f9275888774b62c0b1faf6c9 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/20150713_4C17.31.xml +++ 
b/SAS/XML_generator/test/test_regression.in_data/xml/20150713_4C17.31.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_035</name> <children> @@ -238,6 +238,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -292,6 +293,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -346,6 +348,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/20150731_G46_run1_HBA.xml b/SAS/XML_generator/test/test_regression.in_data/xml/20150731_G46_run1_HBA.xml index 6d3b41d26a09b1f2aa41d2369293d84dd330cd8e..08b6b8cdf3ee018588670738a30b9ae97d28d852 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/20150731_G46_run1_HBA.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/20150731_G46_run1_HBA.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project 
xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_010</name> <children> @@ -194,6 +194,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -248,6 +249,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -500,6 +502,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -554,6 +557,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -608,6 +612,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -816,6 +821,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -870,6 +876,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> 
</demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/20150810_P247P244.xml b/SAS/XML_generator/test/test_regression.in_data/xml/20150810_P247P244.xml index 03c03f0b40ad7c4ee585384415e5c45494b889b6..eb40756020d4ec5a74acd1ba2f5a3586e087a6b5 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/20150810_P247P244.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/20150810_P247P244.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_034</name> <children> @@ -150,6 +150,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget>false</ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -358,6 +359,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget>false</ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -412,6 +414,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget>false</ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -576,6 +579,7 @@ <demixIfNeeded></demixIfNeeded> 
<ignoreTarget>false</ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/2MASS_1.xml b/SAS/XML_generator/test/test_regression.in_data/xml/2MASS_1.xml index 4cedd99e1d8cdef9e16542361987d863d4e3c0b1..f261de3b7f6e348d7e28aee48b07d839e750a203 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/2MASS_1.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/2MASS_1.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_031</name> <children> @@ -150,6 +150,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -305,6 +306,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -469,6 +471,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git 
a/SAS/XML_generator/test/test_regression.in_data/xml/B1834620_HBA_LB_20150817.xml b/SAS/XML_generator/test/test_regression.in_data/xml/B1834620_HBA_LB_20150817.xml index ed08ef0f6d97eae9086c3a7732ef7cd88e4f7b8f..96437c007189a9bc0a93ad70c5f34dda0662afb3 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/B1834620_HBA_LB_20150817.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/B1834620_HBA_LB_20150817.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>Commissioning2015</name> <children> @@ -160,6 +160,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -379,6 +380,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -448,6 +450,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -508,6 +511,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> 
+<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -608,6 +612,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -819,6 +824,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1038,6 +1044,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1107,6 +1114,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1167,6 +1175,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1267,6 +1276,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/LC4_020_20150813.xml b/SAS/XML_generator/test/test_regression.in_data/xml/LC4_020_20150813.xml index ab0125cf361245b9348a2f65f9a2c7f3613c5d9c..3c70a02fba888b1f053f01ac10e362b188c018af 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/LC4_020_20150813.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/LC4_020_20150813.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar 
http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_020</name> <children> @@ -194,6 +194,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -248,6 +249,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/LC4_022_3C58_HBA_parameters.xml b/SAS/XML_generator/test/test_regression.in_data/xml/LC4_022_3C58_HBA_parameters.xml index f54d2db6c2683cd6b42bfc3ef9c4f2f468344f49..f9ba83ea836cdc75bbdcb53ec0aca751150d6ed5 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/LC4_022_3C58_HBA_parameters.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/LC4_022_3C58_HBA_parameters.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + 
<version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_022</name> <children> @@ -160,6 +160,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -335,6 +336,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -565,6 +567,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/LT5_006_4C43_HBA_calSearch_parameters.xml b/SAS/XML_generator/test/test_regression.in_data/xml/LT5_006_4C43_HBA_calSearch_parameters.xml index feae5d0c82876d762be89b32e32c5fc84c56f4f0..dc2724133d9c41519632758e210658499075df4f 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/LT5_006_4C43_HBA_calSearch_parameters.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/LT5_006_4C43_HBA_calSearch_parameters.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de 
Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LT5_006</name> <children> @@ -160,6 +160,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -2975,6 +2976,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3044,6 +3046,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3113,6 +3116,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3182,6 +3186,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3251,6 +3256,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3320,6 +3326,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3389,6 +3396,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3458,6 +3466,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> 
</pipelineAttributes> <usedDataProducts> <item> @@ -3527,6 +3536,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3596,6 +3606,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3665,6 +3676,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3734,6 +3746,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3803,6 +3816,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3872,6 +3886,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -3941,6 +3956,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4010,6 +4026,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4079,6 +4096,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4148,6 +4166,7 @@ <strategyBaselines></strategyBaselines> 
<strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4217,6 +4236,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4286,6 +4306,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4355,6 +4376,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4424,6 +4446,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4493,6 +4516,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4562,6 +4586,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4631,6 +4656,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4700,6 +4726,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4769,6 +4796,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> 
</pipelineAttributes> <usedDataProducts> <item> @@ -4838,6 +4866,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4907,6 +4936,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -4976,6 +5006,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5045,6 +5076,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5114,6 +5146,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5183,6 +5216,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5252,6 +5286,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5321,6 +5356,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5390,6 +5426,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5459,6 +5496,7 @@ <strategyBaselines></strategyBaselines> 
<strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5528,6 +5566,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5597,6 +5636,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5666,6 +5706,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5735,6 +5776,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5804,6 +5846,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5873,6 +5916,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -5942,6 +5986,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6011,6 +6056,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6080,6 +6126,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> 
</pipelineAttributes> <usedDataProducts> <item> @@ -6149,6 +6196,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6218,6 +6266,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6287,6 +6336,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6356,6 +6406,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6425,6 +6476,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6494,6 +6546,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6563,6 +6616,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6632,6 +6686,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6701,6 +6756,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6770,6 +6826,7 @@ <strategyBaselines></strategyBaselines> 
<strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6839,6 +6896,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6908,6 +6966,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -6977,6 +7036,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7046,6 +7106,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7106,6 +7167,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7206,6 +7268,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7306,6 +7369,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7406,6 +7470,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7506,6 +7571,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7606,6 +7672,7 @@ 
<demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7706,6 +7773,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7806,6 +7874,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -7906,6 +7975,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8006,6 +8076,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8106,6 +8177,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8206,6 +8278,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8306,6 +8379,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8406,6 +8480,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8506,6 +8581,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8606,6 +8682,7 @@ <demixIfNeeded></demixIfNeeded> 
<ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8706,6 +8783,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8806,6 +8884,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -8906,6 +8985,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9006,6 +9086,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9106,6 +9187,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9206,6 +9288,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9306,6 +9389,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9406,6 +9490,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9506,6 +9591,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9606,6 +9692,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> 
</demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9706,6 +9793,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9806,6 +9894,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -9906,6 +9995,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10006,6 +10096,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10106,6 +10197,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10206,6 +10298,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10306,6 +10399,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10406,6 +10500,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10506,6 +10601,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10606,6 +10702,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> 
+<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10706,6 +10803,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10806,6 +10904,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -10906,6 +11005,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11006,6 +11106,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11106,6 +11207,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11206,6 +11308,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11306,6 +11409,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11406,6 +11510,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11506,6 +11611,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11606,6 +11712,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> 
+<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11706,6 +11813,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11806,6 +11914,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -11906,6 +12015,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12006,6 +12116,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12106,6 +12217,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12206,6 +12318,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12306,6 +12419,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12406,6 +12520,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12506,6 +12621,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12606,6 +12722,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> 
+<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12706,6 +12823,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12806,6 +12924,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -12906,6 +13025,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -13006,6 +13126,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/MSSS_20151207_testmultipipe.xml b/SAS/XML_generator/test/test_regression.in_data/xml/MSSS_20151207_testmultipipe.xml index aa93772eac3f9ed01234788a7be9b6dd4a38bda0..aed9df4dbeb04f5a820efedcb8baec0394bdf785 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/MSSS_20151207_testmultipipe.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/MSSS_20151207_testmultipipe.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de 
Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LOFAROBS</name> <children> @@ -458,6 +458,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -512,6 +513,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -566,6 +568,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -620,6 +623,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -674,6 +678,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -728,6 +733,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -782,6 +788,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -836,6 +843,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -890,6 +898,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -944,6 +953,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> 
</demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -998,6 +1008,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1052,6 +1063,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1106,6 +1118,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1160,6 +1173,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1214,6 +1228,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -1268,6 +1283,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/Ticket_6923.xml b/SAS/XML_generator/test/test_regression.in_data/xml/Ticket_6923.xml index b9dbab3bb3a8e9300bbd618111260eb165ae0e00..50250f2f61318fecd1e2608a5b06fd9c75f692dc 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/Ticket_6923.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/Ticket_6923.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd 
http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC2_040</name> <children> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/hd156279b_reference.xml b/SAS/XML_generator/test/test_regression.in_data/xml/hd156279b_reference.xml index 5a8ddd8875a4e03eb1fcc75b043a6bc5db36a4d4..24c634cd5483ed757c45b520309d9440bc61a34f 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/hd156279b_reference.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/hd156279b_reference.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_012</name> <children> @@ -204,6 +204,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -278,6 +279,7 @@ 
<strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/lc4_019_1.xml b/SAS/XML_generator/test/test_regression.in_data/xml/lc4_019_1.xml index 881e4dd8561f716970e07d9ee874c2ff84058a27..f5163c09d2932171d15715203c9166e0398917ee 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/lc4_019_1.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/lc4_019_1.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>LC4_019</name> <children> @@ -162,10 +162,12 @@ <rfifindExtraOpts></rfifindExtraOpts> <rrats></rrats> <singlePulse></singlePulse> - <skipDsps></skipDsps> + <skipDspsr></skipDspsr> <skipDynamicSpectrum></skipDynamicSpectrum> <skipPrepfold></skipPrepfold> <tsubint></tsubint> +<misc>{"storagemanager": "dysco"}</misc> + </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/test_LB.xml b/SAS/XML_generator/test/test_regression.in_data/xml/test_LB.xml index fcc3c8fa79e389053cbeb6352a450cb4bd1cb039..0fce3773a7232e2acbe75b8986d90083c595e9df 100644 --- 
a/SAS/XML_generator/test/test_regression.in_data/xml/test_LB.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/test_LB.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>test-lofar</name> <children> @@ -160,6 +160,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -335,6 +336,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -395,6 +397,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -606,6 +609,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -781,6 +785,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -841,6 +846,7 
@@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/test_dysco.xml b/SAS/XML_generator/test/test_regression.in_data/xml/test_dysco.xml new file mode 100644 index 0000000000000000000000000000000000000000..96e9907a126bf03a56906fedd9f6bb0fb640abcd --- /dev/null +++ b/SAS/XML_generator/test/test_regression.in_data/xml/test_dysco.xml @@ -0,0 +1,600 @@ +<?xml version="1.0" encoding="UTF-8"?> + <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> + </template> + <name>LC4_022</name> + <children> + <item index="0"> + <lofar:folder topology_parent="false"> + <name>20151018_3C58</name> + <description>HBA_DUAL_INNER, 110-190 MHz, av: 16/2, imaging, 20SB/chk</description> + <children> + <item index="0"> + <lofar:folder topology_parent="true"> + <topology>0</topology> + <name>3C196/3C58</name> + <description>3C196/3C58 Scan (Imaging)</description> + <children> + <item index="0"> + <lofar:observation> + <name>3C196/1/CO</name> + <description>3C196/1/CO (Calibration Observation)</description> + <topology>B0.1.C</topology> + <predecessor_topology></predecessor_topology> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <lofar:observationAttributes> + <observationId> + </observationId> + <name>3C196/1/CO</name> + <projectName>LC4_022</projectName> + <instrument>Beam Observation</instrument> + 
<defaultTemplate>BeamObservation</defaultTemplate> + <tbbPiggybackAllowed>true</tbbPiggybackAllowed> + <aartfaacPiggybackAllowed>true</aartfaacPiggybackAllowed> + <userSpecification> + <correlatedData>true</correlatedData> + <coherentStokesData>false</coherentStokesData> + <incoherentStokesData>false</incoherentStokesData> + <antenna>HBA Dual Inner</antenna> + <clock mode="200 MHz"/> + <instrumentFilter>110-190 MHz</instrumentFilter> + <integrationInterval>2.0</integrationInterval> + <channelsPerSubband>64</channelsPerSubband> + <coherentDedisperseChannels>false</coherentDedisperseChannels> + <tiedArrayBeams> + <flyseye>false</flyseye> + </tiedArrayBeams> + <stokes> + <integrateChannels>false</integrateChannels> + <subbandsPerFileCS></subbandsPerFileCS> + <numberCollapsedChannelsCS></numberCollapsedChannelsCS> + <stokesDownsamplingStepsCS></stokesDownsamplingStepsCS> + <whichCS></whichCS> + <subbandsPerFileIS></subbandsPerFileIS> + <numberCollapsedChannelsIS></numberCollapsedChannelsIS> + <stokesDownsamplingStepsIS></stokesDownsamplingStepsIS> + <whichIS></whichIS> + </stokes> + <stationSet>Custom</stationSet> + <stations>CS001,CS002,CS003,CS004,CS005,CS006,CS007,CS011,CS013,CS017,CS021,CS024,CS026,CS028,CS030,CS031,CS032,CS101,CS103,CS201,CS301,CS302,CS401,CS501,RS106,RS205,RS208,RS210,RS305,RS306,RS307,RS310,RS406,RS407,RS409,RS503,RS508,RS509</stations> + <timeFrame>UT</timeFrame> + <startTime>2015-10-18T21:00:00</startTime> + <endTime>2015-10-18T21:10:00</endTime> + <duration>600</duration> + <bypassPff>false</bypassPff> + <enableSuperterp>false</enableSuperterp> + <numberOfBitsPerSample>8</numberOfBitsPerSample> + </userSpecification> + </lofar:observationAttributes> + <children> +<item index="0"> + <lofar:measurement xsi:type="lofar:BFMeasurementType"> + <name>3C196</name> + <description>3C196</description> + <topology>B0.1.C.SAP000</topology> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <lofar:bfMeasurementAttributes> + 
<measurementType>Calibration</measurementType> + <specification> + <targetName>3C196</targetName> + <ra>123.400291667</ra> + <dec>48.2173888889</dec> + <equinox>J2000</equinox> + <duration>0</duration> + <subbandsSpecification> + <subbands>100..339</subbands> + </subbandsSpecification> + <tiedArrayBeams> + <flyseye>false</flyseye> + <nrTabRings></nrTabRings> + <tabRingSize></tabRingSize> + <tiedArrayBeamList> + + </tiedArrayBeamList> + </tiedArrayBeams> + </specification> + </lofar:bfMeasurementAttributes> + <resultDataProducts> + <item> + <lofar:uvDataProduct> + <name>B0.1.C.SAP000.uv.dps</name> + <topology>B0.1.C.SAP000.uv.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:uvDataProduct> + </item> + </resultDataProducts> + </lofar:measurement> + </item> +</children> + </lofar:observation> + </item> + <item index="0"> + <lofar:pipeline xsi:type="lofar:CalibrationPipelineType"> + <topology>B0.1.CPC</topology> + <predecessor_topology>B0.1.C</predecessor_topology> + <name>3C196/1/CPC</name> + <description>3C196/1/CPC (Cal Pipe Calibrator)</description> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <processingCluster> + <name>CEP4</name> + <partition>cpu</partition> + <numberOfTasks>110</numberOfTasks> + <minRAMPerTask unit="byte">1000000000</minRAMPerTask> + <minScratchPerTask unit="byte">100000000</minScratchPerTask> + <maxDurationPerTask>P7DT0S</maxDurationPerTask> + <numberOfCoresPerTask>2</numberOfCoresPerTask> + <runSimultaneous>true</runSimultaneous> + </processingCluster> + <pipelineAttributes> + <defaultTemplate>Calibrator Pipeline (export)</defaultTemplate> + <flaggingStrategy>HBAdefault</flaggingStrategy> + <duration>1000</duration> + <skyModelDatabase>3C196</skyModelDatabase> + <demixingParameters> + <averagingFreqStep>16</averagingFreqStep> + <averagingTimeStep>2</averagingTimeStep> + <demixFreqStep>16</demixFreqStep> + 
<demixTimeStep>2</demixTimeStep> + <demixAlways></demixAlways> + <demixIfNeeded></demixIfNeeded> + <ignoreTarget></ignoreTarget> + </demixingParameters> + <bbsParameters> + <baselines></baselines> + <correlations></correlations> + <beamModelEnable>true</beamModelEnable> + <solveParms></solveParms> + <solveUVRange></solveUVRange> + <strategyBaselines></strategyBaselines> + <strategyTimeRange></strategyTimeRange> + </bbsParameters> +<misc>{"storagemanager": ""}</misc> +</pipelineAttributes> + <usedDataProducts> + <item> + <lofar:uvDataProduct topology="B0.1.C.SAP000.uv.dps"> + </lofar:uvDataProduct> + </item> + </usedDataProducts> + <resultDataProducts> + <item> + <lofar:instrumentModelDataProduct> + <name>B0.1.CPC.inst.dps</name> + <topology>B0.1.CPC.inst.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:instrumentModelDataProduct> + </item> + <item> + <lofar:uvDataProduct> + <name>B0.1.CPC.uv.dps</name> + <topology>B0.1.CPC.uv.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:uvDataProduct> + </item> + </resultDataProducts> + </lofar:pipeline> + </item> + <item index="0"> + <lofar:observation> + <name>3C58/1/TO</name> + <description>3C58/1/TO (Target Observation)</description> + <topology>B0.1.T</topology> + <predecessor_topology>B0.1.C</predecessor_topology> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <lofar:observationAttributes> + <observationId> + </observationId> + <name>3C58/1/TO</name> + <projectName>LC4_022</projectName> + <instrument>Beam Observation</instrument> + <defaultTemplate>BeamObservation</defaultTemplate> + <tbbPiggybackAllowed>true</tbbPiggybackAllowed> + <aartfaacPiggybackAllowed>true</aartfaacPiggybackAllowed> + <userSpecification> + <correlatedData>true</correlatedData> + <coherentStokesData>false</coherentStokesData> + 
<incoherentStokesData>false</incoherentStokesData> + <antenna>HBA Dual Inner</antenna> + <clock mode="200 MHz"/> + <instrumentFilter>110-190 MHz</instrumentFilter> + <integrationInterval>2.0</integrationInterval> + <channelsPerSubband>64</channelsPerSubband> + <coherentDedisperseChannels>false</coherentDedisperseChannels> + <tiedArrayBeams> + <flyseye>false</flyseye> + </tiedArrayBeams> + <stokes> + <integrateChannels>false</integrateChannels> + <subbandsPerFileCS></subbandsPerFileCS> + <numberCollapsedChannelsCS></numberCollapsedChannelsCS> + <stokesDownsamplingStepsCS></stokesDownsamplingStepsCS> + <whichCS></whichCS> + <subbandsPerFileIS></subbandsPerFileIS> + <numberCollapsedChannelsIS></numberCollapsedChannelsIS> + <stokesDownsamplingStepsIS></stokesDownsamplingStepsIS> + <whichIS></whichIS> + </stokes> + <stationSet>Custom</stationSet> + <stations>CS001,CS002,CS003,CS004,CS005,CS006,CS007,CS011,CS013,CS017,CS021,CS024,CS026,CS028,CS030,CS031,CS032,CS101,CS103,CS201,CS301,CS302,CS401,CS501,RS106,RS205,RS208,RS210,RS305,RS306,RS307,RS310,RS406,RS407,RS409,RS503,RS508,RS509</stations> + <timeFrame>UT</timeFrame> + <startTime>2015-10-18T21:11:00</startTime> + <endTime>2015-10-19T02:51:00</endTime> + <duration>20400</duration> + <bypassPff>false</bypassPff> + <enableSuperterp>false</enableSuperterp> + <numberOfBitsPerSample>8</numberOfBitsPerSample> + </userSpecification> + </lofar:observationAttributes> + <children> +<item index="0"> + <lofar:measurement xsi:type="lofar:BFMeasurementType"> + <name>3C58</name> + <description>3C58</description> + <topology>B0.1.T.SAP000</topology> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <lofar:bfMeasurementAttributes> + <measurementType>Target</measurementType> + <specification> + <targetName>3C58</targetName> + <ra>31.4083333333</ra> + <dec>64.8283333333</dec> + <equinox>J2000</equinox> + <duration>0</duration> + <subbandsSpecification> + <subbands>100..339</subbands> + </subbandsSpecification> + 
<tiedArrayBeams> + <flyseye>false</flyseye> + <nrTabRings>0</nrTabRings> + <tabRingSize></tabRingSize> + <tiedArrayBeamList> + + </tiedArrayBeamList> + </tiedArrayBeams> + </specification> + </lofar:bfMeasurementAttributes> + <resultDataProducts> + <item> + <lofar:uvDataProduct> + <name>B0.1.T.SAP000.uv.dps</name> + <topology>B0.1.T.SAP000.uv.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:uvDataProduct> + </item> + </resultDataProducts> + </lofar:measurement> + </item> +</children> + </lofar:observation> + </item> +<item index="0"> + <lofar:pipeline xsi:type="lofar:CalibrationPipelineType"> + <topology>B0.1.PT0</topology> + <predecessor_topology>B0.1.T,B0.1.CPC</predecessor_topology> + <name>3C58/1.0/TP</name> + <description>3C58/1.0/TP (Target Pipeline)</description> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <processingCluster> + <name>CEP4</name> + <partition>cpu</partition> + <numberOfTasks>110</numberOfTasks> + <minRAMPerTask unit="byte">1000000000</minRAMPerTask> + <minScratchPerTask unit="byte">100000000</minScratchPerTask> + <maxDurationPerTask>P7DT0S</maxDurationPerTask> + <numberOfCoresPerTask>2</numberOfCoresPerTask> + <runSimultaneous>true</runSimultaneous> + </processingCluster> + <pipelineAttributes> + <defaultTemplate>Calibration Pipeline Target</defaultTemplate> + <flaggingStrategy>HBAdefault</flaggingStrategy> + <duration>50000</duration> + <demixingParameters> + <averagingFreqStep>16</averagingFreqStep> + <averagingTimeStep>2</averagingTimeStep> + <demixFreqStep>16</demixFreqStep> + <demixTimeStep>2</demixTimeStep> + <demixAlways></demixAlways> + <demixIfNeeded></demixIfNeeded> + <ignoreTarget></ignoreTarget> + </demixingParameters> + <bbsParameters> + <baselines></baselines> + <correlations></correlations> + <beamModelEnable>true</beamModelEnable> + <solveParms></solveParms> + <solveUVRange></solveUVRange> + 
<strategyBaselines></strategyBaselines> + <strategyTimeRange></strategyTimeRange> + </bbsParameters> +<misc>{"storagemanager": ""}</misc> +</pipelineAttributes> + <usedDataProducts> + <item> + <lofar:uvDataProduct topology="B0.1.T.SAP000.uv.dps"> + <name>B0.1.T.SAP000.uv.dps</name> + </lofar:uvDataProduct> + </item> + <item> + <lofar:instrumentModelDataProduct topology="B0.1.CPC.inst.dps"> + <name>B0.1.CPC.inst.dps</name> + </lofar:instrumentModelDataProduct> + </item> + </usedDataProducts> + <resultDataProducts> + <item> + <lofar:uvDataProduct> + <name>B0.1.PT0.uv.dps</name> + <topology>B0.1.PT0.uv.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:uvDataProduct> + </item> + </resultDataProducts> + </lofar:pipeline> + </item> +<item index="0"> + <lofar:pipeline xsi:type="lofar:ImagingPipelineType"> + <topology>B0.PI0</topology> + <predecessor_topology>B0.1.PT0</predecessor_topology> + <name>3C58/IM</name> + <description>3C58/IM (Imaging pipeline beam 0)</description> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <processingCluster> + <name>CEP4</name> + <partition>cpu</partition> + <numberOfTasks>110</numberOfTasks> + <minRAMPerTask unit="byte">1000000000</minRAMPerTask> + <minScratchPerTask unit="byte">100000000</minScratchPerTask> + <maxDurationPerTask>P7DT0S</maxDurationPerTask> + <numberOfCoresPerTask>2</numberOfCoresPerTask> + <runSimultaneous>true</runSimultaneous> + </processingCluster> + <imagingPipelineAttributes> + <defaultTemplate>Imaging Pipeline HBA</defaultTemplate> + <duration>0</duration> + <nrOfOutputSkyImage>12</nrOfOutputSkyImage> + <imagingParameters> + <nrSlicesPerImage>1</nrSlicesPerImage> + <nrSubbandsPerImage>20</nrSubbandsPerImage> + <maxBaseline></maxBaseline> + <fieldOfView>5.0</fieldOfView> + <weight></weight> + <robust></robust> + <iterations></iterations> + <threshold></threshold> + <uvMin></uvMin> + <uvMax></uvMax> + 
<stokes></stokes> + </imagingParameters> + + </imagingPipelineAttributes> + <usedDataProducts> + <item> + <lofar:uvDataProduct topology="B0.1.PT0.uv.dps"> + <name>B0.1.PT0.uv.dps</name> + </lofar:uvDataProduct> + </item> + </usedDataProducts> + <resultDataProducts> + <item> + <lofar:skyImageDataProduct> + <name>B0.PI0.dps</name> + <topology>B0.PI0.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:skyImageDataProduct> + </item> + </resultDataProducts> + </lofar:pipeline> + </item> +</children> + </lofar:folder> + </item> + <item index="0"> + <lofar:folder topology_parent="true"> + <topology>1</topology> + <name>3C196</name> + <description>3C196 Bookend Scan (Preprocessing)</description> + <children> + <item index="0"> + <lofar:observation> + <name>3C196/1/TO</name> + <description>3C196/1/TO (Target Observation)</description> + <topology>B1.1.T</topology> + <predecessor_topology></predecessor_topology> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <lofar:observationAttributes> + <observationId> + </observationId> + <name>3C196/1/TO</name> + <projectName>LC4_022</projectName> + <instrument>Beam Observation</instrument> + <defaultTemplate>BeamObservation</defaultTemplate> + <tbbPiggybackAllowed>true</tbbPiggybackAllowed> + <aartfaacPiggybackAllowed>true</aartfaacPiggybackAllowed> + <userSpecification> + <correlatedData>true</correlatedData> + <coherentStokesData>false</coherentStokesData> + <incoherentStokesData>false</incoherentStokesData> + <antenna>HBA Dual Inner</antenna> + <clock mode="200 MHz"/> + <instrumentFilter>110-190 MHz</instrumentFilter> + <integrationInterval>2.0</integrationInterval> + <channelsPerSubband>64</channelsPerSubband> + <coherentDedisperseChannels>false</coherentDedisperseChannels> + <tiedArrayBeams> + <flyseye>false</flyseye> + </tiedArrayBeams> + <stokes> + <integrateChannels>false</integrateChannels> + 
<subbandsPerFileCS></subbandsPerFileCS> + <numberCollapsedChannelsCS></numberCollapsedChannelsCS> + <stokesDownsamplingStepsCS></stokesDownsamplingStepsCS> + <whichCS></whichCS> + <subbandsPerFileIS></subbandsPerFileIS> + <numberCollapsedChannelsIS></numberCollapsedChannelsIS> + <stokesDownsamplingStepsIS></stokesDownsamplingStepsIS> + <whichIS></whichIS> + </stokes> + <stationSet>Custom</stationSet> + <stations>CS001,CS002,CS003,CS004,CS005,CS006,CS007,CS011,CS013,CS017,CS021,CS024,CS026,CS028,CS030,CS031,CS032,CS101,CS103,CS201,CS301,CS302,CS401,CS501,RS106,RS205,RS208,RS210,RS305,RS306,RS307,RS310,RS406,RS407,RS409,RS503,RS508,RS509</stations> + <timeFrame>UT</timeFrame> + <startTime>2015-10-19T02:52:00</startTime> + <endTime>2015-10-19T03:02:00</endTime> + <duration>600</duration> + <bypassPff>false</bypassPff> + <enableSuperterp>false</enableSuperterp> + <numberOfBitsPerSample>8</numberOfBitsPerSample> + </userSpecification> + </lofar:observationAttributes> + <children> +<item index="0"> + <lofar:measurement xsi:type="lofar:BFMeasurementType"> + <name>3C196</name> + <description>3C196</description> + <topology>B1.1.T.SAP000</topology> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <lofar:bfMeasurementAttributes> + <measurementType>Target</measurementType> + <specification> + <targetName>3C196</targetName> + <ra>123.400291667</ra> + <dec>48.2173888889</dec> + <equinox>J2000</equinox> + <duration>0</duration> + <subbandsSpecification> + <subbands>100..339</subbands> + </subbandsSpecification> + <tiedArrayBeams> + <flyseye>false</flyseye> + <nrTabRings>0</nrTabRings> + <tabRingSize></tabRingSize> + <tiedArrayBeamList> + + </tiedArrayBeamList> + </tiedArrayBeams> + </specification> + </lofar:bfMeasurementAttributes> + <resultDataProducts> + <item> + <lofar:uvDataProduct> + <name>B1.1.T.SAP000.uv.dps</name> + <topology>B1.1.T.SAP000.uv.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + 
<partition>/data/projects/</partition> + </storageCluster> + </lofar:uvDataProduct> + </item> + </resultDataProducts> + </lofar:measurement> + </item> +</children> + </lofar:observation> + </item> + <item index="0"> + <lofar:pipeline xsi:type="lofar:AveragingPipelineType"> + <topology>B1.1.PT0</topology> + <predecessor_topology>B1.1.T</predecessor_topology> + <name>3C196/1.0/TP</name> + <description>3C196/1.0/TP (Preprocessing)</description> + <currentStatus> + <mom2:openedStatus/> + </currentStatus> + <processingCluster> + <name>CEP4</name> + <partition>cpu</partition> + <numberOfTasks>110</numberOfTasks> + <minRAMPerTask unit="byte">1000000000</minRAMPerTask> + <minScratchPerTask unit="byte">100000000</minScratchPerTask> + <maxDurationPerTask>P7DT0S</maxDurationPerTask> + <numberOfCoresPerTask>2</numberOfCoresPerTask> + <runSimultaneous>true</runSimultaneous> + </processingCluster> + <pipelineAttributes> + <defaultTemplate>Preprocessing Pipeline</defaultTemplate> + <flaggingStrategy>HBAdefault</flaggingStrategy> + <duration>1000</duration> + <demixingParameters> + <averagingFreqStep>16</averagingFreqStep> + <averagingTimeStep>2</averagingTimeStep> + <demixFreqStep>16</demixFreqStep> + <demixTimeStep>2</demixTimeStep> + <demixAlways></demixAlways> + <demixIfNeeded></demixIfNeeded> + <ignoreTarget></ignoreTarget> + </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> +</pipelineAttributes> + <usedDataProducts> + <item> + <lofar:uvDataProduct topology="B1.1.T.SAP000.uv.dps"> + </lofar:uvDataProduct> + </item> + </usedDataProducts> + <resultDataProducts> + <item> + <lofar:uvDataProduct> + <name>B1.1.PT0.uv.dps</name> + <topology>B1.1.PT0.uv.dps</topology> + <status>no_data</status> + <storageCluster> + <name>CEP4</name> + <partition>/data/projects/</partition> + </storageCluster> + </lofar:uvDataProduct> + </item> + </resultDataProducts> + </lofar:pipeline> + </item> +</children> + </lofar:folder> + </item> +</children> + </lofar:folder> + </item> + 
</children> + </lofar:project> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/test_input.xml b/SAS/XML_generator/test/test_regression.in_data/xml/test_input.xml index 555b18f784c8e3f3f927e12de6bf788315b1d52d..653cb363b4fe194ea0fdb257a2439c05701c6b3c 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/test_input.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/test_input.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>test-lofar</name> <children> @@ -164,10 +164,12 @@ <rfifindExtraOpts></rfifindExtraOpts> <rrats></rrats> <singlePulse></singlePulse> - <skipDsps></skipDsps> + <skipDspsr></skipDspsr> <skipDynamicSpectrum></skipDynamicSpectrum> <skipPrepfold></skipPrepfold> <tsubint></tsubint> +<misc>{"storagemanager": "dysco"}</misc> + </pipelineAttributes> <usedDataProducts> <item> @@ -342,10 +344,12 @@ <rfifindExtraOpts></rfifindExtraOpts> <rrats></rrats> <singlePulse></singlePulse> - <skipDsps></skipDsps> + <skipDspsr></skipDspsr> <skipDynamicSpectrum></skipDynamicSpectrum> <skipPrepfold></skipPrepfold> <tsubint></tsubint> +<misc>{"storagemanager": "dysco"}</misc> + </pipelineAttributes> <usedDataProducts> <item> diff --git 
a/SAS/XML_generator/test/test_regression.in_data/xml/test_input_cep4.xml b/SAS/XML_generator/test/test_regression.in_data/xml/test_input_cep4.xml index 555b18f784c8e3f3f927e12de6bf788315b1d52d..653cb363b4fe194ea0fdb257a2439c05701c6b3c 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/test_input_cep4.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/test_input_cep4.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>test-lofar</name> <children> @@ -164,10 +164,12 @@ <rfifindExtraOpts></rfifindExtraOpts> <rrats></rrats> <singlePulse></singlePulse> - <skipDsps></skipDsps> + <skipDspsr></skipDspsr> <skipDynamicSpectrum></skipDynamicSpectrum> <skipPrepfold></skipPrepfold> <tsubint></tsubint> +<misc>{"storagemanager": "dysco"}</misc> + </pipelineAttributes> <usedDataProducts> <item> @@ -342,10 +344,12 @@ <rfifindExtraOpts></rfifindExtraOpts> <rrats></rrats> <singlePulse></singlePulse> - <skipDsps></skipDsps> + <skipDspsr></skipDspsr> <skipDynamicSpectrum></skipDynamicSpectrum> <skipPrepfold></skipPrepfold> <tsubint></tsubint> +<misc>{"storagemanager": "dysco"}</misc> + </pipelineAttributes> <usedDataProducts> <item> diff --git 
a/SAS/XML_generator/test/test_regression.in_data/xml/test_input_commensal_obs_DRAGNET.xml b/SAS/XML_generator/test/test_regression.in_data/xml/test_input_commensal_obs_DRAGNET.xml index 91a039e2ac03eef0d7fc553f47d94fed311e4c2e..ed66278340b611477c389e371d59ae2faf632d4b 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/test_input_commensal_obs_DRAGNET.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/test_input_commensal_obs_DRAGNET.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.21.0</version> - <template version="2.21.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.21.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>test-lofar</name> <children> diff --git a/SAS/XML_generator/test/test_regression.in_data/xml/test_input_long_baseline_pipeline.xml b/SAS/XML_generator/test/test_regression.in_data/xml/test_input_long_baseline_pipeline.xml index b87acee6c5a42e511aac4474f60e35d1a0f1d916..22b30c3fb12868c25112e75e0aaa61347fa95b3d 100644 --- a/SAS/XML_generator/test/test_regression.in_data/xml/test_input_long_baseline_pipeline.xml +++ b/SAS/XML_generator/test/test_regression.in_data/xml/test_input_long_baseline_pipeline.xml @@ -1,8 +1,8 @@ <?xml version="1.0" encoding="UTF-8"?> <lofar:project xmlns:lofar="http://www.astron.nl/MoM2-Lofar" xmlns:mom2="http://www.astron.nl/MoM2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://www.astron.nl/MoM2-Lofar http://lofar.astron.nl:8080/mom3/schemas/LofarMoM2.xsd http://www.astron.nl/MoM2 http://lofar.astron.nl:8080/mom3/schemas/MoM2.xsd "> - <version>2.17.0</version> - <template version="2.17.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> - <description>XML Template generator version 2.17.0</description> + <version>2.23.0</version> + <template version="2.23.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> + <description>XML Template generator version 2.23.0</description> </template> <name>test-lofar</name> <children> @@ -170,6 +170,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -416,6 +417,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -490,6 +492,7 @@ <strategyBaselines></strategyBaselines> <strategyTimeRange></strategyTimeRange> </bbsParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> @@ -550,6 +553,7 @@ <demixIfNeeded></demixIfNeeded> <ignoreTarget></ignoreTarget> </demixingParameters> +<misc>{"storagemanager": "dysco"}</misc> </pipelineAttributes> <usedDataProducts> <item> diff --git a/SAS/XML_generator/test/test_regression.py b/SAS/XML_generator/test/test_regression.py index 427789a190b8346dc5e383e31119286074773e40..c891d0031839d56946d64e6c8ffb19531ae737ed 100755 --- a/SAS/XML_generator/test/test_regression.py +++ b/SAS/XML_generator/test/test_regression.py @@ -1,5 +1,6 @@ #! 
/usr/bin/env python -import sys, os, subprocess, difflib +import sys, os, subprocess, difflib, shutil + # diff should only be something like: # 3,5c3,5 @@ -11,62 +12,72 @@ import sys, os, subprocess, difflib # > <template version="2.12.0" author="Alwin de Jong,Adriaan Renting" changedBy="Adriaan Renting"> # > <description>XML Template generator version 2.12.0</description> def checkDiff(diff): - if len(diff) == 8 or len(diff) == 0: - return True - return False + if len(diff) == 8 or len(diff) == 0: + return True + return False + + +def main(verbose_tests=False, regenerate_golden_output=False): + ''' + :param verbose_tests: print stdout and stderr of the generator when return code non-zero + :param regenerate_golden_output: overwrite the golden output files with the current generator output + ''' + os.chdir('test_regression.in_data') + infiles = os.listdir("txt") + results = [] + for infile in infiles: + if infile.startswith("old") or infile.startswith("."): + continue # pre 2.6 files that no longer have valid syntax + name, ext = os.path.splitext(infile) + outfile = name + ".xml" + print "\n" + print "*** Processing %s ***" % infile + cmd = ["xmlgen", "-i", "./txt/%s" % infile, "-o", "test.xml"] + p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + if verbose_tests and p.returncode == 1: + print out + print err + logs = out.splitlines() # stdout + print "xmlgen ran with return code: %s" % p.returncode + xmlgen = p.returncode + if p.returncode: + for l in logs: print l + results.append((infile, xmlgen, -1, False)) + continue + else: + cmd = ["diff", "-w", "-I", r"^[[:space:]]*$", "./xml/%s.xml" % name, "test.xml"] + ## -w ignores differences in whitespace + ## -I '^[[:space:]]*$' because -B doesn't work for blank lines (on OSX?) 
+ p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) + logs = p.communicate() + diffs = logs[0].splitlines() # stdout + print "diff reply was %i lines long" % len(diffs) + check = checkDiff(diffs) and len(logs[1]) == 0 + if not check: + for l in diffs: print l + print logs[1] + results.append((infile, xmlgen, p.returncode, check)) -def main(): - os.chdir('test_regression.in_data') - infiles = os.listdir("txt") - results = [] - for infile in infiles: - if infile.startswith("old") or infile.startswith("."): - continue # pre 2.6 files that no longer have valid syntax - name, ext = os.path.splitext(infile) - outfile = name + ".xml" - print "\n" - print "*** Processing %s ***" % infile - cmd = ["xmlgen", "-i", "./txt/%s" % infile, "-o", "test.xml"] - p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) - logs = p.communicate()[0].splitlines() #stdout - print "xmlgen ran with return code: %s" % p.returncode - xmlgen = p.returncode - if p.returncode: - for l in logs: print l - results.append((infile, xmlgen, -1, False)) - continue + if regenerate_golden_output: + testdir = os.environ.get('srcdir', os.path.dirname(os.path.abspath(__file__))) + outfile = "%s/test_regression.in_data/xml/%s.xml" % (testdir, name) + print 'Overwriting golden XML:', os.path.abspath(outfile) + shutil.copy('test.xml', outfile) + + os.remove("test.xml") + print "\nResults:" + success = True + for r in results: + print "%s: xmlgen: %i diff: %i, %s" % r + success = success and r[3] + if success: + print "success" + return 0 else: - #import fileinput - #for line in fileinput.FileInput("test.xml",inplace=1): - # if line.rstrip(): - # print line, - #cmd = ["sed", "-i", "'/^$/d'", "test.xml"] - #p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) - cmd = ["diff", "-w", "-I", r"^[[:space:]]*$", "./xml/%s.xml" % name, "test.xml"] - #cmd = ["diff -w -I 
'^[[:space:]]*$' ./xml/%s.xml test.xml" % name] - ## -w ignores differences in whitespace - ## -I '^[[:space:]]*$' because -B doesn't work for blank lines (on OSX?) - p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE) - logs = p.communicate() - diffs = logs[0].splitlines() #stdout - print "diff reply was %i lines long" % len(diffs) - check = checkDiff(diffs) and len(logs[1]) == 0 - if not check: - for l in diffs: print l - print logs[1] - results.append((infile, xmlgen, p.returncode, check)) - os.remove("test.xml") - print "\nResults:" - success = True - for r in results: - print "%s: xmlgen: %i diff: %i, %s" % r - success = success and r[3] - if success: - print "success" - return 0 - else: - print "failure" - return 1 + print "failure" + return 1 + if __name__ == "__main__": - sys.exit(main()) + sys.exit(main()) diff --git a/SAS/XSD/MoM/LofarMoM2.xsd b/SAS/XSD/MoM/LofarMoM2.xsd index 3339e92b558bcabe99f0ed5a6b2a537ae8ba2118..fc438f5dea1f50cea4f9205ff886edd869045679 100644 --- a/SAS/XSD/MoM/LofarMoM2.xsd +++ b/SAS/XSD/MoM/LofarMoM2.xsd @@ -863,7 +863,46 @@ </xsd:sequence> </xsd:extension> </xsd:complexContent> - </xsd:complexType><!-- Imaging Pipeline related declarations --> + </xsd:complexType> + + <!-- Calibration Pipeline related declarations --> + <xsd:complexType name="CalibrationPipelineType"> + <xsd:complexContent> + <xsd:restriction base="LofarPipelineType"> + <xsd:sequence> + <xsd:element minOccurs="0" name="topology" type="xsd:string"/> + <xsd:element minOccurs="0" name="predecessor_topology" type="xsd:string"/> + <xsd:element minOccurs="0" name="name" type="xsd:string"/> + <xsd:element minOccurs="0" name="description" type="xsd:string"/> + <xsd:element minOccurs="0" name="processingCluster" type="ProcessingClusterType"/> + <xsd:element minOccurs="0" name="parent" type="mom2:CollectionRefType"/> + <xsd:element minOccurs="0" name="ownerProject" type="mom2:ProjectRefType"/> + <xsd:element minOccurs="0" 
name="statusHistory" type="mom2:StatusHistoryType"/> + <xsd:element minOccurs="0" name="currentStatus" type="mom2:CurrentStatusType"/> + <xsd:element minOccurs="0" name="generalRemarks" nillable="true" type="mom2:IndexedGeneralRemarkListType"/> + <xsd:element minOccurs="0" name="inspectionReports" nillable="true" type="mom2:IndexedInspectionReportListType"/> + <xsd:element minOccurs="0" name="specificationRemarks" nillable="true" type="mom2:IndexedSpecificationRemarkListType"/> + <xsd:element minOccurs="0" name="schedulingRemarks" nillable="true" type="mom2:IndexedSchedulingRemarkListType"/> + <xsd:choice minOccurs="0"> + <xsd:element ref="calibrationPipelineAttributes"/> + </xsd:choice> + <xsd:element minOccurs="0" name="usedDataProducts" nillable="true" type="mom2:IndexedDataProductListType"/> + <xsd:element minOccurs="0" name="resultDataProducts" nillable="true" type="mom2:IndexedDataProductListType"/><!-- xsd:element minOccurs="0" name="children" type="IndexedLofarPipelineChildListType"/ --><!--WTF--> + </xsd:sequence> + <xsd:attribute name="mom2Id" type="xsd:nonNegativeInteger"/> + </xsd:restriction> + </xsd:complexContent> + </xsd:complexType> + <xsd:element name="calibrationPipelineAttributes" substitutionGroup="mom2:specificAttributes" type="CalibrationPipelineAttributesType"/> + <xsd:complexType name="CalibrationPipelineAttributesType"> + <xsd:complexContent> + <xsd:extension base="AveragingPipelineAttributesType"> + <xsd:sequence/> + </xsd:extension> + </xsd:complexContent> + </xsd:complexType> + + <!-- Imaging Pipeline related declarations --> <xsd:complexType name="ImagingPipelineType"> <xsd:complexContent> <xsd:restriction base="LofarPipelineType"> diff --git a/SAS/XSD/SAS/LofarBase.xsd b/SAS/XSD/SAS/LofarBase.xsd index a80a5a692737881632a3c10500e6e614ff2c4161..20fa87e604336a740fd1857daaf192e74ee72524 100644 --- a/SAS/XSD/SAS/LofarBase.xsd +++ b/SAS/XSD/SAS/LofarBase.xsd @@ -404,15 +404,22 @@ <xsd:element name="ignoreTarget" type="xsd:boolean"/> 
</xsd:sequence> </xsd:complexType> +<!-- Calibration Pipeline related declarations --> +<!-- FIXME: The Calibration Pipeline should have BbsParameters while the Averaging Pipeline does not, but for backward +compatibility with MoM it is currently not defined that way, making them identical. When support for MoM 3 can be dropped +this should be fixed. --> + <xsd:complexType name="CalibrationPipeline"> + <xsd:complexContent> + <xsd:extension base="AveragingPipeline"><!-- Untested if this works properly --> + <xsd:sequence/> + </xsd:extension> + </xsd:complexContent> + </xsd:complexType> <!-- Imaging Pipeline related declarations --> <xsd:complexType name="ImagingPipeline"> <xsd:complexContent> - <xsd:extension base="Pipeline"><!-- Should maybe derive from AveragingPipeline or CalibrationPipeline--> + <xsd:extension base="CalibrationPipeline"><!-- Untested if this works properly --> <xsd:sequence> - <xsd:element name="name" type="xsd:string"/> - <xsd:element name="description" type="xsd:string"/> - <xsd:element name="processingCluster" type="ProcessingCluster"/> - <xsd:element name="defaultTemplate" type="xsd:string"/> <xsd:element minOccurs="0" name="imagerIntegrationTime" type="xsd:double"/> </xsd:sequence> </xsd:extension> diff --git a/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_0.float.raw b/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_0.float.raw index 6e98f92d8b754f622d9330e4210b84e3fc5152df..60063a18dc6163d77805d4fbf8255c0608519b05 100644 Binary files a/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_0.float.raw and b/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_0.float.raw differ diff --git a/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_1.float.raw b/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_1.float.raw index 
766c9637fc5242b533c224b60616f5d266775065..74689401483d7b44c9c8d6e97d56f0ad4cdcd944 100644 Binary files a/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_1.float.raw and b/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.output/Beam_1.float.raw differ diff --git a/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.parset b/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.parset index cb9346f83dca658a6b3f053d8f9219dd693552b3..7f0dc9a23d731beaa7c0acb2528aabea72a9e43b 100644 --- a/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.parset +++ b/SubSystems/Online_Cobalt/test/Beamformer/tFlysEye_3sec_2st_3sb.parset @@ -1,5 +1,5 @@ Cobalt.BeamFormer.nrDelayCompensationChannels = 64 -Cobalt.BeamFormer.nrHighResolutionChannels = 4096 +Cobalt.BeamFormer.nrHighResolutionChannels = 64 ApplCtrl.application=CorrAppl ApplCtrl.processes=[CorrProc] ApplCtrl.resultfile=/opt/lofar/var/run/ACC_CCU001:OnlineControl[0]{192676}_CorrAppl_result.param diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_1sec_1st_5sb_noflagging.output/queues/lofar.task.feedback.processing b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_1sec_1st_5sb_noflagging.output/queues/lofar.task.feedback.processing index d3a3da80b831bede2d6a739ef50572de44656695..bdd31462a7943569a596b683ec1695d65b32df0d 100644 --- a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_1sec_1st_5sb_noflagging.output/queues/lofar.task.feedback.processing +++ b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_1sec_1st_5sb_noflagging.output/queues/lofar.task.feedback.processing @@ -25,6 +25,7 @@ Observation.Correlator.integrationInterval=0.25165824 Observation.DataProducts.nrOfOutput_Beamformed_=0 Observation.DataProducts.nrOfOutput_Correlated_=5 _isCobalt=T +feedback_version=03.01.00 </payload> </message> diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB0.cfloat.raw 
b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB0.cfloat.raw index 9c4d1945e26c0c7fda0b2909072a881bec532d4c..405c42e29d6d4a26ce3c5f36b9ff03addb90f336 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB0.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB0.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB1.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB1.cfloat.raw index 069d8ad84894168d6826b72c57cb28c370f2a693..c50bfebd12c60d7f8478f1898035eb506e709e27 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB1.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB1.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB2.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB2.cfloat.raw index e3f3dade194d0a4c658aa70eb2b02afa23fbf422..79aaad3efb77ab9bd6a7642c2757e827267ce7ee 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB2.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB2.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB3.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB3.cfloat.raw index a87b4786cbb3eef12f8cd7a851f29e9000f93292..a889923e623ac9a39ea5689929bc876694ee1151 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB3.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB3.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB4.cfloat.raw 
b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB4.cfloat.raw index 071c0acadd7ea0c26cc6b554d88d56c26fcb958b..63f0f779518aac1391d8fbfe2cbccb074e48273a 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB4.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_1st_5sb.output/SB4.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB0.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB0.cfloat.raw index e3e98d0a30710f631f2dee91fce29546cfc1b72e..03ecd19891188abdc58e97573a25585145260679 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB0.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB0.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB1.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB1.cfloat.raw index fd209305dc1456733965c6be8559d1a12448e95c..185d8e57b651353b3d2db147a85fd8386a563274 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB1.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB1.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB2.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB2.cfloat.raw index 818e64fbecaf7572c048ed33cf667564678d6c6a..2360f74ef22b7f1712c8069b67fd5fb3ba034d6e 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB2.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB2.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB3.cfloat.raw 
b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB3.cfloat.raw index dd047432f7a5fbb0a45c35db8622296bf7699d7f..b64f608cc89e10a6aa967a2a714d8beca241f392 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB3.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB3.cfloat.raw differ diff --git a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB4.cfloat.raw b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB4.cfloat.raw index e4bce7b606e42b929c43829e9c941f5a351c740e..80a78bb5a73d3c89b3307f3fe3ab4160e36ce231 100644 Binary files a/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB4.cfloat.raw and b/SubSystems/Online_Cobalt/test/Correlator/tCorrelate_3sec_2st_5sb.output/SB4.cfloat.raw differ diff --git a/SubSystems/RAServices/CMakeLists.txt b/SubSystems/RAServices/CMakeLists.txt index 0c4007137eda58bbf76faa9440bc54c478ee95a5..d5ac998bfedb3319ea9de8b9964d4ec424220bb2 100644 --- a/SubSystems/RAServices/CMakeLists.txt +++ b/SubSystems/RAServices/CMakeLists.txt @@ -21,7 +21,8 @@ lofar_package(RAServices QPIDInfrastructure RAScripts StaticMetaData - RACommon) + RACommon + ltastorageoverview) # supervisord config files install(FILES diff --git a/SubSystems/RAServices/RAServices.ini b/SubSystems/RAServices/RAServices.ini index 686332c1c2ea3bdadea3d00740a5f60b5877995d..be46875b92d9119f5a459463562a6bee85ddfb14 100644 --- a/SubSystems/RAServices/RAServices.ini +++ b/SubSystems/RAServices/RAServices.ini @@ -10,3 +10,6 @@ programs=TriggerService [group:MAC] programs=PipelineControl + +[group:LTA] +programs=ltastorageoverviewscraper,ltastorageoverviewwebservice diff --git a/doc/doxygen/styling/customdoxygen.css.in b/doc/doxygen/styling/customdoxygen.css.in index 3f4d48c3b1b74f28124a996f808e06c7aad34e99..d5697196592a2b43fe1d16385e04d8eacd89fbc6 100644 --- a/doc/doxygen/styling/customdoxygen.css.in +++ 
b/doc/doxygen/styling/customdoxygen.css.in @@ -93,9 +93,14 @@ font-size: 1.15em !important; border: 0px solid #222 !important; height: 150px; } +/* +Commented out to avoid wrapping the header after the paragraph number. Not +deleted, because disabling this might break some intended behavior in tables. + table{ white-space:pre-wrap !important; } +*/ /* =========================== */ diff --git a/lofar_config.h.cmake b/lofar_config.h.cmake index 6ff3b27b94375aef27646ac65500b94fd0e3de61..7d66024d4e0a7e4c9cc7bbf1398eed833026e1da 100644 --- a/lofar_config.h.cmake +++ b/lofar_config.h.cmake @@ -174,6 +174,9 @@ /* Define if WCSLIB is installed */ #cmakedefine HAVE_WCSLIB 1 +/* Define if python3 is installed */ +#cmakedefine PYTHON_VERSION_MAJOR @PYTHON_VERSION_MAJOR@ + /*-------------------------------------------------------------------------*\ | Defines for the presence or absence of (system) functions |