Skip to content
Snippets Groups Projects
Select Git revision
  • 2deafa38cddc951e4091f012f8610b80147b2daa
  • master default protected
  • L2SS-1914-fix_job_dispatch
  • TMSS-3170
  • TMSS-3167
  • TMSS-3161
  • TMSS-3158-Front-End-Only-Allow-Changing-Again
  • TMSS-3133
  • TMSS-3319-Fix-Templates
  • test-fix-deploy
  • TMSS-3134
  • TMSS-2872
  • defer-state
  • add-custom-monitoring-points
  • TMSS-3101-Front-End-Only
  • TMSS-984-choices
  • SDC-1400-Front-End-Only
  • TMSS-3079-PII
  • TMSS-2936
  • check-for-max-244-subbands
  • TMSS-2927---Front-End-Only-PXII
  • Before-Remove-TMSS
  • LOFAR-Release-4_4_318 protected
  • LOFAR-Release-4_4_317 protected
  • LOFAR-Release-4_4_316 protected
  • LOFAR-Release-4_4_315 protected
  • LOFAR-Release-4_4_314 protected
  • LOFAR-Release-4_4_313 protected
  • LOFAR-Release-4_4_312 protected
  • LOFAR-Release-4_4_311 protected
  • LOFAR-Release-4_4_310 protected
  • LOFAR-Release-4_4_309 protected
  • LOFAR-Release-4_4_308 protected
  • LOFAR-Release-4_4_307 protected
  • LOFAR-Release-4_4_306 protected
  • LOFAR-Release-4_4_304 protected
  • LOFAR-Release-4_4_303 protected
  • LOFAR-Release-4_4_302 protected
  • LOFAR-Release-4_4_301 protected
  • LOFAR-Release-4_4_300 protected
  • LOFAR-Release-4_4_299 protected
41 results

holography_process.py

Blame
  • Code owners
    Assign users and groups as approvers for specific file changes. Learn more.
    holography_process.py 4.55 KiB
    #!/usr/bin/env python3
    import argparse
    import sys
    import os
    import lofar.calibration.common.datacontainers as datacontainers
    import lofar.calibration.processing as processing
    import logging
    
    logger = logging.getLogger('holography_process')
    
    
    def setup_argument_parser():
        """
        Create the command line argument parser for the holography pipeline.

        :return: a parser accepting input_path, output_path and an optional
                 --verbose flag
        :rtype: argparse.ArgumentParser
        """
        parser = argparse.ArgumentParser(
            description='Computes the gains per each antenna from the given Holography Dataset')
        parser.add_argument('input_path', help='Input holography dataset', type=str)
        parser.add_argument('output_path', help='Output holography dataset', type=str)
        # BUG FIX: 'verbose' was declared as a POSITIONAL argument with
        # action='store_false'. argparse rejects store actions (nargs == 0)
        # on positionals with "ValueError: nargs for store actions must be != 0",
        # so building the parser crashed; store_false would also have made the
        # default True, inverting the intent. Use an optional boolean flag
        # that defaults to False instead.
        parser.add_argument('--verbose', help='Set verbose logging', action='store_true')
        return parser
    
    
    def parse_command_arguments(arguments):
        """
        Parse the given command line argument list.

        :param arguments: raw command line arguments (without the program name)
        :type arguments: list
        :return: the parsed arguments namespace
        :rtype: argparse.Namespace
        """
        return setup_argument_parser().parse_args(arguments)
    
    
    def loading(path):
        """
        Load a holography dataset from the given file path.

        :param path: filesystem path of the holography dataset
        :type path: str
        :return: the loaded dataset
        :rtype: datacontainers.HolographyDataset
        :raises FileNotFoundError: if path does not exist
        """

        logger.info('loading dataset from %s', path)
        if not os.path.exists(path):
            # BUG FIX: a bare Exception('Path not found') hid which path was
            # missing and forced callers to catch Exception. FileNotFoundError
            # is more specific and still a subclass of Exception, so existing
            # handlers keep working.
            raise FileNotFoundError('path %s not found' % path)

        dataset = datacontainers.HolographyDataset.load_from_file(path)
        logger.info('dataset from path %s loaded', path)
        return dataset
    
    
    def normalize_step(dataset, input_datatable):
        """
        Normalize the beams of input_datatable by the central beamlets and
        record the result under the 'NORMALIZED' key of dataset.derived_data.

        :param dataset: the holography dataset being processed
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :return: the normalized datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('normalizing dataset')
        normalized = processing.normalize_beams_by_central_one(
            dataset, input_datatable, dataset.central_beamlets)

        # Lazily create the derived-data container on first use.
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        dataset.derived_data['NORMALIZED'] = normalized

        logger.info('dataset normalized')
        return normalized
    
    
    def time_averaging_step(dataset, input_datatable):
        """
        Average the datatable over time and record the result under the
        'TIME_AVERAGED' key of dataset.derived_data.

        :param dataset: the holography dataset being processed
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :return: the time averaged datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('time averaging dataset')
        averaged = processing.averaging.average_data(input_datatable)

        # Lazily create the derived-data container on first use.
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        logger.info('dataset time averaged')

        dataset.derived_data['TIME_AVERAGED'] = averaged
        return averaged
    
    
    def station_averaging_step(dataset, input_datatable):
        """
        Compute the weighted average of the datatable per reference station
        and record it under the 'STATION_AVERAGED' key of dataset.derived_data.

        :param dataset: the holography dataset being processed
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :return: the station averaged datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('averaging dataset per reference station')
        averaged = processing.averaging.weighted_average_dataset_per_station(
            dataset, input_datatable)

        # Lazily create the derived-data container on first use.
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        logger.info('dataset averaged for reference station')
        dataset.derived_data['STATION_AVERAGED'] = averaged
        return averaged
    
    
    def compute_gains_step(dataset, input_datatable, direct_complex=True):
        """
        Solve for the per-antenna gains and record them under the 'GAINS' key
        of dataset.derived_data.

        :param dataset: the holography dataset being processed
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :param direct_complex: forwarded to the gain solver (previously this
                               parameter was accepted but silently ignored)
        :type direct_complex: bool
        :return: the computed gains datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('computing gains per dataset')
        # BUG FIX: the call hard-coded direct_complex=True, silently ignoring
        # the caller-supplied parameter. Forward it instead; the default is
        # unchanged, so existing callers behave identically.
        output_datatable = processing.solver.solve_gains_per_datatable(
            dataset, input_datatable, direct_complex=direct_complex)
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        logger.info('gains per dataset computed')
        dataset.derived_data['GAINS'] = output_datatable
        return output_datatable
    
    
    def execute_processing(arguments):
        """
        Run the full holography processing pipeline:
        load -> normalize -> time average -> station average -> compute gains,
        then store the resulting dataset at arguments.output_path.

        :param arguments: parsed command line arguments providing the
                          input_path and output_path attributes
        :type arguments: argparse.Namespace
        """
        dataset = loading(arguments.input_path)

        normalized_data = normalize_step(dataset, dataset.data)
        averaged_data = time_averaging_step(dataset, normalized_data)
        station_averaged_data = station_averaging_step(dataset, averaged_data)

        # The step stores its result in dataset.derived_data['GAINS'] itself;
        # the (previously unused) return value is not needed here.
        compute_gains_step(dataset, station_averaged_data)
        # Typo fixed in the log message: 'datatafile' -> 'datafile'.
        logger.info('storing datafile in %s', os.path.abspath(arguments.output_path))
        dataset.store_to_file(arguments.output_path)
    
    
    def setup_logger(arguments):
        """
        Configure the logging output format and set the verbosity of the
        module logger from the parsed command line arguments.

        :param arguments: parsed command line arguments; the verbose attribute
                          selects DEBUG over INFO level
        """
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

        level = logging.DEBUG if arguments.verbose else logging.INFO
        logger.setLevel(level)
    
    
    if __name__ == '__main__':
        # Parse the command line, configure logging, then run the pipeline.
        parsed_arguments = parse_command_arguments(sys.argv[1:])
        setup_logger(parsed_arguments)
        execute_processing(parsed_arguments)