Skip to content
Snippets Groups Projects
Select Git revision
  • 9168a14dd139f4b2578b7862e7a332ba62e6e8d3
  • master default protected
  • dither_on_off_disabled
  • yocto
  • pypcc2
  • pypcc3
  • 2020-12-07-the_only_working_copy
  • v2.0
  • v1.0
  • v0.9
  • Working-RCU_ADC,ID
  • 2020-12-11-Holiday_Season_release
12 results

testCLK.py

Blame
  • Code owners
    Assign users and groups as approvers for specific file changes. Learn more.
    holography_process.py 4.55 KiB
    #!/usr/bin/env python3
    import argparse
    import sys
    import os
    import lofar.calibration.common.datacontainers as datacontainers
    import lofar.calibration.processing as processing
    import logging
    
    logger = logging.getLogger('holography_process')
    
    
    def setup_argument_parser():
        """Build the command-line argument parser for this script.

        :return: parser accepting the input/output dataset paths and a
            verbosity flag
        :rtype: argparse.ArgumentParser
        """
        parser = argparse.ArgumentParser(
            description='Computes the gains per each antenna from the given Holography Dataset')
        parser.add_argument('input_path', help='Input holography dataset', type=str)
        parser.add_argument('output_path', help='Output holography dataset', type=str)
        # BUG FIX: 'verbose' was a *positional* argument with
        # action='store_false' — argparse rejects store-action positionals
        # (they consume no value), and store_false would have defaulted the
        # flag to True anyway.  It is now an optional on/off switch that
        # defaults to False and is enabled with --verbose.
        parser.add_argument('--verbose', help='Set verbose logging', action='store_true')
        return parser
    
    
    def parse_command_arguments(arguments):
        """Parse a raw argument list into a namespace.

        :param arguments: raw command-line tokens, e.g. ``sys.argv[1:]``
        :type arguments: list
        :return: the parsed arguments
        :rtype: argparse.Namespace
        """
        return setup_argument_parser().parse_args(arguments)
    
    
    def loading(path):
        """Load a holography dataset from disk.

        :param path: filesystem location of the holography dataset
        :type path: str
        :return: the loaded dataset
        :rtype: datacontainers.HolographyDataset
        :raises FileNotFoundError: if *path* does not exist
        """

        logger.info('loading dataset from %s', path)
        if not os.path.exists(path):
            # BUG FIX: raise the specific built-in exception rather than a
            # bare Exception; FileNotFoundError subclasses Exception, so any
            # caller catching broadly still works, and the offending path is
            # now included in the message.
            raise FileNotFoundError('Path not found: %s' % path)

        dataset = datacontainers.HolographyDataset.load_from_file(path)
        logger.info('dataset from path %s loaded', path)
        return dataset
    
    
    def normalize_step(dataset, input_datatable):
        """Normalize all beams against the central beamlets.

        The result is also cached on the dataset under the 'NORMALIZED' key
        of ``dataset.derived_data``.

        :param dataset:
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :return: the normalized datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('normalizing dataset')
        normalized = processing.normalize_beams_by_central_one(
            dataset, input_datatable, dataset.central_beamlets)
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        dataset.derived_data['NORMALIZED'] = normalized
        logger.info('dataset normalized')
        return normalized
    
    
    def time_averaging_step(dataset, input_datatable):
        """Average the datatable over time.

        The result is also cached on the dataset under the 'TIME_AVERAGED'
        key of ``dataset.derived_data``.

        :param dataset:
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :return: the time-averaged datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('time averaging dataset')
        averaged = processing.averaging.average_data(input_datatable)
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        logger.info('dataset time averaged')
        dataset.derived_data['TIME_AVERAGED'] = averaged
        return averaged
    
    
    def station_averaging_step(dataset, input_datatable):
        """Compute a weighted average of the datatable per reference station.

        The result is also cached on the dataset under the
        'STATION_AVERAGED' key of ``dataset.derived_data``.

        :param dataset:
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :return: the per-station averaged datatable
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('averaging dataset per reference station')
        station_averaged = processing.averaging.weighted_average_dataset_per_station(
            dataset, input_datatable)
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        logger.info('dataset averaged for reference station')
        dataset.derived_data['STATION_AVERAGED'] = station_averaged
        return station_averaged
    
    
    def compute_gains_step(dataset, input_datatable, direct_complex=True):
        """Solve for per-antenna gains from the given datatable.

        The result is also cached on the dataset under the 'GAINS' key of
        ``dataset.derived_data``.

        :param dataset:
        :type dataset: datacontainers.HolographyDataset
        :param input_datatable: dict(dict(dict(numpy.ndarray)))
        :param direct_complex: forwarded to the gain solver.  BUG FIX: the
            call previously hard-coded ``direct_complex=True``, silently
            ignoring this parameter; it is now passed through (default
            unchanged, so existing callers are unaffected).
        :return: the computed gains
        :rtype: dict(dict(dict(numpy.ndarray)))
        """
        logger.info('computing gains per dataset')
        gains = processing.solver.solve_gains_per_datatable(
            dataset, input_datatable, direct_complex=direct_complex)
        if dataset.derived_data is None:
            dataset.derived_data = dict()
        logger.info('gains per dataset computed')
        dataset.derived_data['GAINS'] = gains
        return gains
    
    
    def execute_processing(arguments):
        """Run the full holography pipeline and store the enriched dataset.

        Steps: load -> normalize -> time-average -> station-average ->
        gain solving, then write the dataset (with all derived data attached
        by each step) to the output path.

        :param arguments: parsed command-line namespace providing
            ``input_path`` and ``output_path``
        """
        dataset = loading(arguments.input_path)

        normalized_data = normalize_step(dataset, dataset.data)
        averaged_data = time_averaging_step(dataset, normalized_data)
        station_averaged_data = station_averaging_step(dataset, averaged_data)

        # compute_gains_step stores its result on dataset.derived_data,
        # so the (previously unused) return value is not kept.
        compute_gains_step(dataset, station_averaged_data)
        # Typo fix in the log message: 'datatafile' -> 'datafile'.
        logger.info('storing datafile in %s', os.path.abspath(arguments.output_path))
        dataset.store_to_file(arguments.output_path)
    
    
    def setup_logger(arguments):
        """Configure the logging format and this module's log level.

        :param arguments: parsed command-line namespace; a truthy
            ``arguments.verbose`` selects DEBUG, otherwise INFO
        """
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        level = logging.DEBUG if arguments.verbose else logging.INFO
        logger.setLevel(level)
    
    
    if __name__ == '__main__':
        # Script entry point: parse CLI arguments, configure logging,
        # then run the processing pipeline.
        args = parse_command_arguments(sys.argv[1:])
        setup_logger(args)
        execute_processing(args)