Commit 2a17f973 authored by Mattia Mancini

SSB-44: refactored to address reviewer suggestions

parent 3cd0bb67
1 merge request: !44 Merge back holography to master
@@ -3,12 +3,36 @@ import argparse
import sys
import os
import lofar.calibration.common.datacontainers as datacontainers
import lofar.calibration.processing as processing
from lofar.calibration.processing.solver import solve_gains_per_datatable
from lofar.calibration.processing.averaging import average_data,\
weighted_average_dataset_per_station
from lofar.calibration.processing.normalize import normalize_beams_by_central_one
import logging
import functools
logger = logging.getLogger('holography_process')
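# Decorator factory: wraps a processing step so that its start, its completion and
# any exception it raises are logged under the 'holography_process' logger.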
def log_step_execution(step_description):
def log_step_decorator(function):
@functools.wraps(function)
def wrapper(*args, **kwargs):
logger.info('Performing step %s', step_description)
try:
result = function(*args, **kwargs)
logger.info('Step %s performed', step_description)
return result
except Exception as e:
logger.exception('exception occurred performing step %s: %s',
step_description, e)
raise e
return wrapper
return log_step_decorator
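# Usage sketch (illustrative only, not part of this change; 'example_step' is a
# hypothetical name):
#
#   @log_step_execution('example step')
#   def example_step():
#       ...
#
# Calling example_step() logs 'Performing step example step' before execution and
# 'Step example step performed' on success; any exception is logged and re-raised.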
def setup_argument_parser():
"""
@@ -33,6 +57,7 @@ def parse_command_arguments(arguments):
return parser.parse_args(arguments)
@log_step_execution('load')
def loading(path):
"""
@@ -41,16 +66,11 @@ def loading(path):
:return:
:rtype: datacontainers.HolographyDataset
"""
logger.info('loading dataset from %s', path)
if not os.path.exists(path):
raise Exception('Path not found')
dataset = datacontainers.HolographyDataset.load_from_file(path)
logger.info('dataset from path %s loaded', path)
return dataset
@log_step_execution('normalize')
def normalize_step(dataset, input_datatable):
"""
@@ -60,17 +80,14 @@ def normalize_step(dataset, input_datatable):
:return:
:rtype: dict(dict(dict(numpy.ndarray)))
"""
logger.info('normalizing dataset')
output_datatable = \
processing.normalize_beams_by_central_one(dataset, input_datatable, dataset.central_beamlets)
if dataset.derived_data is None:
dataset.derived_data = dict()
output_datatable = normalize_beams_by_central_one(dataset,
input_datatable,
dataset.central_beamlets)
dataset.derived_data['NORMALIZED'] = output_datatable
logger.info('dataset normalized')
return output_datatable
@log_step_execution('time average')
def time_averaging_step(dataset, input_datatable):
"""
@@ -80,17 +97,12 @@ def time_averaging_step(dataset, input_datatable):
:return:
:rtype: dict(dict(dict(numpy.ndarray)))
"""
logger.info('time averaging dataset')
output_datable = \
processing.averaging.average_data(input_datatable)
if dataset.derived_data is None:
dataset.derived_data = dict()
logger.info('dataset time averaged')
output_datable = average_data(input_datatable)
dataset.derived_data['TIME_AVERAGED'] = output_datable
return output_datable
@log_step_execution('station average')
def station_averaging_step(dataset, input_datatable):
"""
@@ -100,16 +112,14 @@ def station_averaging_step(dataset, input_datatable):
:return:
:rtype: dict(dict(dict(numpy.ndarray)))
"""
logger.info('averaging dataset per reference station')
output_datable = \
processing.averaging.weighted_average_dataset_per_station(dataset, input_datatable)
output_datable = weighted_average_dataset_per_station(dataset, input_datatable)
if dataset.derived_data is None:
dataset.derived_data = dict()
logger.info('dataset averaged for reference station')
dataset.derived_data['STATION_AVERAGED'] = output_datable
return output_datable
@log_step_execution('solving gains')
def compute_gains_step(dataset, input_datatable, direct_complex=True):
"""
@@ -119,32 +129,36 @@ def compute_gains_step(dataset, input_datatable, direct_complex=True):
:return:
:rtype: dict(dict(dict(numpy.ndarray)))
"""
logger.info('computing gains per dataset')
output_datable = \
processing.solver.solve_gains_per_datatable(dataset, input_datatable, direct_complex=direct_complex)
output_datable = solve_gains_per_datatable(dataset, input_datatable, direct_complex=direct_complex)
if dataset.derived_data is None:
dataset.derived_data = dict()
logger.info('gains per dataset computed')
dataset.derived_data['GAINS'] = output_datable
return output_datable
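# Persist the dataset, and with it the derived data accumulated by the steps above,
# to the requested file path.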
@log_step_execution('save')
def store_step(dataset, filepath):
absolute_filepath = os.path.abspath(filepath)
logger.info('storing dataset in path %s', filepath)
dataset.store_to_file(absolute_filepath)
logger.info('stored dataset in path %s', filepath)
def prepare_dataset_for_processing(dataset: datacontainers.HolographyDataset):
"""
Ensure the dataset has a derived_data dict in which each processing step can
store its output.

:param dataset: dataset to prepare for processing
:return: None
"""
if dataset.derived_data is None:
dataset.derived_data = dict()
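# Pipeline driver: load the dataset, make sure derived_data exists, then run the
# normalize -> time average -> station average -> gain solving steps and store the
# result back to disk.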
def execute_processing(arguments):
dataset = loading(arguments.input_path)
prepare_dataset_for_processing(dataset)
normalized_data = normalize_step(dataset, dataset.data)
averaged_data = time_averaging_step(dataset, normalized_data)
station_averaged_data = station_averaging_step(dataset, averaged_data)
logger.info('storing datafile in %s', os.path.abspath(arguments.output_path))
store_step(dataset, arguments.output_path)
gains = compute_gains_step(dataset, station_averaged_data)
_ = compute_gains_step(dataset, station_averaged_data)
store_step(dataset, arguments.output_path)
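# After a full run, dataset.derived_data is expected to hold the 'NORMALIZED',
# 'TIME_AVERAGED', 'STATION_AVERAGED' and 'GAINS' tables produced by the steps above.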