Commit d0a58c05 authored by Mattia Mancini

add support for long list

parent 78f9e935
@@ -9,8 +9,11 @@ import re
 def parse_args():
     parser = ArgumentParser(description='Gather the results of tasks with a give SAS ID')
     parser.add_argument('--atdb_url', help='url to the atdb website', default='https://sdc.astron.nl:5554/atdb')
-    parser.add_argument('sas_id', help='sas id of the observation')
+    parser.add_argument('--sas_id', help='sas id of the observation')
+    parser.add_argument('--filter', help='query by filter', default=None)
+    parser.add_argument('--status', help='query by status', default=None)
     parser.add_argument('--print_first', help='print first result', action='store_true')
+    parser.add_argument('--to_gftp_path', help='convert srm url to gftp', action='store_true')
     parser.add_argument('paths', help='path to the tasks to extract (e.g. outputs.uv_coverage)', nargs='+')
     return parser.parse_args()
@@ -31,7 +34,12 @@ def path_to_nested_dict(content: dict, path: str):
             raise MissingFieldException(item)
     if isinstance(leaf, list):
         return list(map(lambda x: path_to_nested_dict(x, uri[-1]), leaf))
-    return leaf[uri[-1]]
+    try:
+        result = leaf[uri[-1]]
+    except KeyError:
+        raise MissingFieldException
+    return result


 def _get_paginated(url, params, partial_result=None):
@@ -51,29 +59,50 @@ def _get_paginated(url, params, partial_result=None):
     return partial_result


-def get_tasks(url: str, sas_id: Dict):
-    tasks = _get_paginated(f'{url}/tasks', params=dict(sas_id=sas_id))
+def get_tasks(url: str, sas_id: int = None, filter: str = None, status: str = None):
+    parameters = {}
+    if sas_id is not None:
+        parameters['sas_id'] = sas_id
+    if filter is not None:
+        parameters['filter'] = filter
+    if status is not None:
+        parameters['status'] = status
+    tasks = _get_paginated(f'{url}/tasks', params=parameters)
     return tasks


+def flatten_nested_list(nested_list):
+    copy_of = list(nested_list)
+    while isinstance(copy_of[0], list):
+        copy_of = reduce(lambda a, b: a + b, copy_of, [])
+    return copy_of
+
+
 def extract_result_from_tasks(tasks, path):
-    result = reduce(
-        lambda a, b: a + b,
-        map(
+    result = flatten_nested_list(
+        list(map(
             lambda item: path_to_nested_dict(item, path),
             tasks
-        ),
-        [])
+        )))
    return result


+def convert_srm_to_gftp(path):
+    if 'srm' in path:
+        path = re.sub('^srm://srm.grid.sara.nl[:0-9]*', 'gsiftp://gridftp.grid.sara.nl', path)
+    return path
+
+
 def main():
     args = parse_args()
-    tasks = get_tasks(args.atdb_url, args.sas_id)
+    tasks = get_tasks(args.atdb_url, sas_id=args.sas_id, filter=args.filter, status=args.status)

     if args.print_first:
         print(json.dumps(tasks[0], indent=4))
     else:
         results = [extract_result_from_tasks(tasks, path) for path in args.paths]
         for result in zip(*results):
+            if args.to_gftp_path:
+                result = list(map(convert_srm_to_gftp, result))
             print(*result)
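
For reference, below is a minimal, self-contained sketch of how the two helpers introduced in this commit behave. The function bodies are copied from the diff above; the sample input values are illustrative assumptions, not data from the repository.

    import re
    from functools import reduce


    def flatten_nested_list(nested_list):
        # Concatenate one level of nesting per pass until the first element
        # is no longer a list, so arbitrarily deep nested lists collapse
        # into a single flat list.
        copy_of = list(nested_list)
        while isinstance(copy_of[0], list):
            copy_of = reduce(lambda a, b: a + b, copy_of, [])
        return copy_of


    def convert_srm_to_gftp(path):
        # Rewrite an SRM URL (with an optional port) into its GridFTP counterpart.
        if 'srm' in path:
            path = re.sub('^srm://srm.grid.sara.nl[:0-9]*', 'gsiftp://gridftp.grid.sara.nl', path)
        return path


    # Hypothetical example values, for illustration only.
    print(flatten_nested_list([['a.tar', 'b.tar'], ['c.tar']]))
    # -> ['a.tar', 'b.tar', 'c.tar']
    print(convert_srm_to_gftp('srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/example.tar'))
    # -> gsiftp://gridftp.grid.sara.nl/pnfs/grid.sara.nl/example.tar

As the diff shows, --to_gftp_path applies this conversion to every printed value, while --filter and --status are forwarded as query parameters to the ATDB tasks endpoint.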