Skip to content
Snippets Groups Projects

Main

Merged
Maaijke Mevius requested to merge
mevius/scintillation_pipeline:main into main
1 open thread

Files

+ 9
7
@@ -24,6 +24,7 @@ def parse_args():
parser.add_argument('--config', help='path to configuration file', type=str)
parser.add_argument('workflow', help='workflow uri', type=str)
parser.add_argument('filter', help='filter string', type=str)
parser.add_argument('--averaging_window', help='size of window in s', type=float, default=1)
parser.add_argument('sas_ids', help='list of SAS IDS', nargs='+')
return parser.parse_args()
@@ -44,7 +45,6 @@ SELECT FO.OBJECT_ID AS OBJECT_ID,
def read_user_credentials(file_paths: Union[str, List[str]]) -> Dict:
file_paths = list(map(os.path.expanduser, map(os.path.expandvars, file_paths)))
parser = ConfigParser()
parser.read(file_paths)
credentials = {'user': parser['global']['database_user'], 'password': parser['global']['database_password']}
@@ -85,15 +85,17 @@ def parse_surl_for_info(surl):
return site, result
def create_payload_from_entry(entry, filter_str, workflow, purge_policy='no', predecessor=None):
def create_payload_from_entry(entry, filter_str, workflow, avg_window, purge_policy='no', predecessor=None):
size = entry['filesize']
site, payload = parse_surl_for_info(entry['primary_url'])
inputs_payload = [
{'size': size,
inputs_payload = {
'surls':
[{'size': size,
'surl': entry['primary_url'],
'type': 'File',
'location': site}
]
'location': site}],
'averaging_window': avg_window
}
## default values
payload['task_type'] = 'regular'
@@ -148,7 +150,7 @@ def main():
table = find_surl_and_size_per_sasid(engine, sas_id)
for index, line in table.iterrows():
logging.info('creating task for sas_id %s - dataset %s', sas_id, index)
payload = create_payload_from_entry(line, args.filter, args.workflow)
payload = create_payload_from_entry(line, args.filter, args.workflow, args.averaging_window)
atdb_interface.submit_task(payload)
Loading