From 9ec0b12f44fd031f60b9bccd33fe511e793b676f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rn=20K=C3=BCnsem=C3=B6ller?= <jkuensem@physik.uni-bielefeld.de> Date: Wed, 30 Jan 2019 15:37:13 +0000 Subject: [PATCH] Task SW-560: Some more bugfixes, dump should work now, but storage map setting still broken --- MAC/Services/TBB/TBBServer/lib/tbbservice.py | 27 +++++++++++++++----- MAC/TBB/lib/tbb_freeze.py | 1 - MAC/TBB/lib/tbb_set_storage.py | 1 + SAS/TriggerServices/lib/trigger_service.py | 12 +++++---- 4 files changed, 28 insertions(+), 13 deletions(-) diff --git a/MAC/Services/TBB/TBBServer/lib/tbbservice.py b/MAC/Services/TBB/TBBServer/lib/tbbservice.py index d43fd1eba9e..9820bbde45f 100644 --- a/MAC/Services/TBB/TBBServer/lib/tbbservice.py +++ b/MAC/Services/TBB/TBBServer/lib/tbbservice.py @@ -150,16 +150,24 @@ class TBBControlService: # But I assume this is required to allow prefixes or sth like that. # Note: ICD states that non-filled-in parameters should/can be present, and should contain 0 by default. 
+ if isinstance(parset, parameterset): + parset = parset.dict() + + if isinstance(updates, parameterset): + updates = updates.dict() + for dk, dv in updates.items(): found_in_parset = False - for k, v in parset.dict().items(): + for k, v in parset.items(): if dk in k: found_in_parset = True - parset.replace[k] = dv + #parset.replace(k, dv) <- does not work with parameterset during live testing for some reason + parset[k] = dv if not found_in_parset: - parset.add(dk, dv) + # parset.add(dk, dv) + parset[dk] = dv - return parset + return parameterset(parset) def _get_parset_of_running_obs(self): """ @@ -456,7 +464,7 @@ class TBBControlService: :return: """ - log_message = "Performing TBB data dump to CEP for trigger %s and project %s " % (project, triggerid) + log_message = "Performing TBB data dump to CEP for trigger %s and project %s " % (triggerid, project) logger.info(log_message + "...") # todo @@ -494,8 +502,13 @@ class TBBControlService: nodes = [node for node in nodes if node in datawriter_nodes] # create mapping for storage nodes - storage_map = create_mapping(lcus, nodes) - self.set_storage(storage_map) + try: + storage_map = create_mapping(lcus, nodes) + self.set_storage(storage_map) + except: + logger.exception('Could not create storage map. Will try to dump anyway.') + + # start upload self.upload_data(lcus_str, dm, starttime, duration, subbands, waittime, boards) self._add_meta_data_to_h5_files(output_path) diff --git a/MAC/TBB/lib/tbb_freeze.py b/MAC/TBB/lib/tbb_freeze.py index 5c9fab55b41..802d1b02fbf 100755 --- a/MAC/TBB/lib/tbb_freeze.py +++ b/MAC/TBB/lib/tbb_freeze.py @@ -17,7 +17,6 @@ from lofar.mac.tbb.tbb_util import split_stations_by_boardnumber, wrap_remote_co # we cannot use the following here because we need to execute things in parallel on all stations (->lcurun) # from lofar.common.lcu_utils import wrap_command_in_lcu_station_ssh_call - def freeze_tbb(stations, dm, timesec, timensec): """ :param stations: comma-separated list of stations diff --git a/MAC/TBB/lib/tbb_set_storage.py b/MAC/TBB/lib/tbb_set_storage.py index 57ec70dc496..50b129b983a 100755 --- a/MAC/TBB/lib/tbb_set_storage.py +++ b/MAC/TBB/lib/tbb_set_storage.py @@ -55,6 +55,7 @@ def create_mapping(stations, nodes): """ # zip truncates to shortest list, so make sure there are enough nodes, then map each station to a node + logging.info("Mapping stations %s on %s nodes " % (stations, nodes)) nodes *= (len(stations) // len(nodes) + 1) map = dict(zip(stations, nodes)) logging.debug('Stations were mapped to nodes as follows: %s' % map) diff --git a/SAS/TriggerServices/lib/trigger_service.py b/SAS/TriggerServices/lib/trigger_service.py index 58538e4cf73..4f09b0ecadc 100644 --- a/SAS/TriggerServices/lib/trigger_service.py +++ b/SAS/TriggerServices/lib/trigger_service.py @@ -269,10 +269,12 @@ class ALERTHandler(VOEventListenerInterface): # _send_notification('ALERT Broker', ALERT_BROKER_HOST, self.project, triggerid, voevent_xml) # todo: do we want that? do we want it on same bus? logger.info('ALERT event %s is accepted. Initiating TBB dump: starttime %s, duration %ssec, dm %s' % (triggerid, starttime, duration, dm)) available_stations = self._determine_station_lists()['available'] + lcus = [stationname2hostname(station) for station in available_stations] + lcu_str = ','.join(lcus) # do a fast direct freeze call here, so the boards still contain data for this event. # if we freeze via rpc/service calls, that takes time, so we might loose precious data from the buffers. - freeze_tbb(available_stations, dm, starttime_sec , starttime_nsec) + freeze_tbb(lcu_str, dm, starttime_sec , starttime_nsec) # initiate the dumping via an rpc call to the tbbservice which takes care of all bookkeeping. with TBBRPC() as rpc: @@ -335,12 +337,12 @@ class ALERTHandler(VOEventListenerInterface): logger.warning('No observations running at %s, so TBB\'s are not recording', stoptime) return False - station_sets = self._determine_station_lists() + station_lists = self._determine_station_lists() - if len(station_sets['available']) > 0: - logger.info('Enough TBB stations available: %s', station_sets['available']) + if len(station_lists['available']) > 0: + logger.info('Enough TBB stations available: %s', station_lists['available']) else: - logger.warning('No TBB stations available. requested=%s active=%s', station_sets['requested'], station_sets['active']) + logger.warning('No TBB stations available. requested=%s active=%s', station_lists['requested'], station_lists['active']) return False # all prerequisites are met. -- GitLab