Commit 5c57b99f authored by Alexander van Amesfoort

Task #9939: Resource estimate fmt: pass single root_resource_group instead of list: enough for DRAGNET work.
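For context, the change below replaces the list-valued 'root_resource_groups' key in each resource estimate with a single-valued 'root_resource_group' key. A minimal sketch of the before/after shape, using only the field names and example values that appear in the diff (output_files details elided):

    # Before this commit: each estimate carried a list of root resource groups.
    estimate_old = {
        'resource_types': {'bandwidth': 536870912, 'storage': 128849018880},
        'count': 1, 'root_resource_groups': ['CEP4'],  # count is per root resource group!
        'output_files': {}  # details omitted
    }

    # After this commit: a single root resource group per estimate.
    estimate_new = {
        'resource_types': {'bandwidth': 536870912, 'storage': 128849018880},
        'count': 1, 'root_resource_group': 'CEP4',
        'output_files': {}  # details omitted
    }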
parent 5e0bfb95
@@ -495,8 +495,7 @@ class ResourceAssigner():
             # Compile list of resource group IDs whose resources we care about,
             # then filter db resource list with it.
-            # TODO: tmp: deal with [0] only
-            needed_root_resource_group_name = needed_res['root_resource_groups'][0]
+            needed_root_resource_group_name = needed_res['root_resource_group']
             target_resgids = self._get_subtree_resource_group_ids(db_rgp2rgp, needed_root_resource_group_name)
             db_resource_list.extend([r for r in db_needed_resource_list if db_r2rgp[r['id']]['parent_group_ids'][0] in target_resgids])
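The intent of the filtering in this hunk: starting from the requested root resource group, collect the IDs of its whole subtree and keep only the resources whose (first) parent group lies inside it. The helper names _get_subtree_resource_group_ids and db_rgp2rgp come from the diff; the dict shapes in the self-contained sketch below are assumptions for illustration only, not the actual RADB structures:

    def subtree_resource_group_ids(rgp2rgp, root_name):
        """Collect the IDs of root_name and of all resource groups below it (simple tree walk)."""
        root_id = next(gid for gid, grp in rgp2rgp.items() if grp['name'] == root_name)
        ids, stack = set(), [root_id]
        while stack:
            gid = stack.pop()
            ids.add(gid)
            stack.extend(rgp2rgp[gid]['child_ids'])
        return ids

    # Toy data: DRAGNET with one storage node below it, plus an unrelated CEP4 group.
    rgp2rgp = {0: {'name': 'DRAGNET', 'child_ids': [1]},
               1: {'name': 'drg01',   'child_ids': []},
               2: {'name': 'CEP4',    'child_ids': []}}
    resources = [{'id': 10, 'parent_group_id': 1},   # under drg01, hence under DRAGNET
                 {'id': 11, 'parent_group_id': 2}]   # under CEP4, filtered out

    target_ids = subtree_resource_group_ids(rgp2rgp, 'DRAGNET')
    print([r['id'] for r in resources if r['parent_group_id'] in target_ids])  # [10]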
@@ -65,7 +65,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
         The following return value example is for an obs duration of 240.0 s and 3 data product types for 2 clusters.
         Here we have UV data for CEP4 (100 files), and we have for DRAGNET 2 IS (IQUV) and 7 CS TABs (also IQUV),
         each split across 5 parts (i.e. 7*4*5 + 2*4*5 = 180 files).
-        If estimate sizes and root_resource_groups are the same, estimates can be combined into 1 estimate (partly done here).
+        If estimate sizes and root_resource_group are the same, estimates can be combined into 1 estimate (partly done here).
         (Not shown here, but note that for CS complex voltage data (XXYY), we must produce 1 estimate per 4 files (XXYY),
         such that the XXYY files for a part end up on the same storage. Such a beam can still be split across >1 parts.)
@@ -77,7 +77,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
         'errors': [],
         'estimates': [{
             'resource_types': {'bandwidth': 536870912, 'storage': 128849018880},
-            'count': 1, 'root_resource_groups': ['CEP4'],  # count is per root resource group!
+            'count': 1, 'root_resource_group': 'CEP4',
             'output_files': {
                 'uv': {'nr_of_uv_files': 100, 'uv_file_size': 1073741824, 'identifications': [...]},
                 'saps': [{'sap_nr': 0, 'properties': {'nr_of_uv_files': 80, 'start_sb_nr': 0}},
@@ -86,7 +86,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
                 ]
             }
         }, {'resources': {'bandwidth': 8947849, 'storage': 2147483648},
-            'count': 180, 'root_resource_groups': ['DRAGNET'],  # count is per root resource group!
+            'count': 180, 'root_resource_group': 'DRAGNET',
             'output_files': {
                 'cs': {'nr_of_cs_files': 140, 'cs_file_size': 2147483648, 'nr_of_cs_stokes': 4, 'identifications': [...]},
                 'is': {'nr_of_is_files': 40, 'is_file_size': 2147483648, 'nr_of_is_stokes': 4, 'identifications': [...]},
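As a sanity check on the file counts in the docstring example above: 7 coherent (CS) TABs and 2 incoherent (IS) TABs, each with 4 Stokes components (IQUV) and split across 5 parts. A quick check of the arithmetic:

    nr_cs_tabs, nr_is_tabs = 7, 2
    nr_stokes, nr_parts = 4, 5                            # IQUV, 5 parts per beam

    nr_of_cs_files = nr_cs_tabs * nr_stokes * nr_parts    # 140
    nr_of_is_files = nr_is_tabs * nr_stokes * nr_parts    # 40
    print(nr_of_cs_files + nr_of_is_files)                # 180, the DRAGNET 'count' above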
@@ -199,7 +199,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
         total_files = 1
         estimate = {'resource_types': {'bandwidth': bandwidth, 'storage': file_size},
-                    'count': total_files, 'root_resource_groups': [root_resource_group],
+                    'count': total_files, 'root_resource_group': root_resource_group,
                     'output_files': output_files}
         logger.debug("Correlated data estimate: {}".format(estimate))
         return estimate
@@ -289,7 +289,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
         total_files = 1
         estimate = {'resource_types': {'bandwidth': bandwidth, 'storage': file_size},
-                    'count': total_files, 'root_resource_groups': [root_resource_group],
+                    'count': total_files, 'root_resource_group': root_resource_group,
                     'output_files': output_files}
         estimate['output_files']['cs']['nr_of_cs_stokes'] = nr_coherent
@@ -367,7 +367,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
         total_files = 1
         estimate = {'resource_types': {'bandwidth': bandwidth, 'storage': file_size},
-                    'count': total_files, 'root_resource_groups': [root_resource_group],
+                    'count': total_files, 'root_resource_group': root_resource_group,
                     'output_files': output_files}
         estimate['output_files']['is']['nr_of_is_stokes'] = nr_incoherent
@@ -375,17 +375,17 @@ class ObservationResourceEstimator(BaseResourceEstimator):
         return estimate

     def _merge_estimates(self, estimates):
-        """ Estimates can only be merged if same root_resource_groups and bandwidth and storage,
-            or if the root_resource_groups have a (single) global filesystem.
+        """ Estimates can only be merged if same root_resource_group and bandwidth and storage,
+            or if the root_resource_group has a (single) global filesystem.
             NOTE: assumed good enough to only merge conseq pairs, not all pairs.
         """
         i = 1
         while i < len(estimates):  # careful iterating while modifying
-            if estimates[i-1]['root_resource_groups'] == estimates[i]['root_resource_groups'] and \
+            if estimates[i-1]['root_resource_group'] == estimates[i]['root_resource_group'] and \
               ((estimates[i-1]['resource_types']['bandwidth'] == estimates[i]['resource_types']['bandwidth'] and \
                 estimates[i-1]['resource_types']['storage'] == estimates[i]['resource_types']['storage']) or \
-                all(self._hasGlobalStorage(rg) for rg in root_resource_groups)
+                self._hasGlobalStorage(estimates[i]['root_resource_group'])
               ):
                 # Mergeable. Add uv, cs, is from estimates[i] into estimates[i-1]
                 if 'uv' in estimates[i]['output_files']:
@@ -414,7 +414,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
                     else:
                         j += 1

-                if all(self._hasGlobalStorage(rg) for rg in estimates[i]['root_resource_groups']):
+                if self._hasGlobalStorage(estimates[i]['root_resource_group']):
                     # for global fs, collapse regardless
                     estimates[i-1]['resource_types']['bandwidth'] *= estimates[i-1]['count']  # *= 1, but to be robust and clear
                     estimates[i-1]['resource_types']['bandwidth'] += estimates[i]['resource_types']['bandwidth'] * estimates[i]['count']
@@ -422,7 +422,7 @@ class ObservationResourceEstimator(BaseResourceEstimator):
                     estimates[i-1]['resource_types']['storage'] += estimates[i]['resource_types']['storage'] * estimates[i]['count']
                     estimates[i-1]['count'] = 1  # already 1, but to be robust and clear
                 else:
-                    # root_resource_groups and values of bandwidth and storage are equal for both estimates
+                    # root_resource_group and values of bandwidth and storage are equal for both estimates
                     estimates[i-1]['count'] += estimates[i]['count']
                 logger.info('Merged observation resource estimate {} into {}'.format(i, i-1))
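To illustrate the merge rule documented in _merge_estimates above: consecutive estimates with the same root_resource_group and equal bandwidth and storage can be combined by simply summing their counts. A simplified, standalone sketch of that rule (it skips the output_files bookkeeping and the global-filesystem collapse handled in the diff):

    def merge_consecutive(estimates):
        """Merge consecutive estimates with equal root_resource_group, bandwidth and storage."""
        i = 1
        while i < len(estimates):  # careful: iterating while modifying
            prev, cur = estimates[i-1], estimates[i]
            if (prev['root_resource_group'] == cur['root_resource_group'] and
                    prev['resource_types'] == cur['resource_types']):
                prev['count'] += cur['count']  # same size per file, so just add the counts
                del estimates[i]
            else:
                i += 1
        return estimates

    # Two equal-sized DRAGNET estimates collapse into one with count 180.
    ests = [{'root_resource_group': 'DRAGNET', 'count': 140,
             'resource_types': {'bandwidth': 8947849, 'storage': 2147483648}},
            {'root_resource_group': 'DRAGNET', 'count': 40,
             'resource_types': {'bandwidth': 8947849, 'storage': 2147483648}}]
    print(merge_consecutive(ests))  # one estimate, count == 180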