From a25c9ef240e5f77a78e2b86c3ad6d08c02c493e5 Mon Sep 17 00:00:00 2001
From: Jorrit Schaap <schaap@astron.nl>
Date: Thu, 12 Dec 2024 12:33:23 +0100
Subject: [PATCH] TMSS-2964: limit max number of workers

---
 SAS/TMSS/backend/services/scheduling/lib/constraints.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/SAS/TMSS/backend/services/scheduling/lib/constraints.py b/SAS/TMSS/backend/services/scheduling/lib/constraints.py
index c05ce1b7fb4..bcf05939f89 100644
--- a/SAS/TMSS/backend/services/scheduling/lib/constraints.py
+++ b/SAS/TMSS/backend/services/scheduling/lib/constraints.py
@@ -385,7 +385,7 @@ def filter_scheduling_units_using_constraints(scheduling_units: [models.Scheduli
         else:
             # filter in parallel, accepting the little overhead of multiprocessing startup etc
             django.db.close_old_connections() # django reopens them (quickly and automatically), and we don't want this process's connections to be shared with the multiprocessing connections (which causes mayhem)
-            executor = ProcessPoolExecutor(initializer=django.setup)
+            executor = ProcessPoolExecutor(initializer=django.setup, max_workers=min(int(round(0.8*(os.cpu_count() or 1))), 20))
             # prepare a parallel map iterator, looping over the scheduling_units in parallel
             map_iter = executor.map(_filter_single_scheduling_unit_using_constraints,
                                     [scheduling_unit for scheduling_unit in scheduling_units],
@@ -1649,7 +1649,7 @@ def evaluate_constraint_on_missing_gridded_timestamps(scheduling_unit: models.Sc
             else:
                 # evaluate_constraint in parallel, accepting the little overhead of multiprocessing startup etc
                 django.db.close_old_connections() # django reopens them (quickly and automatically), and we don't want this process's connections to be shared with the multiprocessing connections (which causes mayhem)
-                executor = ProcessPoolExecutor(initializer=django.setup)
+                executor = ProcessPoolExecutor(initializer=django.setup, max_workers=min(int(round(0.8*(os.cpu_count() or 1))), 20))
                 # prepare a parallel map iterator, looping over the scheduling_units in parallel
                 map_iter = executor.map(evaluate_constraint,
                                         [scheduling_unit for _ in to_be_evaluated_timestamps],
@@ -1885,7 +1885,7 @@ def compute_start_times_for_units(scheduling_units: [models.SchedulingUnitBluepr
         else:
             # compute_scheduling_unit_start_time in parallel, accepting the little overhead of multiprocessing startup etc
             django.db.close_old_connections()  # django reopens them (quickly and automatically), and we don't want this process's connections to be shared with the multiprocessing connections (which causes mayhem)
-            executor = ProcessPoolExecutor(initializer=django.setup)
+            executor = ProcessPoolExecutor(initializer=django.setup, max_workers=min(int(round(0.8*(os.cpu_count() or 1))), 20))
             # prepare a parallel map iterator, looping over the scheduling_units in parallel
             map_iter = executor.map(compute_scheduling_unit_start_time,
                                     [scheduling_unit for scheduling_unit in scheduling_units],
-- 
GitLab