diff --git a/atdb/atdb/static/taskdatabase/ATDB-LDV Workflow Diagram.png b/atdb/atdb/static/taskdatabase/ATDB-LDV Workflow Diagram.png
index 0035bc3bb0fbaa4f4814048548d46304af040a54..f57179df55cb89959c52682822691ceefb0d7176 100644
Binary files a/atdb/atdb/static/taskdatabase/ATDB-LDV Workflow Diagram.png and b/atdb/atdb/static/taskdatabase/ATDB-LDV Workflow Diagram.png differ
diff --git a/atdb/atdb/static/taskdatabase/style.css b/atdb/atdb/static/taskdatabase/style.css
index 5a9474dc821e986f99447d450413324c3d2bf9b7..5e4bce6b5f3f230602792cbd6eddbac6d8018acd 100644
--- a/atdb/atdb/static/taskdatabase/style.css
+++ b/atdb/atdb/static/taskdatabase/style.css
@@ -3,12 +3,12 @@ TD {
   font-size: 12pt;
 }
 
-.defining,.staging,.fetching,.processing,.storing,.scrub,.scrubbing,.archiving,.discarding,.pre_archiving {
+.defining,.staging,.fetching,.processing,.storing,.scrub,.scrubbing,.archiving,.aggregating {
   font-style: italic;
   color: green;
 }
 
-.defined,.staged,.fetched,.processed,.stored,.validated,.scrubbed,.archived,.pre_archived,.finished {
+.defined,.staged,.fetched,.processed,.stored,.validated,.scrubbed,.archived,.finished,.aggregated {
   background-color: lemonchiffon;
   color: blue;
 }
@@ -32,10 +32,9 @@ TD {
   background-color: lightgreen;
 }
 
-.aggregate_failed {
-  font-weight: bold;
+.aggregate_failed,.aggregating_failed {
   color: red;
-  background-color: lightgreen;
+  font-weight: bold;
 }
 
 
@@ -149,3 +148,8 @@ p.title {
   display: inline-block;
   vertical-align: middle;
 }
+
+.img {
+    color: white;
+    font-family: "Courier New";
+}
\ No newline at end of file
diff --git a/atdb/docs/ATDB-LDV Data Model.png b/atdb/docs/ATDB-LDV Data Model.png
index b82814904d74f2303a3a6cf5dd27c2927c289a49..e75767f34cc841cfcb4275218997e7c2baeeb0a5 100644
Binary files a/atdb/docs/ATDB-LDV Data Model.png and b/atdb/docs/ATDB-LDV Data Model.png differ
diff --git a/atdb/docs/ATDB-LDV Workflow Diagram.png b/atdb/docs/ATDB-LDV Workflow Diagram.png
index eeed7db92827385b647afd4d7cd5bbe68545db9b..f63f4d68e76c9c708ab045188e6f0431a26905f7 100644
Binary files a/atdb/docs/ATDB-LDV Workflow Diagram.png and b/atdb/docs/ATDB-LDV Workflow Diagram.png differ
diff --git a/atdb/dumpdata_configuration.bat b/atdb/dumpdata_configuration.bat
index a017b5d85f0f55b3e66173277bd910504947b461..27742ef1353b2fae9d7ffc22f8f6bf0bd22a46aa 100644
--- a/atdb/dumpdata_configuration.bat
+++ b/atdb/dumpdata_configuration.bat
@@ -1,3 +1,5 @@
+REM dump the configuration table as a json file (fixture)
+
 python manage.py dumpdata taskdatabase.configuration --indent 4 --settings=atdb.settings.dev > configuration_fixture.json
 
 REM sdc-dev / sdc
diff --git a/atdb/dumpdata_workflows.bat b/atdb/dumpdata_workflows.bat
index d7463c45b3a44224cf2f70694046c09dff19df10..4d7cedf6a665cafd73d4ff9c8d43c94ee0b46cd4 100644
--- a/atdb/dumpdata_workflows.bat
+++ b/atdb/dumpdata_workflows.bat
@@ -1,3 +1,5 @@
+REM dump the workflows tables as a json file (fixture)
+
 python manage.py dumpdata taskdatabase.workflow --indent 4 --settings=atdb.settings.dev > workflow_fixture.json
 
 REM sdc-dev / sdc
diff --git a/atdb/loaddata_configuration.bat b/atdb/loaddata_configuration.bat
index 59c194ddcd26a993889abb28f34934c84bcbaa86..f8cd885d46256bfa5898ae21dc793f77220b1d10 100644
--- a/atdb/loaddata_configuration.bat
+++ b/atdb/loaddata_configuration.bat
@@ -1,3 +1,5 @@
+REM load the configuration_fixture.json from a json file
+
 python manage.py loaddata configuration_fixture.json --settings=atdb.settings.dev
 
 REM sdc-dev / sdc
diff --git a/atdb/loaddata_workflow.bat b/atdb/loaddata_workflow.bat
index f32410337e36943951a33cf999a32b5ebbbfa60d..aeea94e487038072ad0c01bf970cbb966f2a1d6c 100644
--- a/atdb/loaddata_workflow.bat
+++ b/atdb/loaddata_workflow.bat
@@ -1,3 +1,5 @@
+REM load the workflows table from a json file (fixture)
+
 python manage.py loaddata workflow_fixture.json --settings=atdb.settings.dev
 
 REM sdc-dev / sdc
diff --git a/atdb/makemigrations.bat b/atdb/makemigrations.bat
index 7f7d2616bc339244b7acf14d3755fc31ec036873..00df8ee40ca5233191013b1b9c7a910afa747a38 100644
--- a/atdb/makemigrations.bat
+++ b/atdb/makemigrations.bat
@@ -1,2 +1,3 @@
+REM create the database migration file (this does not do the actual migration, see migration.bat for that)
 python manage.py makemigrations --settings=atdb.settings.dev
 REM don't forget to add the new migration file to the repo (right click on the migration file, choose git -> add).
\ No newline at end of file
diff --git a/atdb/migrate.bat b/atdb/migrate.bat
index e628eead5a55dde18ea017e173c32b219dba76d2..9ec2e177996ca67ed8868f695a766900b4e9aefa 100644
--- a/atdb/migrate.bat
+++ b/atdb/migrate.bat
@@ -1 +1,2 @@
+REM run the database migration
 python manage.py migrate --settings=atdb.settings.dev
\ No newline at end of file
diff --git a/atdb/requirements/base.txt b/atdb/requirements/base.txt
index eb63b89a13274589c3866ed061fdba550b169cb4..fcc3da67820b11bdff135ff597f5e5ff589b0da6 100644
--- a/atdb/requirements/base.txt
+++ b/atdb/requirements/base.txt
@@ -1,7 +1,6 @@
 astronauth==0.3.3
 Django==5
 django-allauth==0.57.0  # note allauth only supports Django >= 3.2
-django-bootstrap-pagination==1.7.1
 django-bootstrap3==23.6
 django-cors-headers==3.6.0
 django-extensions==3.1.0
diff --git a/atdb/rollback.bat b/atdb/rollback.bat
index dc8c5fa1f66a7a497acde7201e0f895bb36fccf0..8602eaf2c6dc80665b00d968d1b364722f289fc0 100644
--- a/atdb/rollback.bat
+++ b/atdb/rollback.bat
@@ -1 +1,2 @@
+REM example of how to rollback a database change
 python manage.py migrate taskdatabase 0010 --settings=atdb.settings.dev
\ No newline at end of file
diff --git a/atdb/superuser.bat b/atdb/superuser.bat
index 879d8c04e782d2c0cbc7872714d6584e8e9c4f09..f8ebf6dc78e962528907b06408671faf3b792aa4 100644
--- a/atdb/superuser.bat
+++ b/atdb/superuser.bat
@@ -1 +1,2 @@
+REM add a superuser to the database
 python manage.py createsuperuser --settings=atdb.settings.dev
\ No newline at end of file
diff --git a/atdb/taskdatabase/migrations/0045_activity_is_processed.py b/atdb/taskdatabase/migrations/0045_activity_is_processed.py
new file mode 100644
index 0000000000000000000000000000000000000000..89cb21c864d689c6bfb8182cc507c08c271253d3
--- /dev/null
+++ b/atdb/taskdatabase/migrations/0045_activity_is_processed.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.0 on 2024-04-02 07:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('taskdatabase', '0044_alter_job_type'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='activity',
+            name='is_processed',
+            field=models.BooleanField(default=False),
+        ),
+    ]
diff --git a/atdb/taskdatabase/migrations/0046_rename_is_combined_activity_is_aggregated_and_more.py b/atdb/taskdatabase/migrations/0046_rename_is_combined_activity_is_aggregated_and_more.py
new file mode 100644
index 0000000000000000000000000000000000000000..97e7ced0047d910acb32b4280ee8f501f1d62db8
--- /dev/null
+++ b/atdb/taskdatabase/migrations/0046_rename_is_combined_activity_is_aggregated_and_more.py
@@ -0,0 +1,22 @@
+# Generated by Django 5.0 on 2024-04-05 07:30
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('taskdatabase', '0045_activity_is_processed'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='activity',
+            old_name='is_combined',
+            new_name='is_aggregated',
+        ),
+        migrations.RemoveField(
+            model_name='activity',
+            name='is_validated',
+        ),
+    ]
diff --git a/atdb/taskdatabase/models.py b/atdb/taskdatabase/models.py
index fd3e3662ee764876b3617cfab9243a96614a9612..46809a3f7953dbc70354aaa105a8bb8681e2d2a1 100644
--- a/atdb/taskdatabase/models.py
+++ b/atdb/taskdatabase/models.py
@@ -115,11 +115,11 @@ class Activity(models.Model):
     # set by update_activity, used by Validation Page
     is_verified  = models.BooleanField(default=False)
 
-    # TODO: flag set by the 'validate' step in ATDB, used by combine service
-    is_validated = models.BooleanField(default=False)
+    # flag set by ATDB to indicate that all tasks of this Activity have been processed
+    is_processed = models.BooleanField(default=False)
 
-    # TODO: flag set (and used) by the combine service, so that it doesn't do double work
-    is_combined = models.BooleanField(default=False)
+    # flag set (and used) by the aggregator service, so that it doesn't do double work
+    is_aggregated = models.BooleanField(default=False)
 
     finished_fraction = models.FloatField(blank=True, null=True)
     ingested_fraction = models.FloatField(blank=True, null=True)
@@ -208,12 +208,8 @@ class Task(models.Model):
         return str(self.id) + ' - (' + self.task_type + ') - ' + str(self.sas_id)
 
     def save(self, *args, **kwargs):
-        # nv:1mar2023, temporary hack, set tasks 'on hold' as soon they get to 'scrubbed'
-        # (users forget to do that manually, causing unwanted ingests)
-        if (self.status != State.SCRUBBED.value) & (self.new_status == State.SCRUBBED.value):
-            self.resume = False
 
-        # nv:19jun2023, calculate the qualities for this task
+        # calculate the qualities for this task
         if (self.status != State.STORED.value) & (self.new_status == State.STORED.value):
 
             # read the quality_thresholds from the Configuration table
@@ -230,12 +226,33 @@ class Task(models.Model):
             tasks_for_this_sasid = Task.objects.filter(sas_id=self.sas_id)
             self.calculated_qualities = qualities.calculate_qualities(self, tasks_for_this_sasid, quality_thresholds)
 
-        # nv:20feb2024, check if this task is a summary task
-        if (self.status != State.STORED.value) & (self.new_status == State.STORED.value):
-            self.is_summary = check_if_summary(self)
+        # when a task goes to PROCESSED...
+
+        if (self.status != State.PROCESSED.value) & (self.new_status == State.PROCESSED.value):
+            try:
+                # ...but only when the tasks of this activity are not yet aggregated
+                # (because the aggregator service also sets tasks back to processed)
+
+                if (self.activity.status != State.AGGREGATED.value):
+                    # check if this task is a summary task
+                    self.is_summary = check_if_summary(self)
+
+                    # if so, temporarily put it on hold so that the ancillary service can grab it
+                    if self.is_summary:
+                        #TODO: uncomment to enable aggregator
+                        #self.resume = False
+                        pass
+
+            except Exception as error:
+                # this should never happen
+                # But it can happen that tasks are inserted directly in an advanced status without going through
+                # the proper steps (like in tests). If that happens, just log the error and continue.
+                logger.error(error)
 
         # nv:20feb2024, same as above, but for backward compatibilty reasons.
         # For tasks that are already beyond PROCESSED, but not yet ingested.
+        # But there is no point in putting them on 'hold' for the ancillary service,
+        # because at this point the spider work directory has already been deleted by datamanager
         if (self.status != State.VALIDATED.value) & (self.new_status == State.VALIDATED.value):
             self.is_summary = check_if_summary(self)
 
diff --git a/atdb/taskdatabase/services/activities_handler.py b/atdb/taskdatabase/services/activities_handler.py
index fa44983408cd844a87f09f477e6a036b85c6821f..68b1666bb6b1a171878b441bfd36a04a4697dd73 100644
--- a/atdb/taskdatabase/services/activities_handler.py
+++ b/atdb/taskdatabase/services/activities_handler.py
@@ -1,5 +1,5 @@
 import logging;
-from .common import State, verified_statusses
+from .common import State, verified_statusses, processed_statusses
 from taskdatabase.models import Task, Activity
 
 logger = logging.getLogger(__name__)
@@ -79,6 +79,7 @@ def update_activity(task):
     - to STORED                                  : calculate quality
     - to SCRUBBED, ARCHIVING, ARCHIVED, FINISHED : calculate ingested_fraction
     - to _FAILED                                 : calculate finished_fraction
+    - to STORED, PROCESSED, DISCARDED            : check if all tasks are processed, set Activity to is_processed and AGGREGATE
     - always                                     : determine if a task is in a 'verified' status
 
     """
@@ -129,6 +130,33 @@ def update_activity(task):
         activity.remaining = result['remaining']
         activity.save()
 
+
+    # check if all tasks of this SAS_ID have a status that is considered 'processed'
+    # this is used as a trigger for the ancillary service
+    if task.status in processed_statusses:
+        current_is_processed = activity.is_processed
+        activity.is_processed = True
+        non_discarded_found = False
+        for t in Task.objects.filter(sas_id=task.sas_id):
+            if t.status not in processed_statusses:
+                activity.is_processed = False
+                break
+
+            # at least one of the tasks should NOT be in discarded,
+            # otherwise a fully discarded SAS_ID will also register as 'is_processed' and ready to 'AGGREGATE'
+            if t.status != State.DISCARDED.value:
+                non_discarded_found = True
+
+        # only save when changed
+        if activity.is_processed != current_is_processed:
+            # if the whole activity has become processed, then set the status of this activity to 'AGGREGATE'
+            if (activity.is_processed & non_discarded_found):
+                # TODO: uncomment to enable aggregator
+                # activity.status = State.AGGREGATE.value
+                pass
+
+            activity.save()
+
     # check if all tasks of this SAS_ID have a status that is considered 'verified'
     # this is used for the Validation Page
     current_is_verified = activity.is_verified
diff --git a/atdb/taskdatabase/services/common.py b/atdb/taskdatabase/services/common.py
index b9eaf2ebafbfca64af58e410d9ce5d548d4bb444..78e1a83cea6da9315ec9d229505639ac1aa7822f 100644
--- a/atdb/taskdatabase/services/common.py
+++ b/atdb/taskdatabase/services/common.py
@@ -8,10 +8,13 @@ from enum import Enum
 
 logger = logging.getLogger(__name__)
 class State(Enum):
+    UNKNOWN = "unknown"
     DEFINED = "defined"
     STAGED = "staged"
     FETCHED = "fetched"
     PROCESSED = "processed"
+    AGGREGATE = "aggregate"
+    AGGREGATED = "aggregated"
     STORED = 'stored'
     VALIDATED = "validated"
     SCRUBBED = "scrubbed"
@@ -28,6 +31,7 @@ class State(Enum):
 verified_statusses = [State.STORED.value, State.VALIDATED.value, State.SCRUBBED.value, State.PRE_ARCHIVED.value,
                       State.ARCHIVED.value, State.FINISHED.value, State.SUSPENDED.value, State.DISCARDED.value]
 
+processed_statusses = [State.PROCESSED.value, State.STORED.value, State.DISCARDED.value]
 
 class SummaryFlavour(Enum):
     DEFAULT = "default"
diff --git a/atdb/taskdatabase/static/taskdatabase/ATDB-LDV Workflow Diagram.png b/atdb/taskdatabase/static/taskdatabase/ATDB-LDV Workflow Diagram.png
index 0035bc3bb0fbaa4f4814048548d46304af040a54..f57179df55cb89959c52682822691ceefb0d7176 100644
Binary files a/atdb/taskdatabase/static/taskdatabase/ATDB-LDV Workflow Diagram.png and b/atdb/taskdatabase/static/taskdatabase/ATDB-LDV Workflow Diagram.png differ
diff --git a/atdb/taskdatabase/static/taskdatabase/style.css b/atdb/taskdatabase/static/taskdatabase/style.css
index 6ca2e94521d94a8ec94f9977d1fc8facfd3efcb7..5e4bce6b5f3f230602792cbd6eddbac6d8018acd 100644
--- a/atdb/taskdatabase/static/taskdatabase/style.css
+++ b/atdb/taskdatabase/static/taskdatabase/style.css
@@ -3,12 +3,12 @@ TD {
   font-size: 12pt;
 }
 
-.defining,.staging,.fetching,.processing,.storing,.scrub,.scrubbing,.archiving {
+.defining,.staging,.fetching,.processing,.storing,.scrub,.scrubbing,.archiving,.aggregating {
   font-style: italic;
   color: green;
 }
 
-.defined,.staged,.fetched,.processed,.stored,.validated,.scrubbed,.archived,.finished {
+.defined,.staged,.fetched,.processed,.stored,.validated,.scrubbed,.archived,.finished,.aggregated {
   background-color: lemonchiffon;
   color: blue;
 }
@@ -32,10 +32,9 @@ TD {
   background-color: lightgreen;
 }
 
-.aggregate_failed {
-  font-weight: bold;
+.aggregate_failed,.aggregating_failed {
   color: red;
-  background-color: lightgreen;
+  font-weight: bold;
 }
 
 
diff --git a/atdb/taskdatabase/templates/taskdatabase/index.html b/atdb/taskdatabase/templates/taskdatabase/index.html
index 05c6414f8a45eba9c646d2d09cff77a2488c5a1a..35df7c2ab88f7b2c6d3d17f6a154226f7173dbe3 100644
--- a/atdb/taskdatabase/templates/taskdatabase/index.html
+++ b/atdb/taskdatabase/templates/taskdatabase/index.html
@@ -31,7 +31,7 @@
             {% include 'taskdatabase/pagination.html' %}
         </div>
     </div>
-    <p class="footer"> Version 1 Apr 2024
+    <p class="footer"> Version 19 Apr 2024 (a)
 </div>
 
 {% include 'taskdatabase/refresh.html' %}
diff --git a/atdb/taskdatabase/templates/taskdatabase/tasks.html b/atdb/taskdatabase/templates/taskdatabase/tasks.html
index 29ef8b5f4c3639fc71878d7b7a484cca28775e67..245cf7b96dc9dde9e29317929343b0c8cae19d0a 100644
--- a/atdb/taskdatabase/templates/taskdatabase/tasks.html
+++ b/atdb/taskdatabase/templates/taskdatabase/tasks.html
@@ -75,15 +75,15 @@
 
 
             {% if user.is_authenticated %}
+                {% if not task.status == 'processed'  %}
+                    {% if task.resume  %}
+                           <a href="{% url 'task-hold-resume' task.pk 'hold' my_tasks.number %}" class="btn btn-warning btn-sm" role="button"><i class="fas fa-pause"></i> hold</a>
+                     {% endif %}
 
-                {% if task.resume  %}
-                       <a href="{% url 'task-hold-resume' task.pk 'hold' my_tasks.number %}" class="btn btn-warning btn-sm" role="button"><i class="fas fa-pause"></i> hold</a>
-                 {% endif %}
-
-                {% if not task.resume  %}
-                       <a href="{% url 'task-hold-resume' task.pk 'resume' my_tasks.number %}" class="btn btn-success btn-sm" role="button"><i class="fas fa-play"></i> start</a>
+                    {% if not task.resume  %}
+                           <a href="{% url 'task-hold-resume' task.pk 'resume' my_tasks.number %}" class="btn btn-success btn-sm" role="button"><i class="fas fa-play"></i> start</a>
+                    {% endif %}
                 {% endif %}
-
             {% endif %}
 
             {% include "taskdatabase/failures/retry_buttons.html" %}
diff --git a/atdb/taskdatabase/tests/test_models_processed_summary.py b/atdb/taskdatabase/tests/test_models_processed_summary.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b996d524a57a6f7ed8e945eff0b75931e3c559e
--- /dev/null
+++ b/atdb/taskdatabase/tests/test_models_processed_summary.py
@@ -0,0 +1,52 @@
+from django.test import TestCase
+import json
+from taskdatabase.models import Task, Workflow, Activity
+from taskdatabase.services.common import State
+
+class TestProcessedSummary(TestCase):
+
+    def setUp(self):
+        """
+        initialize test data
+        """
+        self.workflow_requantisation = Workflow(id=22, workflow_uri="psrfits_requantisation")
+        self.workflow_requantisation.save()
+
+        self.task1 = Task.objects.create(sas_id=222,
+                                         filter="test_blabla",
+                                         new_status=State.PROCESSED.value,
+                                         workflow=self.workflow_requantisation,
+                                         is_summary=False)
+        self.task1.save()
+        self.task2 = Task.objects.create(sas_id=222,
+                                         new_status=State.PROCESSED.value,
+                                         workflow=self.workflow_requantisation,
+                                         outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+        self.task2.save()
+
+    def test_processed_not_on_hold(self):
+        """
+        task 1 is processed, but not a summary dataproduct. Should NOT go on hold
+        """
+
+        actual = self.task1.resume
+        self.assertEqual(actual,  True)
+
+    # TODO: uncomment to enable aggregator
+    # def test_processed_on_hold(self):
+    #     """
+    #     task 2 is processed, and a summary dataproduct. Should go on hold
+    #     """
+    #
+    #     actual = self.task2.resume
+    #     # this test fails, because "self.resume = False" is still commented out in models.py L249
+    #     self.assertEqual(actual,  False)
+
+
+    def test_activity_is_processed(self):
+        """
+        both tasks are processed, the activity should have the is_processed flag now
+        """
+
+        actual = self.task1.activity.is_processed
+        self.assertEqual(actual,  True)
\ No newline at end of file
diff --git a/atdb/taskdatabase/tests/test_summary_tasks.py b/atdb/taskdatabase/tests/test_summary_tasks.py
index bf33c9f75d68202b5ecfec0e0e801e81c50a1f40..667f837a66b2b65f0f13aec05ea0e059ee0ed6b7 100644
--- a/atdb/taskdatabase/tests/test_summary_tasks.py
+++ b/atdb/taskdatabase/tests/test_summary_tasks.py
@@ -10,33 +10,68 @@ class TestSummaryTasks(TestCase):
         self.workflow_requantisation = Workflow(id=22, workflow_uri="psrfits_requantisation")
         self.workflow_requantisation.save()
 
-        self.no_summary_task = Task.objects.create(sas_id=77777, new_status=State.DEFINED.value, workflow=self.workflow_requantisation,
-                                         outputs={"tar_archive": [{"size": 4885985280, "basename": "L621240_SAP002_B073_P000_bf.tar", "nameroot": "L621240_SAP002_B073_P000_bf"}]})
-        self.summary_task_defined = Task.objects.create(sas_id=77777, new_status=State.DEFINED.value, workflow=self.workflow_requantisation,
-                                         outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
-        self.summary_task_stored = Task.objects.create(sas_id=77777, new_status=State.STORED.value, workflow=self.workflow_requantisation,
-                                         outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+        self.no_summary_task_77777 = Task.objects.create(sas_id=77777, new_status=State.DEFINED.value, workflow=self.workflow_requantisation,
+                                                         outputs={"tar_archive": [{"size": 4885985280, "basename": "L621240_SAP002_B073_P000_bf.tar", "nameroot": "L621240_SAP002_B073_P000_bf"}]})
+        self.summary_task_defined_77777 = Task.objects.create(sas_id=77777, new_status=State.DEFINED.value, workflow=self.workflow_requantisation,
+                                                              outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+        self.summary_task_validated_77777 = Task.objects.create(sas_id=77777, new_status=State.VALIDATED.value, workflow=self.workflow_requantisation,
+                                                                outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+        self.summary_task_processed_77777 = Task.objects.create(sas_id=77777, new_status=State.PROCESSED.value, workflow=self.workflow_requantisation,
+                                                          outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+
+        # simulate an Activity that is processed, with 1 task already in STORED, the other one in PROCESSED
+        self.summary_task_stored_88888 = Task.objects.create(sas_id=88888, new_status=State.STORED.value, workflow=self.workflow_requantisation,
+                                                          outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+        self.summary_task_processed_88888 = Task.objects.create(sas_id=88888, new_status=State.PROCESSED.value, workflow=self.workflow_requantisation,
+                                                          outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
 
     def test_no_summary_task(self):
         """
         test task that is not a summary task
         """
+        self.assertFalse(self.no_summary_task_77777.is_summary)
 
-        actual = self.no_summary_task.is_summary
-        self.assertEqual(actual, False)
-
-    def test_summary_task_defined(self):
+    def test_task_defined_does_not_know_that_it_is_summary(self):
         """
         test summary task, but before it knows that it becomes a summary task (which it only knows when 'processed')
         """
+        self.assertFalse(self.summary_task_defined_77777.is_summary)
 
-        actual = self.summary_task_defined.is_summary
-        self.assertEqual(actual, False)
+    def test_task_validated_knows_that_it_is_summary(self):
+        """
+        test summary task, at 'validated' it should know that it is a summary task (and return True)
+        """
+        self.summary_task_validated_77777.save()
+        self.assertTrue(self.summary_task_validated_77777.is_summary)
 
-    def test_summary_task_stored(self):
+    def test_task_processed_knows_that_it_is_summary(self):
         """
         test summary task, at 'stored' it should know that it is a summary task and return True)
         """
-        self.summary_task_stored.save()
-        actual = self.summary_task_stored.is_summary
-        self.assertEqual(actual, True)
+        self.summary_task_processed_77777.save()
+        self.assertTrue(self.summary_task_processed_77777.is_summary)
+
+    # TODO: uncomment to enable aggregator
+    # def test_summary_task_processed_goes_on_hold(self):
+    #     """
+    #     test summary task, when 'processed' it should be put on hold (resume becomes False)
+    #     """
+    #     self.summary_task_processed_88888.save()
+    #     self.assertFalse(self.summary_task_processed_88888.resume)
+
+    def test_activity_77777_not_is_processed(self):
+        """
+        activity 77777 should not be fully processed, because some tasks have not reached 'processed' yet
+        """
+        self.summary_task_processed_77777.save()
+        activity = self.summary_task_processed_77777.activity
+        self.assertFalse(activity.is_processed)
+
+    def test_activity_88888_is_processed(self):
+        """
+        SAS_ID 88888 should be is_processed, because all its tasks have status 'processed' or 'stored'
+        """
+        self.summary_task_stored_88888.save()
+        self.summary_task_processed_88888.save()
+        activity = self.summary_task_processed_88888.activity
+        self.assertTrue(activity.is_processed)
\ No newline at end of file
diff --git a/atdb/taskdatabase/tests/test_update_activity.py b/atdb/taskdatabase/tests/test_update_activity.py
index cd015954369450045a47354dda962c44d2ecf10a..35501e48afc23c9b7aec77d6a8478abbf1c19a18 100644
--- a/atdb/taskdatabase/tests/test_update_activity.py
+++ b/atdb/taskdatabase/tests/test_update_activity.py
@@ -1,6 +1,7 @@
 from django.test import TestCase
 import json
 from taskdatabase.models import Task, Workflow, Activity
+from taskdatabase.services.common import State
 
 class TestUpdateActivity(TestCase):
 
@@ -46,6 +47,42 @@ class TestUpdateActivity(TestCase):
                                          size_to_process=1000,
                                          size_processed=500)
 
+        self.task6 = Task.objects.create(sas_id=111,
+                                         new_status='stored',
+                                         workflow=self.workflow_requantisation,
+                                         size_to_process=1000,
+                                         size_processed=500)
+        self.task6.save()
+        self.task7 = Task.objects.create(sas_id=111,
+                                         new_status='processed',
+                                         workflow=self.workflow_requantisation,
+                                         size_to_process=1000,
+                                         size_processed=500)
+        self.task7.save()
+        self.task8 = Task.objects.create(sas_id=111,
+                                         new_status='processed',
+                                         workflow=self.workflow_requantisation,
+                                         size_to_process=1000,
+                                         size_processed=500,
+                                         outputs={"tar_archive": [
+                                             {"size": 4885985280, "basename": "L185619_summaryCS.tar",
+                                              "nameroot": "L185619_summaryCS"}]}
+                                         )
+        self.task8.save()
+        self.task9 = Task.objects.create(sas_id=112,
+                                         new_status='processing',
+                                         workflow=self.workflow_requantisation,
+                                         size_to_process=1000,
+                                         size_processed=500)
+        self.task9.save()
+        self.task10 = Task.objects.create(sas_id=112,
+                                         new_status='processed',
+                                         workflow=self.workflow_requantisation,
+                                         size_to_process=1000,
+                                         size_processed=500,
+                                         outputs={"tar_archive": [{"size": 4885985280, "basename": "L185619_summaryCS.tar", "nameroot": "L185619_summaryCS"}]})
+        self.task10.save()
+
     def test_created_activity(self):
         """
         test if activity is created
@@ -137,3 +174,28 @@ class TestUpdateActivity(TestCase):
         actual = activity.workflow_id
         self.assertEqual(actual, 22)
 
+    def test_is_not_processed(self):
+        """
+        task 9 is not processed, task 10 is processed.
+        The activity.is_processed should be false
+        """
+
+        activity = self.task9.activity
+
+        actual = activity.is_processed
+        self.assertEqual(actual,  False)
+
+    # TODO: uncomment to enable aggregator
+    # def test_is_processed(self):
+    #     """
+    #     task 6, 7 and 8 are processed,
+    #     activity.is_processed should be true and activity status should go to 'aggregate'
+    #     """
+    #
+    #     activity = self.task6.activity
+    #
+    #     actual = activity.is_processed
+    #     self.assertEqual(actual, True)
+    #
+    #     actual = activity.status
+    #     self.assertEqual(actual, State.AGGREGATE.value)
\ No newline at end of file
diff --git a/atdb/taskdatabase/views.py b/atdb/taskdatabase/views.py
index e3469e622ebb30439a37691d76cdcd5b1140a1fa..0733a6e39fe64f118f28c932937fb0f3e0964cd9 100644
--- a/atdb/taskdatabase/views.py
+++ b/atdb/taskdatabase/views.py
@@ -89,6 +89,7 @@ class TaskFilter(filters.FilterSet):
             'predecessor__status': ['exact', 'icontains', 'in', 'startswith'],
             'activity' : ['isnull'],
             'activity__id': ['exact'],
+            'activity__status': ['exact', 'icontains', 'in', 'startswith'],
             'activity__ingested_fraction' : ['exact','lt', 'lte', 'gt', 'gte','isnull'],
         }
 
@@ -130,6 +131,9 @@ class ActivityFilter(filters.FilterSet):
             'finished_fraction': ['exact', 'lt', 'lte', 'gt', 'gte'],
             'total_size': ['exact', 'lt', 'lte', 'gt', 'gte'],
             'remaining': ['exact', 'lt', 'lte', 'gt', 'gte'],
+            'is_processed': ['exact'],
+            'is_verified': ['exact'],
+            'is_aggregated': ['exact'],
         }
 
 
@@ -151,6 +155,7 @@ class LogEntryFilter(filters.FilterSet):
 
         fields = {
             'task__id': ['exact'],
+            'service': ['exact', 'icontains', 'in'],
             'step_name': ['exact', 'icontains', 'in', 'startswith'],
             'status': ['exact', 'in'],
         }
diff --git a/atdb/test-latest.bat b/atdb/test-latest.bat
new file mode 100644
index 0000000000000000000000000000000000000000..cdb84165849a9ca1837636fc026e8d2e4642d508
--- /dev/null
+++ b/atdb/test-latest.bat
@@ -0,0 +1,2 @@
+REM test a single unittest
+python manage.py test taskdatabase.tests.test_update_activity.TestUpdateActivity --settings=atdb.settings.dev
\ No newline at end of file
diff --git a/atdb/test.bat b/atdb/test.bat
index fd750b2fb86c6ae4c1ef6327aa5432b77b40027b..b61cef9bdfa8e551523fb43477e8bde18fe88d12 100644
--- a/atdb/test.bat
+++ b/atdb/test.bat
@@ -1 +1,2 @@
+REM runs the unit tests for ATDB
 python manage.py test --settings=atdb.settings.dev
\ No newline at end of file
diff --git a/atdb/test_local.bat b/atdb/test_local.bat
deleted file mode 100644
index e6bbdcea84bc1b249ee5ac4f313d02d5c8e6b99f..0000000000000000000000000000000000000000
--- a/atdb/test_local.bat
+++ /dev/null
@@ -1 +0,0 @@
-python manage.py test --settings atdb.settings.test_local