Commit 0908fe88 authored by Roy de Goei's avatar Roy de Goei

TMSS-154: Add filters to query on subtasks including unittest (second commit try)

parent d632e00d
......@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
import json
from lofar.sas.tmss.tmss.tmssapp.models.specification import Role, Datatype, Dataformat, CopyReason
from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, StationType, Algorithm, ScheduleMethod
from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, StationType, Algorithm, ScheduleMethod, Cluster
def populate_choices(apps, schema_editor):
'''
......@@ -176,8 +176,14 @@ def _populate_subtask_with_correlator_schema_example():
subtask_template = SubtaskTemplate.objects.get(name='correlator schema')
subtask_data = {"start_time": datetime.utcnow().isoformat(),
"stop_time": datetime.utcnow().isoformat(),
cluster_data = {"name": "mycluster",
"description": "the default cluster",
"location": "upstairs",
"tags": ["TMSS", "TESTING", "FAKE_DATA"]}
cluster_object = Cluster.objects.create(**cluster_data)
subtask_data = {"start_time": "2020-02-01T01:01:01",
"stop_time": "2020-02-01T13:59:59",
"state": SubtaskState.objects.all()[0],
"specifications_doc": specifications_doc,
"task_blueprint": None,
......@@ -186,7 +192,7 @@ def _populate_subtask_with_correlator_schema_example():
"do_cancel": None,
"priority": 1,
"schedule_method": ScheduleMethod.objects.all()[0],
"cluster": None,
"cluster": cluster_object,
"scheduler_input_doc": ""}
Subtask.objects.create(**subtask_data)
......
......@@ -8,6 +8,18 @@ from .lofar_viewset import LOFARViewSet
from .. import models
from .. import serializers
from django_filters import rest_framework as filters
from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask
class subTaskFilter(filters.FilterSet):
    """django-filter FilterSet for the Subtask REST endpoint.

    Declares which query parameters the list view accepts, e.g.
    ``?start_time__gt=...&stop_time__lt=...&cluster__name=...`` to select
    subtasks within a time window on a given cluster.
    """
    class Meta:
        model = Subtask
        # Mapping of model field -> allowed lookup expressions.
        # 'cluster__name' traverses the FK to Cluster and matches its name
        # either exactly or case-insensitively as a substring.
        fields = {
            'start_time': ['lt', 'gt'],
            'stop_time': ['lt', 'gt'],
            'cluster__name': ['exact', 'icontains'],
        }
class SubtaskConnectorViewSet(LOFARViewSet):
queryset = models.SubtaskConnector.objects.all()
......@@ -78,17 +90,10 @@ class DataproductFeedbackTemplateViewSet(LOFARViewSet):
serializer_class = serializers.DataproductFeedbackTemplateSerializer
# NOTE(review): superseded placeholder version of the filter set. With an
# empty Meta.fields dict it declares no filterable fields, so it adds no
# query parameters; the populated subTaskFilter (start_time/stop_time/
# cluster__name lookups) defined elsewhere in this commit replaces it.
class subTaskFilter(filters.FilterSet):
    """Empty FilterSet for Subtask — declares no filter fields."""
    class Meta:
        model = Subtask
        fields = {}
class SubtaskViewSet(LOFARViewSet):
queryset = models.Subtask.objects.all()
serializer_class = serializers.SubtaskSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = subTaskFilter
def get_queryset(self):
......
......@@ -141,10 +141,17 @@ router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSe
# JSON
router.register(r'task_draft', viewsets.TaskDraftViewSetJSONeditorOnline)
router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSetJSONeditorOnline)
router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline)
#router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline)
router.register(r'subtask', viewsets.SubtaskViewSet)
# Rest API call to provide subtasks within window for a cluster For example:
# https://....../api/subtask?start_time__gt=2020-01-01T00:00:00&stop_time__lt=2020-03-01T00:00:00&cluster__name=myCluster
#router.register(r'subtask_within_window', viewsets.SubtaskViewSetWithFilter)
urlpatterns.extend(router.urls)
# prefix everything for proxy
urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),]
\ No newline at end of file
urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),]
......@@ -289,7 +289,6 @@ def main_test_database():
print("Test-TMSS database up and running.")
print("**********************************")
print("DB Credentials ID: %s (for example to run tmms against this test db, call 'tmss -C %s')" % (db.dbcreds_id, db.dbcreds_id))
print()
print("Press Ctrl-C to exit (and remove the test database automatically)")
waitForInterrupt()
......@@ -320,7 +319,10 @@ def main_test_environment():
print("DB Credentials ID: %s" % (instance.database.dbcreds_id, ))
print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, ))
print("Django URL: %s" % (instance.django_server.url))
print()
print("Please Copy-Paste the next environment variables when using for example Django manage.py command line:")
print("export TMSS_DBCREDENTIALS=%s" % instance.database.dbcreds_id)
print("export TMSS_LDAPCREDENTIALS=%s" % instance.django_server.ldap_dbcreds_id)
print()
print("Press Ctrl-C to exit (and remove the test database and django server automatically)")
waitForInterrupt()
......
# Load testdata for subtask query
# Use manage.py with the next environment variables:
# export TMSS_DBCREDENTIALS=02949506-2c13-4028-a27a-c35cc69bc0ec
# export TMSS_LDAPCREDENTIALS=0a2d3d5c-f757-4476-9331-72093f754c
# Execute:
Provide model data with fixture files https://docs.djangoproject.com/en/2.2/howto/initial-data/
Use the Django manage.py to loaddata from a json 'fixture' file
First set the next environment variables:
export TMSS_DBCREDENTIALS=<DB Credentials ID>
export TMSS_LDAPCREDENTIALS=<LDAP Credentials ID>
The environment variables are provided during startup of 'tmss_test_environment'
Finally Execute (in home of your project):
/usr/bin/python3 build/gnucxx11_opt/lib/python3.6/site-packages/lofar/sas/tmss/manage.py loaddata ./SAS/TMSS/test/testdata/subtasks.json
\ No newline at end of file
[
{
"model": "tmssapp.cluster",
"pk": 2,
"fields": {
"name": "bassieenadriaan",
"description": "the next cluster",
"location": "downstairs",
"tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
"created_at": "2020-02-24T13:19:57",
"updated_at": "2020-02-24T13:19:57"
}
},
{
"model": "tmssapp.cluster",
"pk": 3,
"fields": {
"name": "peppieenkokkie",
"description": "the last cluster",
"location": "anywhere",
"tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
"created_at": "2020-02-24T13:19:57",
"updated_at": "2020-02-24T13:19:57"
}
},
{
"model": "tmssapp.subtask",
"pk": 2,
"fields" : {
"start_time": "2020-01-02T00:00:00",
"stop_time": "2020-01-02T12:00:00",
"specifications_doc": 1,
"do_cancel": null,
"priority": 1,
"scheduler_input_doc": 1,
"state": "defined",
"task_blueprint": null,
"specifications_template": 1,
"schedule_method": "manual",
"cluster": 2,
"tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
"created_at": "2020-02-24T13:19:57",
"updated_at": "2020-02-24T13:19:57"
}
},
{
"model": "tmssapp.subtask",
"pk": 3,
"fields" : {
"start_time": "2020-01-03T00:00:00",
"stop_time": "2020-01-03T12:00:00",
"specifications_doc": 1,
"do_cancel": null,
"priority": 1,
"scheduler_input_doc": 1,
"state": "defined",
"task_blueprint": null,
"specifications_template": 1,
"schedule_method": "manual",
"cluster": 3,
"tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
"created_at": "2020-02-24T13:19:57",
"updated_at": "2020-02-24T13:19:57"
}
},
{
"model": "tmssapp.subtask",
"pk": 4,
"fields" : {
"start_time": "2020-01-04T00:00:00",
"stop_time": "2020-01-04T12:00:00",
"specifications_doc": 1,
"do_cancel": null,
"priority": 1,
"scheduler_input_doc": 1,
"state": "defined",
"task_blueprint": null,
"specifications_template": 1,
"schedule_method": "manual",
"cluster": 1,
"tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
"created_at": "2020-02-24T13:19:57",
"updated_at": "2020-02-24T13:19:57"
}
}
]
\ No newline at end of file
......@@ -210,25 +210,36 @@ def SubtaskInput_test_data():
"selection_template": models.SubtaskInputSelectionTemplate.objects.create(**SubtaskInputSelectionTemplate_test_data()),
"tags":[]}
def Subtask_test_data(subtask_template: models.SubtaskTemplate=None, specifications_doc: str=None,
                      start_time=None, stop_time=None, cluster_object=None):
    """Return a kwargs dict suitable for ``models.Subtask.objects.create``.

    Every argument is optional; any that is None is replaced by a freshly
    created default so each call yields a self-consistent data set.

    :param subtask_template: SubtaskTemplate to reference; a new one is
        created from SubtaskTemplate_test_data() when None.
    :param specifications_doc: JSON document string; "{}" (simplest json
        object) when None.
    :param start_time: ISO-8601 start time string; current UTC time when None.
    :param stop_time: ISO-8601 stop time string; current UTC time when None.
    :param cluster_object: Cluster instance; a "dummy cluster" is created
        when None.
    """
    # NOTE(review): the original text contained a duplicate one-line
    # signature and the pre-refactor return block (hard-coded utcnow times
    # and inline Cluster creation) left dead in front of the new logic —
    # diff residue; only the parameterized version below is kept.
    if subtask_template is None:
        subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data())

    if specifications_doc is None:
        specifications_doc = "{}"  # simplest json object

    if start_time is None:
        start_time = datetime.utcnow().isoformat()

    if stop_time is None:
        stop_time = datetime.utcnow().isoformat()

    if cluster_object is None:
        cluster_object = models.Cluster.objects.create(name="dummy cluster", location="downstairs", tags=[])

    return {"start_time": start_time,
            "stop_time": stop_time,
            "state": models.SubtaskState.objects.get(value='scheduling'),
            "specifications_doc": specifications_doc,
            "task_blueprint": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()),
            "specifications_template": subtask_template,
            "tags": ["TMSS", "TESTING"],
            "do_cancel": datetime.utcnow().isoformat(),
            "priority": 1,
            "schedule_method": models.ScheduleMethod.objects.get(value='manual'),
            "cluster": cluster_object,
            "scheduler_input_doc": "{}"}
def Dataproduct_test_data():
return {"filename": "my.file",
......@@ -274,8 +285,9 @@ def Filesystem_test_data():
"cluster": models.Cluster.objects.create(**Cluster_test_data()),
"tags": ['tmss', 'testing']}
def Cluster_test_data(name="default cluster"):
    """Return a kwargs dict suitable for ``models.Cluster.objects.create``.

    :param name: cluster name to use; defaults to "default cluster" so
        existing zero-argument callers keep working.
    """
    # NOTE(review): the original text also contained the stale zero-argument
    # signature and its partial return dict ahead of this version — diff
    # residue; only the parameterized version is kept.
    return {"name": name,
            "location": "upstairs",
            "tags": ['tmss', 'testing']}
def DataproductArchiveInfo_test_data():
......
"""
This is a stub.
Currently not used, but can be implemented to populate and validate the trigger data model.
Check views.py and filter.py for data parsing and rendering on get/post.
"""
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment