diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py
index f911e93ea7472c5ad9f40f900e15cd83134738c2..4ebd19677f6c961489439a61ae1a9d80fde9c4da 100644
--- a/SAS/TMSS/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/src/tmss/tmssapp/populate.py
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
 
 import json
 from lofar.sas.tmss.tmss.tmssapp.models.specification import Role, Datatype, Dataformat, CopyReason
-from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, StationType, Algorithm, ScheduleMethod
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, StationType, Algorithm, ScheduleMethod, Cluster
 
 def populate_choices(apps, schema_editor):
     '''
@@ -176,8 +176,14 @@ def _populate_subtask_with_correlator_schema_example():
 
     subtask_template = SubtaskTemplate.objects.get(name='correlator schema')
 
-    subtask_data = {"start_time": datetime.utcnow().isoformat(),
-                    "stop_time": datetime.utcnow().isoformat(),
+    cluster_data = {"name": "mycluster",
+                    "description": "the default cluster",
+                    "location": "upstairs",
+                    "tags": ["TMSS", "TESTING", "FAKE_DATA"]}
+    cluster_object = Cluster.objects.create(**cluster_data)
+
+    subtask_data = {"start_time": "2020-02-01T01:01:01",
+                    "stop_time": "2020-02-01T13:59:59",
                     "state": SubtaskState.objects.all()[0],
                     "specifications_doc": specifications_doc,
                     "task_blueprint": None,
@@ -186,7 +192,7 @@
                     "do_cancel": None,
                     "priority": 1,
                     "schedule_method": ScheduleMethod.objects.all()[0],
-                    "cluster": None,
+                    "cluster": cluster_object,
                     "scheduler_input_doc": ""}
 
     Subtask.objects.create(**subtask_data)
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
index c1f118930a075db792cf3c77bb9c0e1d4aa5be02..df8ca6d59312cf0d1e6f46bd449e7b5bccd90c09 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
@@ -8,6 +8,18 @@ from .lofar_viewset import LOFARViewSet
 from .. import models
 from .. import serializers
 from django_filters import rest_framework as filters
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask
+
+
+class subTaskFilter(filters.FilterSet):
+    class Meta:
+        model = Subtask
+        fields = {
+            'start_time': ['lt', 'gt'],
+            'stop_time': ['lt', 'gt'],
+            'cluster__name': ['exact', 'icontains'],
+        }
+
 
 class SubtaskConnectorViewSet(LOFARViewSet):
     queryset = models.SubtaskConnector.objects.all()
@@ -78,17 +90,10 @@ class DataproductFeedbackTemplateViewSet(LOFARViewSet):
     serializer_class = serializers.DataproductFeedbackTemplateSerializer
 
 
-class subTaskFilter(filters.FilterSet):
-
-    class Meta:
-        model = Subtask
-
-        fields = {}
-
-
 class SubtaskViewSet(LOFARViewSet):
     queryset = models.Subtask.objects.all()
     serializer_class = serializers.SubtaskSerializer
+    filter_backends = (filters.DjangoFilterBackend,)
     filter_class = subTaskFilter
 
     def get_queryset(self):
diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py
index 5b81a1f8323eb75f6321db1c71bb8a0bcfe3eb04..f6bc1c202764552277116de55778297b589e9dcc 100644
--- a/SAS/TMSS/src/tmss/urls.py
+++ b/SAS/TMSS/src/tmss/urls.py
@@ -141,10 +141,17 @@ router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSe
 
 # JSON
 router.register(r'task_draft', viewsets.TaskDraftViewSetJSONeditorOnline)
 router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSetJSONeditorOnline)
-router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline)
+#router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline)
+router.register(r'subtask', viewsets.SubtaskViewSet)
+# REST API call to list the subtasks on a cluster within a time window. For example:
+# https://....../api/subtask/?start_time__gt=2020-01-01T00:00:00&stop_time__lt=2020-03-01T00:00:00&cluster__name=myCluster
+#router.register(r'subtask_within_window', viewsets.SubtaskViewSetWithFilter)
+
 urlpatterns.extend(router.urls)
 
 # prefix everything for proxy
-urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),]
\ No newline at end of file
+urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),]
+
+
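For illustration only (not part of the patch): a minimal Python sketch of how a client would exercise the new `subTaskFilter` through the registered `/subtask` route. `BASE_URL` and `AUTH` are placeholder assumptions mirroring the TMSS test environment; adjust them to your deployment.

```python
import requests

# Placeholder assumptions, not defined by this patch:
BASE_URL = "http://localhost:8008/api"
AUTH = ("test", "test")

# Query subtasks on cluster 'myCluster' that start after 2020-01-01 and stop before 2020-03-01.
params = {"start_time__gt": "2020-01-01T00:00:00",
          "stop_time__lt": "2020-03-01T00:00:00",
          "cluster__name": "myCluster"}   # exact lookup; cluster__name__icontains is also exposed
response = requests.get(BASE_URL + "/subtask/", params=params, auth=AUTH)
response.raise_for_status()
print(response.json().get("count"))       # number of matching subtasks
```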
diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py b/SAS/TMSS/test/t_tmssapp_scheduling_functional.py
index 221846c23b5338464fb9759081868175c9bcdb61..ebe0f0acaccaeaa7e65faaf3ae886074f494f648 100755
--- a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py
+++ b/SAS/TMSS/test/t_tmssapp_scheduling_functional.py
@@ -27,7 +27,7 @@ # todo: behavior in a controlled way.
 # todo: We should probably also fully test behavior wrt mandatory and nullable fields.
 
-from datetime import datetime
+from datetime import datetime, timedelta
 import unittest
 import logging
 logger = logging.getLogger(__name__)
@@ -37,11 +37,15 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin
 
 # use setup/teardown magic for tmss test database, ldap server and django server
 # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module)
 from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
+from lofar.sas.tmss.test.tmss_test_data_django_models import *
+from lofar.sas.tmss.tmss.tmssapp import models
 
 # import and setup test data creator
 from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
 test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
 
+DJANGO_TIMEFORMAT = "%Y-%m-%dT%H:%M:%S"
+
 
 class SubtaskTemplateTestCase(unittest.TestCase):
     def test_subtask_template_list_apiformat(self):
@@ -1326,6 +1330,208 @@ class DataproductArchiveInfoTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, dpai_test_data['dataproduct'], 200, {})
 
 
+class SubtaskQuery(unittest.TestCase):
+    """
+    Test queries on the subtask REST api:
+    - query cluster only
+    - query start and stop time and cluster
+    - query start and stop time
+    - query start time and cluster
+    - query stop time and cluster
+    - query with incorrect input
+    """
+
+    def check_response_OK_and_result_count(self, response, expected_count):
+        """
+        Check that the http response has status code 200 (OK) and that the result list has the expected count
+        """
+        self.assertEqual(200, response.status_code)
+        json_response = response.json()
+        self.assertEqual(expected_count, json_response.get('count'))
+
+    @staticmethod
+    def create_cluster_object(cluster_name):
+        cluster_data = Cluster_test_data(name=cluster_name)
+        return models.Cluster.objects.create(**cluster_data)
+
+    @staticmethod
+    def create_multiple_subtask_object(total_number, cluster_name):
+        """
+        Create one subtask per day for the given number of days, with start_time 2 hours and
+        stop_time 4 hours from now, shifted by one day per subtask
+        """
+        cluster_object = SubtaskQuery.create_cluster_object(cluster_name)
+        for day_idx in range(0, total_number):
+            start_time = datetime.now() + timedelta(hours=2, days=day_idx)
+            stop_time = datetime.now() + timedelta(hours=4, days=day_idx)
+            subtask_data = Subtask_test_data(start_time=start_time.strftime(DJANGO_TIMEFORMAT),
+                                             stop_time=stop_time.strftime(DJANGO_TIMEFORMAT),
+                                             cluster_object=cluster_object)
+            models.Subtask.objects.create(**subtask_data)
+
+    subtasks_test_data_with_start_stop_time = {'clusterB': 50, 'clusterC': 30}
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        """
+        Set up once before the tests run. Create multiple subtask objects:
+        clusterA: 1 subtask with start and stop time now
+        clusterB: 50 subtasks with start 2hr and stop time 4hr from now, recurring 'every day'
+        clusterC: 30 subtasks with start 2hr and stop time 4hr from now, recurring 'every day'
+        """
+        cluster_object = SubtaskQuery.create_cluster_object("clusterA")
+        subtask_data = Subtask_test_data(cluster_object=cluster_object)
+        models.Subtask.objects.create(**subtask_data)
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            SubtaskQuery.create_multiple_subtask_object(period_length_in_days, cluster_name)
+
+    def test_query_cluster_only(self):
+        """
+        Check the query on cluster name.
+        Check status code and response length
+        """
+        logger.info("Check query on clusterA")
+        response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterA', auth=AUTH)
+        self.check_response_OK_and_result_count(response, 1)
+
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            logger.info("Check query on %s" % cluster_name)
+            response = requests.get(BASE_URL + '/subtask/?cluster__name=%s' % cluster_name, auth=AUTH)
+            self.check_response_OK_and_result_count(response, period_length_in_days)
+
+    def test_query_start_and_stop_time_and_cluster(self):
+        """
+        Check that subtasks can be queried on start and stop time and cluster name (B and C) over a period.
+        Check status code and response length
+        """
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            start_time = datetime.now()
+            stop_time = start_time + timedelta(days=period_length_in_days)
+            expected_count = period_length_in_days
+            logger.info("Check query in a period (%s until %s) for %s" %
+                        (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
+                                    (start_time, stop_time, cluster_name), auth=AUTH)
+            self.check_response_OK_and_result_count(response, expected_count)
+
+            logger.info("Check number of subtasks every day for %s" % cluster_name)
+            for day_idx in range(0, period_length_in_days):
+                start_time = datetime.now() + timedelta(days=day_idx)
+                stop_time = start_time + timedelta(days=1)
+                response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
+                                        (start_time, stop_time, cluster_name), auth=AUTH)
+                self.check_response_OK_and_result_count(response, 1)
+
+        logger.info("Check query in a period (%s until %s) for clusterNotExist" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
+                                (start_time, stop_time, "clusterNotExist"), auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
+    def test_query_start_and_stop_time(self):
+        """
+        Check that subtasks can be queried on start and stop time over a period.
+        Check status code and response length
+        """
+        period_length_in_days = 50  # max(B, C)
+        expected_count = 80  # B + C
+        start_time = datetime.now()
+        stop_time = start_time + timedelta(days=period_length_in_days)
+        logger.info("Check query in a period (%s until %s)" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
+                                (start_time, stop_time), auth=AUTH)
+        self.check_response_OK_and_result_count(response, expected_count)
+
+        logger.info("Check number of subtasks every day")
+        for day_idx in range(0, period_length_in_days):
+            start_time = datetime.now() + timedelta(days=day_idx)
+            stop_time = start_time + timedelta(days=1)
+            response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
+                                    (start_time, stop_time), auth=AUTH)
+            if day_idx >= 30:
+                expected_count = 1  # B only
+            else:
+                expected_count = 2  # B + C
+            self.check_response_OK_and_result_count(response, expected_count)
+
+    def test_query_start_and_cluster(self):
+        """
+        Check that subtasks can be queried on start time and cluster name (B and C) over a period.
+        Check status code and response length
+        """
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            start_time = datetime.now()
+            expected_count = period_length_in_days
+            logger.info("Check query greater than start_time (%s) for %s " %
+                        (start_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&cluster__name=%s' %
+                                    (start_time, cluster_name), auth=AUTH)
+            self.check_response_OK_and_result_count(response, expected_count)
+
+            logger.info("Check number of subtasks every day for %s" % cluster_name)
+            expected_count = period_length_in_days
+            for day_idx in range(0, period_length_in_days):
+                start_time = datetime.now() + timedelta(days=day_idx)
+                response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&cluster__name=%s' %
+                                        (start_time, cluster_name), auth=AUTH)
+                self.check_response_OK_and_result_count(response, expected_count)
+                expected_count -= 1  # every subsequent day one less
+
+    def test_query_stop_and_cluster(self):
+        """
+        Check that subtasks can be queried on stop time and cluster name (B and C) over a period.
+        Check status code and response length
+        """
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            stop_time = datetime.now() + timedelta(days=period_length_in_days)
+            logger.info("Check query less than stop_time (%s) for %s " %
+                        (stop_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            response = requests.get(BASE_URL + '/subtask/?stop_time__lt=%s&cluster__name=%s' %
+                                    (stop_time, cluster_name), auth=AUTH)
+            self.check_response_OK_and_result_count(response, period_length_in_days)
+
+            logger.info("Check number of subtasks every day for %s" % cluster_name)
+            expected_count = 1
+            for day_idx in range(0, period_length_in_days):
+                stop_time = datetime.now() + timedelta(days=day_idx+1)
+                response = requests.get(BASE_URL + '/subtask/?stop_time__lt=%s&cluster__name=%s' %
+                                        (stop_time, cluster_name), auth=AUTH)
+                self.check_response_OK_and_result_count(response, expected_count)
+                expected_count += 1  # every subsequent day one more
+
+    def test_query_wrong_input(self):
+        """
+        Check the query when wrong input is given:
+        - query on a non-existing cluster name
+        - query with start time later than stop time
+        - query with the start_time and stop_time lookups swapped (start_time__lt and stop_time__gt)
+        - query with a wrong query parameter name
+        Note: when the query parameter name is wrong, REST returns ALL objects (in this case 82)
+        """
+        response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterNotExist', auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
+        response = requests.get(BASE_URL + '/subtask/?cluster__error_in_query=clusterA', auth=AUTH)
+        self.check_response_OK_and_result_count(response, 82)
+
+        period_length_in_days = 50  # max(B, C)
+        stop_time = datetime.now()
+        start_time = stop_time + timedelta(days=period_length_in_days)
+        logger.info("Check 'wrong' query in a period (%s until %s)" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
+                                (start_time, stop_time), auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
+        start_time = datetime.now()
+        stop_time = start_time + timedelta(days=period_length_in_days)
+        logger.info("Check 'wrong' query in a period (%s until %s)" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__lt=%s&stop_time__gt=%s' %
+                                (start_time, stop_time), auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py
index 5ded050c3fdd8527cf032678f08d0ef4b5a1c8c9..708720da3cd6d69c1c3f787d83f5e0cfcfc9f24c 100644
--- a/SAS/TMSS/test/test_utils.py
+++ b/SAS/TMSS/test/test_utils.py
@@ -289,7 +289,6 @@ def main_test_database():
     print("Test-TMSS database up and running.")
     print("**********************************")
     print("DB Credentials ID: %s (for example to run tmms against this test db, call 'tmss -C %s')" % (db.dbcreds_id, db.dbcreds_id))
-    print()
     print("Press Ctrl-C to exit (and remove the test database automatically)")
     waitForInterrupt()
 
@@ -320,7 +319,10 @@ def main_test_environment():
     print("DB Credentials ID: %s" % (instance.database.dbcreds_id, ))
     print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, ))
     print("Django URL: %s" % (instance.django_server.url))
-
+    print()
+    print("Please copy-paste the following environment variables when using e.g. the Django manage.py command line:")
+    print("export TMSS_DBCREDENTIALS=%s" % instance.database.dbcreds_id)
+    print("export TMSS_LDAPCREDENTIALS=%s" % instance.django_server.ldap_dbcreds_id)
     print()
     print("Press Ctrl-C to exit (and remove the test database and django server automatically)")
     waitForInterrupt()
diff --git a/SAS/TMSS/test/testdata/readme.txt b/SAS/TMSS/test/testdata/readme.txt
index 04b3ab4948c81c4896050e36e3bc311112255d7d..e97209c0c283ad98f66316d53a3638b977e57ec0 100644
--- a/SAS/TMSS/test/testdata/readme.txt
+++ b/SAS/TMSS/test/testdata/readme.txt
@@ -1,6 +1,8 @@
-# Load testdata for subtask query
-# Use manage.py with the next environment variables:
-# export TMSS_DBCREDENTIALS=02949506-2c13-4028-a27a-c35cc69bc0ec
-# export TMSS_LDAPCREDENTIALS=0a2d3d5c-f757-4476-9331-72093f754c
-# Execute:
+Provide model data with fixture files, see https://docs.djangoproject.com/en/2.2/howto/initial-data/
+Use the Django manage.py command to load data from a JSON 'fixture' file.
+First set the following environment variables:
+export TMSS_DBCREDENTIALS=<DB Credentials ID>
+export TMSS_LDAPCREDENTIALS=<LDAP Credentials ID>
+The values for these environment variables are printed during startup of the 'tmss_test_environment'.
+Finally execute (from your project home):
 /usr/bin/python3 build/gnucxx11_opt/lib/python3.6/site-packages/lofar/sas/tmss/manage.py loaddata ./SAS/TMSS/test/testdata/subtasks.json
\ No newline at end of file
diff --git a/SAS/TMSS/test/testdata/subtasks.json b/SAS/TMSS/test/testdata/subtasks.json
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..33b4b5fdd0065781e50b70b542a01b06f44d22cf 100644
--- a/SAS/TMSS/test/testdata/subtasks.json
+++ b/SAS/TMSS/test/testdata/subtasks.json
@@ -0,0 +1,89 @@
+[
+  {
+    "model": "tmssapp.cluster",
+    "pk": 2,
+    "fields": {
+      "name": "bassieenadriaan",
+      "description": "the next cluster",
+      "location": "downstairs",
+      "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
+      "created_at": "2020-02-24T13:19:57",
+      "updated_at": "2020-02-24T13:19:57"
+    }
+  },
+  {
+    "model": "tmssapp.cluster",
+    "pk": 3,
+    "fields": {
+      "name": "peppieenkokkie",
+      "description": "the last cluster",
+      "location": "anywhere",
+      "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
+      "created_at": "2020-02-24T13:19:57",
+      "updated_at": "2020-02-24T13:19:57"
+    }
+  },
+  {
+    "model": "tmssapp.subtask",
+    "pk": 2,
+    "fields" : {
+      "start_time": "2020-01-02T00:00:00",
+      "stop_time": "2020-01-02T12:00:00",
+      "specifications_doc": 1,
+      "do_cancel": null,
+      "priority": 1,
+      "scheduler_input_doc": 1,
+      "state": "defined",
+      "task_blueprint": null,
+      "specifications_template": 1,
+      "schedule_method": "manual",
+      "cluster": 2,
+      "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
+      "created_at": "2020-02-24T13:19:57",
+      "updated_at": "2020-02-24T13:19:57"
+    }
+  },
+  {
+    "model": "tmssapp.subtask",
+    "pk": 3,
+    "fields" : {
+      "start_time": "2020-01-03T00:00:00",
+      "stop_time": "2020-01-03T12:00:00",
+      "specifications_doc": 1,
+      "do_cancel": null,
+      "priority": 1,
+      "scheduler_input_doc": 1,
+      "state": "defined",
+      "task_blueprint": null,
+      "specifications_template": 1,
+      "schedule_method": "manual",
+      "cluster": 3,
+      "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
+      "created_at": "2020-02-24T13:19:57",
+      "updated_at": "2020-02-24T13:19:57"
+    }
+  },
+  {
+    "model": "tmssapp.subtask",
+    "pk": 4,
+    "fields" : {
+      "start_time": "2020-01-04T00:00:00",
+      "stop_time": "2020-01-04T12:00:00",
+      "specifications_doc": 1,
+      "do_cancel": null,
+      "priority": 1,
+      "scheduler_input_doc": 1,
+      "state": "defined",
+      "task_blueprint": null,
+      "specifications_template": 1,
+      "schedule_method": "manual",
+      "cluster": 1,
+      "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ],
+      "created_at": "2020-02-24T13:19:57",
+      "updated_at": "2020-02-24T13:19:57"
+    }
+  }
+
+
+
+]
\ No newline at end of file
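As a side note to the fixture above and the readme.txt instructions, the same fixture can also be loaded programmatically via Django's standard `call_command`, e.g. from a test setUp. This is a hedged sketch only; the settings module name is a placeholder assumption, and the TMSS_* credential variables must be set as described in readme.txt.

```python
import os
import django
from django.core.management import call_command

# Placeholder assumption: replace with the actual TMSS Django settings module,
# and export TMSS_DBCREDENTIALS / TMSS_LDAPCREDENTIALS beforehand (see readme.txt).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lofar.sas.tmss.tmss.settings")
django.setup()

# Load the subtasks.json fixture shown above into the (test) database.
call_command("loaddata", "SAS/TMSS/test/testdata/subtasks.json")
```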
diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py
index 5b415a9c864db9d270c2643ef111599bf2661532..f167a785b38e216a599e250b6019139485970414 100644
--- a/SAS/TMSS/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/test/tmss_test_data_django_models.py
@@ -210,25 +210,36 @@ def SubtaskInput_test_data():
             "selection_template": models.SubtaskInputSelectionTemplate.objects.create(**SubtaskInputSelectionTemplate_test_data()),
             "tags":[]}
 
-def Subtask_test_data(subtask_template: models.SubtaskTemplate=None, specifications_doc: str=None):
+def Subtask_test_data(subtask_template: models.SubtaskTemplate=None, specifications_doc: str=None,
+                      start_time=None, stop_time=None, cluster_object=None):
+
     if subtask_template is None:
         subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data())
 
     if specifications_doc is None:
         specifications_doc = "{}"  # simplest json object
 
-    return { "start_time": datetime.utcnow().isoformat(),
-             "stop_time": datetime.utcnow().isoformat(),
-             "state": models.SubtaskState.objects.get(value='scheduling'),
-             "specifications_doc": specifications_doc,
-             "task_blueprint": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()),
-             "specifications_template": subtask_template,
-             "tags": ["TMSS", "TESTING"],
-             "do_cancel": datetime.utcnow().isoformat(),
-             "priority": 1,
-             "schedule_method": models.ScheduleMethod.objects.get(value='manual'),
-             "cluster": models.Cluster.objects.create(location="downstairs", tags=[]),
-             "scheduler_input_doc": "{}"}
+    if start_time is None:
+        start_time = datetime.utcnow().isoformat()
+
+    if stop_time is None:
+        stop_time = datetime.utcnow().isoformat()
+
+    if cluster_object is None:
+        cluster_object = models.Cluster.objects.create(name="dummy cluster", location="downstairs", tags=[])
+
+    return { "start_time": start_time,
+             "stop_time": stop_time,
+             "state": models.SubtaskState.objects.get(value='scheduling'),
+             "specifications_doc": specifications_doc,
+             "task_blueprint": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()),
+             "specifications_template": subtask_template,
+             "tags": ["TMSS", "TESTING"],
+             "do_cancel": datetime.utcnow().isoformat(),
+             "priority": 1,
+             "schedule_method": models.ScheduleMethod.objects.get(value='manual'),
+             "cluster": cluster_object,
+             "scheduler_input_doc": "{}"}
 
 def Dataproduct_test_data():
     return {"filename": "my.file",
@@ -274,8 +285,9 @@ def Filesystem_test_data():
             "cluster": models.Cluster.objects.create(**Cluster_test_data()),
             "tags": ['tmss', 'testing']}
 
-def Cluster_test_data():
-    return {"location": "upstairs",
+def Cluster_test_data(name="default cluster"):
+    return {"name": name,
+            "location": "upstairs",
             "tags": ['tmss', 'testing']}
 
 def DataproductArchiveInfo_test_data():
diff --git a/SAS/TriggerServices/django_rest/restinterface/triggerinterface/serializers.py b/SAS/TriggerServices/django_rest/restinterface/triggerinterface/serializers.py
index 9b06c7cf9b29b9cb8a6fb3c520b207a85afdf843..cf3dbf5f030deff9158b5bbe71d2560067bd5e9b 100644
--- a/SAS/TriggerServices/django_rest/restinterface/triggerinterface/serializers.py
+++ b/SAS/TriggerServices/django_rest/restinterface/triggerinterface/serializers.py
@@ -1,7 +1,7 @@
 """
 This is a stub. Currently not used, but can be implemented to populate and validate the trigger data model.
 
-Check views.py for data parsing and rendering on get/post.
+Check filter.py for data parsing and rendering on get/post.
 """
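Finally, a short usage sketch (not part of the patch) of the extended test-data factories from tmss_test_data_django_models.py above, creating a subtask on a named cluster with explicit start/stop times, much like the SubtaskQuery test setup does. It assumes a configured TMSS test database and Django environment.

```python
from datetime import datetime, timedelta

from lofar.sas.tmss.tmss.tmssapp import models
from lofar.sas.tmss.test.tmss_test_data_django_models import Cluster_test_data, Subtask_test_data

# Create a named cluster and one subtask on it, starting in 2 hours and stopping in 4 hours.
cluster = models.Cluster.objects.create(**Cluster_test_data(name="clusterB"))
start_time = datetime.utcnow() + timedelta(hours=2)
stop_time = datetime.utcnow() + timedelta(hours=4)
subtask = models.Subtask.objects.create(**Subtask_test_data(start_time=start_time.isoformat(),
                                                            stop_time=stop_time.isoformat(),
                                                            cluster_object=cluster))
```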