diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..e5b5dfa13a469f8f15cc845c139307952312f24e
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+*_fixture.json filter=lfs diff=lfs merge=lfs -text
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 392bbabca29e5ddf0fd43cd09714beef9499312c..2bf042acdfc2c7b75a0e6e2b2f3624bd4c8a201e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,10 +9,11 @@ workflow:
     - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
       variables:
         DOCKER_BUILD_IMAGE_TAG: ":stable"
+        DOCKER_IMAGE_TAG: ":latest"
     - if: $CI_COMMIT_REF_NAME != $CI_DEFAULT_BRANCH
       variables:
         DOCKER_BUILD_IMAGE_TAG: ":latest"
-
+        DOCKER_IMAGE_TAG: ":$CI_COMMIT_REF_NAME"
 
 test-code:
   image: python:3.10
@@ -34,11 +35,14 @@ docker-test-build:
     if: main
   # Official docker image.
   image: docker$DOCKER_BUILD_IMAGE_TAG
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
   stage: build
   services:
     - docker:dind
   script:
-    - docker build --pull -t "$CI_REGISTRY_IMAGE" ldvspec
+    - docker build --pull -t "$CI_REGISTRY_IMAGE$DOCKER_IMAGE_TAG" ldvspec
+    - docker push $CI_REGISTRY_IMAGE$DOCKER_IMAGE_TAG
 
 
 docker-build-master:
diff --git a/ldvspec/docker/docker-compose-production-cd.yml b/ldvspec/docker/docker-compose-production-cd.yml
index b03a4d5e8c82300328db46053203f80d738a85a8..3d737ceb63ec754646c3ceae1a4dfa185f10dc06 100644
--- a/ldvspec/docker/docker-compose-production-cd.yml
+++ b/ldvspec/docker/docker-compose-production-cd.yml
@@ -25,12 +25,34 @@ services:
       - ldv-spec-db:/var/lib/postgresql/data
     restart: always
 
+  rabbitmq:
+    image: rabbitmq:3-management
+    networks:
+      - ldv_network
+    container_name: ldv-spec-rabbit
+
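+  # Celery worker that runs background tasks for ldv-specification (same image as the web service).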
+  ldv-specification-background:
+    container_name: ldv-specification-background
+    image: git.astron.nl:5000/astron-sdc/ldv-specification:${LDVSPEC_VERSION:-latest}
+    networks:
+      - ldv_network
+    depends_on:
+      - ldv-spec-db
+    environment:
+      CELERY_BROKER_URL: amqp://guest@rabbitmq:5672
+      DJANGO_SETTINGS_MODULE: 'ldvspec.settings.docker_sdc'
+    env_file:
+      - $HOME/shared/ldvspec.env
+    command: celery -A ldvspec worker -l INFO
+    restart: always
+
   ldv-specification:
     container_name: ldv-specification
-    image: git.astron.nl:5000/astron-sdc/ldv-specification:latest
+    image: git.astron.nl:5000/astron-sdc/ldv-specification:${LDVSPEC_VERSION:-latest}
 
-    expose:
-      - "8000"
+    ports:
+      - "8000:8000"
     networks:
       - traefik_proxy
       - ldv_network
@@ -43,11 +64,10 @@ services:
 
     depends_on:
       - ldv-spec-db
+    environment:
+      CELERY_BROKER_URL: amqp://guest@rabbitmq:5672
     env_file:
     - $HOME/shared/ldvspec.env
-#    command: >
-#      sh -c "python manage.py collectstatic --settings=ldvspec.settings.docker_sdc --noinput &&
-#             python manage.py migrate --settings=ldvspec.settings.docker_sdc"
     restart: always
 
 volumes:
diff --git a/ldvspec/fixtures/lofardata_fixture.json b/ldvspec/fixtures/lofardata_fixture.json
new file mode 100644
index 0000000000000000000000000000000000000000..bf606d2d037b71c80d6d68964caa3bdf9a81c974
--- /dev/null
+++ b/ldvspec/fixtures/lofardata_fixture.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0200b3e8ab5c950e1d6faa36aa125ffd6e8036a770bf2db9b106c4b5132f9611
+size 4496313
diff --git a/ldvspec/ldvspec/celery.py b/ldvspec/ldvspec/celery.py
new file mode 100644
index 0000000000000000000000000000000000000000..21c10649a3d6ecfe655c513835408406bd3df1c2
--- /dev/null
+++ b/ldvspec/ldvspec/celery.py
@@ -0,0 +1,14 @@
+import os
+
+from celery import Celery
+
+# Set the default Django settings module for the 'celery' program.
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ldvspec.settings.dev")
+
+app = Celery("lofardata")
+
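+# Read all CELERY_-prefixed settings from the Django settings module.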
+app.config_from_object("django.conf:settings", namespace="CELERY")
+
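+# Auto-discover tasks.py modules in all installed Django apps.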
+app.autodiscover_tasks()
diff --git a/ldvspec/ldvspec/settings/base.py b/ldvspec/ldvspec/settings/base.py
index 24b8ce6551cf34df22f3ac1bed4f7c0485cbf939..8d54a412ed14e168cb05b599ea4f06ccdd330d7c 100644
--- a/ldvspec/ldvspec/settings/base.py
+++ b/ldvspec/ldvspec/settings/base.py
@@ -37,6 +37,7 @@ INSTALLED_APPS = [
     'corsheaders',
     'django_filters',
     'django_extensions',
+    'uws'
 ]
 
 MIDDLEWARE = [
@@ -131,4 +132,7 @@ REST_FRAMEWORK = {
     'PAGE_SIZE': 100
 }
 
-ATDB_HOST = os.environ.get('ATDB_HOST', 'http://localhost:8000/atdb/')
+# Celery broker URL; can be overridden via the CELERY_BROKER_URL environment variable.
+CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "amqp://guest@localhost:5672")
+
+UWS_WORKERS = ['lofardata.workers.query.Echo']
\ No newline at end of file
diff --git a/ldvspec/ldvspec/settings/dev.py b/ldvspec/ldvspec/settings/dev.py
index 6abeb007aa13b0c07a87cc74e16db6af2fa4f1f3..49cd8b50b60f8e4c93a4a980d7eb5d68def29a1b 100644
--- a/ldvspec/ldvspec/settings/dev.py
+++ b/ldvspec/ldvspec/settings/dev.py
@@ -22,3 +22,4 @@ DATABASES = {
 # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
 
 AUTH_PASSWORD_VALIDATORS = []
+
diff --git a/ldvspec/lofardata/admin.py b/ldvspec/lofardata/admin.py
index e42f80883d51fbabe15e412aaaa66445078aa39a..96c7adccc48beb8ea66b0160a75757b740106de9 100644
--- a/ldvspec/lofardata/admin.py
+++ b/ldvspec/lofardata/admin.py
@@ -1,7 +1,8 @@
 from django.contrib import admin
 
 # Register your models here.
-from .models import DataProduct, DataProductFilter
+from .models import DataProduct, DataProductFilter, ATDBProcessingSite
 
 admin.site.register(DataProduct)
 admin.site.register(DataProductFilter)
+admin.site.register(ATDBProcessingSite)
diff --git a/ldvspec/lofardata/migrations/0005_include_work_specification.py b/ldvspec/lofardata/migrations/0005_include_work_specification.py
new file mode 100644
index 0000000000000000000000000000000000000000..530c7c8ae7bef4fb31611f635f0eea6fa09392e5
--- /dev/null
+++ b/ldvspec/lofardata/migrations/0005_include_work_specification.py
@@ -0,0 +1,41 @@
+# Generated by Django 4.1 on 2022-08-10 13:23
+
+from django.conf import settings
+import django.contrib.postgres.fields
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('lofardata', '0004_auto_20220728_1007'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='dataproduct',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='dataproductfilter',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.CreateModel(
+            name='WorkSpecification',
+            fields=[
+                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('created_on', models.DateTimeField(auto_now_add=True)),
+                ('filters', models.JSONField(null=True)),
+                ('inputs', models.JSONField(null=True)),
+                ('related_tasks', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)),
+                ('is_ready', models.BooleanField(default=False)),
+                ('is_defined', models.BooleanField(default=False)),
+                ('async_task_result', models.CharField(max_length=100, null=True)),
+                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
+            ],
+        ),
+    ]
diff --git a/ldvspec/lofardata/migrations/0006_atdbprocessingsite_workspecification_processing_site.py b/ldvspec/lofardata/migrations/0006_atdbprocessingsite_workspecification_processing_site.py
new file mode 100644
index 0000000000000000000000000000000000000000..09e505b9f4809c540161592e2ebe21b691acc084
--- /dev/null
+++ b/ldvspec/lofardata/migrations/0006_atdbprocessingsite_workspecification_processing_site.py
@@ -0,0 +1,26 @@
+# Generated by Django 4.1 on 2022-08-10 15:56
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('lofardata', '0005_include_work_specification'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ATDBProcessingSite',
+            fields=[
+                ('name', models.CharField(max_length=100, primary_key=True, serialize=False)),
+                ('url', models.URLField()),
+            ],
+        ),
+        migrations.AddField(
+            model_name='workspecification',
+            name='processing_site',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='lofardata.atdbprocessingsite'),
+        ),
+    ]
diff --git a/ldvspec/lofardata/migrations/0007_workspecification_selected_workflow.py b/ldvspec/lofardata/migrations/0007_workspecification_selected_workflow.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d06758eac1d92be4ad0998756b0aeebe91a0be2
--- /dev/null
+++ b/ldvspec/lofardata/migrations/0007_workspecification_selected_workflow.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1 on 2022-08-11 09:24
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('lofardata', '0006_atdbprocessingsite_workspecification_processing_site'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='workspecification',
+            name='selected_workflow',
+            field=models.CharField(max_length=500, null=True),
+        ),
+    ]
diff --git a/ldvspec/lofardata/migrations/0008_atdbprocessingsite_access_token.py b/ldvspec/lofardata/migrations/0008_atdbprocessingsite_access_token.py
new file mode 100644
index 0000000000000000000000000000000000000000..4da68f96f8b6fdc4b7a1a7fc30c8c496a89b3b27
--- /dev/null
+++ b/ldvspec/lofardata/migrations/0008_atdbprocessingsite_access_token.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1 on 2022-08-11 11:56
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('lofardata', '0007_workspecification_selected_workflow'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='atdbprocessingsite',
+            name='access_token',
+            field=models.CharField(max_length=1000, null=True),
+        ),
+    ]
diff --git a/ldvspec/lofardata/models.py b/ldvspec/lofardata/models.py
index 5ee1fa210acf01a0d528c598e6b272345332e532..d49b10f697778bed8a6682f63642cb5c730bf25a 100644
--- a/ldvspec/lofardata/models.py
+++ b/ldvspec/lofardata/models.py
@@ -1,6 +1,8 @@
 from django.db import models
-from django_filters import rest_framework as filters
 from urllib.parse import urlsplit
+from django.contrib.auth.models import User
+from django.contrib.postgres.fields import ArrayField
+from celery.result import AsyncResult
 
 
 class DataLocation(models.Model):
@@ -74,3 +76,34 @@ class DataProductFilter(models.Model):
     field = models.CharField(max_length=100)
     name = models.CharField(max_length=20)
     lookup_type = models.CharField(max_length=100)
+
+
+class ATDBProcessingSite(models.Model):
+    name = models.CharField(primary_key=True, max_length=100)
+    url = models.URLField()
+    access_token = models.CharField(max_length=1000, null=True)
+
+
+class WorkSpecification(models.Model):
+    created_on = models.DateTimeField(auto_now_add=True)
+    created_by = models.ForeignKey(User, on_delete=models.DO_NOTHING, null=True)
+    filters = models.JSONField(null=True)
+    inputs = models.JSONField(null=True)
+    selected_workflow = models.CharField(max_length=500, null=True)
+    related_tasks = ArrayField(models.IntegerField(), null=True)
+    is_ready = models.BooleanField(default=False)
+    is_defined = models.BooleanField(default=False)
+    async_task_result = models.CharField(max_length=100, null=True)
+    processing_site = models.ForeignKey(ATDBProcessingSite, null=True, on_delete=models.DO_NOTHING)
+
+    def save(self, force_insert=False, force_update=False, using=None,
+             update_fields=None):
+        super(WorkSpecification, self).save(force_insert=force_insert, force_update=force_update, using=using,
+                                            update_fields=update_fields)
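+        # Dispatch the Celery task that expands the filters into inputs only once;
+        # the stored task id prevents re-dispatching on subsequent saves.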
+        if self.async_task_result is None:
+            from lofardata.tasks import define_work_specification
+            res: AsyncResult = define_work_specification.delay(self.pk)
+            self.async_task_result = res.id
+            super(WorkSpecification, self).save(update_fields=['async_task_result'])
diff --git a/ldvspec/lofardata/serializers.py b/ldvspec/lofardata/serializers.py
index f685fb619d7b66f46ef00023be19a953accd413a..e04ceba8c8e40c338f76461fa6e9f7c0e80da0f0 100644
--- a/ldvspec/lofardata/serializers.py
+++ b/ldvspec/lofardata/serializers.py
@@ -1,7 +1,7 @@
 from abc import ABC
 
 from rest_framework import serializers
-from .models import DataProduct, DataLocation
+from .models import DataProduct, DataLocation, WorkSpecification
 
 
 class DataLocationSerializer(serializers.ModelSerializer):
@@ -23,3 +23,15 @@ class DataProductFlatSerializer(serializers.ModelSerializer):
 
     def create(self, validated_data):
         return DataProduct.insert_dataproduct(**validated_data)
+
+
+class WorkSpecificationSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = WorkSpecification
+        fields = '__all__'
+
+    def create(self, validated_data):
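+        # The creator is always the authenticated user making the request.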
+        validated_data['created_by'] = self.context['request'].user
+        instance = super().create(validated_data)
+        return instance
diff --git a/ldvspec/lofardata/tasks.py b/ldvspec/lofardata/tasks.py
new file mode 100644
index 0000000000000000000000000000000000000000..0aca20b69df114961403e2b11cad8d800ee3ddfd
--- /dev/null
+++ b/ldvspec/lofardata/tasks.py
@@ -0,0 +1,15 @@
+from ldvspec.celery import app
+from lofardata.models import WorkSpecification, DataProduct
+
+
+@app.task
+def define_work_specification(workspecification_id):
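+    """Expand the specification's filters into the matching data product SURLs and mark it ready."""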
+    specification = WorkSpecification.objects.get(pk=workspecification_id)
+    filters = specification.filters
+
+    dataproducts = DataProduct.objects.filter(**filters)
+    inputs = {'surls': [dataproduct.surl for dataproduct in dataproducts]}
+    specification.inputs = inputs
+    specification.is_ready = True
+    specification.save()
diff --git a/ldvspec/lofardata/templates/lofardata/index.html b/ldvspec/lofardata/templates/lofardata/index.html
index 6b0cc1195dc4f4c1a86d794af709522ab056601f..98e5e9f5e6073ba4ab8145573428678760ecedd9 100644
--- a/ldvspec/lofardata/templates/lofardata/index.html
+++ b/ldvspec/lofardata/templates/lofardata/index.html
@@ -8,9 +8,18 @@
     <table class="table table-striped table-bordered table-sm">
 
         <tbody>
-            <tr><td>atdb_host</td><td>{{ atdb_host }}</td></tr>
+
+            <tr><td>atdb_host</td>
+                <td><ul>
+                {%  for atdb_host in atdb_hosts %}
+                    <li>{{ atdb_host.name }} ( <a href="{{ atdb_host.url }}">{{ atdb_host.url }}</a> )</li>
+                {% endfor %}
+                </ul>
+                </td>
+            </tr>
             <tr><td>api data</td><td><a href="{% url 'dataproduct' %}">{% url 'dataproduct' %}</a></td></tr>
             <tr><td>api data-location</td><td><a href="{% url 'datalocation' %}">{% url 'datalocation' %}</a></td></tr>
+            <tr><td>api work-specification</td><td><a href="{% url 'workspecification' %}">{% url 'workspecification' %}</a></td></tr>
 
             <tr><td>api-schema</td><td><a href="{% url 'openapi-schema' %}">{% url 'openapi-schema' %}</a></td></tr>
         </tbody>
diff --git a/ldvspec/lofardata/tests/test_workrequest.py b/ldvspec/lofardata/tests/test_workrequest.py
new file mode 100644
index 0000000000000000000000000000000000000000..736b91cff7b3fb6ac538ad2c392525dc0d983a2b
--- /dev/null
+++ b/ldvspec/lofardata/tests/test_workrequest.py
@@ -0,0 +1,65 @@
+import django.test as dtest
+import rest_framework.test as rtest
+import unittest.mock
+from django.contrib.auth.models import User
+import rest_framework.status as response_status
+from lofardata.models import WorkSpecification, DataProduct
+import lofardata.tasks as tasks
+
+
+class TestWorkSpecificationRequest(rtest.APITestCase):
+    def setUp(self):
+        self.user = User.objects.create_superuser('admin')
+        self.client.force_authenticate(self.user)
+
+    @unittest.mock.patch('lofardata.tasks.define_work_specification.delay')
+    def test_insert_work_request(self, mocked_task):
+        # Mocking business
+        mocked_task.return_value = unittest.mock.MagicMock()
+        mocked_task.return_value.id = 'asyncresultid'
+        # ----------------------------------
+        # Insert test data
+        DataProduct.insert_dataproduct(12345, 'lta',
+                                       'myniceinstrument', 'ms', 'blubby', 'pipline',
+                                       'srm://lofarlta/mynice_file.tar.gz', 123456, {'dysco_compression': False})
+
+        response = self.client.post('/ldvspec/api/v1/workspecification/',
+                                    data={'filters': {'obs_id': 12345}}, format='json')
+
+        self.assertEqual(response_status.HTTP_201_CREATED, response.status_code)
+        inserted_work_specification = response.json()
+        mocked_task.assert_called_with(inserted_work_specification['id'])
+        self.assertEqual('asyncresultid', inserted_work_specification['async_task_result'])
+        tasks.define_work_specification(inserted_work_specification['id'])
+        work_spec_object = WorkSpecification.objects.get(pk=inserted_work_specification['id'])
+        self.assertTrue(work_spec_object.is_ready)
+        self.assertEqual({'surls': ['srm://lofarlta/mynice_file.tar.gz']}, work_spec_object.inputs)
+
+    @unittest.mock.patch('lofardata.tasks.define_work_specification.delay')
+    def test_insert_work_request_nested_fields(self, mocked_task):
+        # Mocking business
+        mocked_task.return_value = unittest.mock.MagicMock()
+        mocked_task.return_value.id = 'asyncresultid'
+        # ----------------------------------
+        # Insert test data
+        DataProduct.insert_dataproduct(12345, 'lta',
+                                       'myniceinstrument', 'ms', 'blubby', 'pipline',
+                                       'srm://lofarlta/mynice_file.tar.gz', 123456, {'dysco_compression': False})
+        DataProduct.insert_dataproduct(12345, 'lta',
+                                       'myniceinstrument', 'ms', 'blubby', 'pipline',
+                                       'srm://lofarlta/mynice_file_with_rhythm.tar.gz', 123456,
+                                       {'dysco_compression': True})
+
+        response = self.client.post('/ldvspec/api/v1/workspecification/',
+                                    data={'filters': {'obs_id': 12345, 'additional_meta__dysco_compression': True}},
+                                    format='json')
+
+        self.assertEqual(response_status.HTTP_201_CREATED, response.status_code)
+        inserted_work_specification = response.json()
+        mocked_task.assert_called_with(inserted_work_specification['id'])
+        self.assertEqual('asyncresultid', inserted_work_specification['async_task_result'])
+        tasks.define_work_specification(inserted_work_specification['id'])
+        work_spec_object = WorkSpecification.objects.get(pk=inserted_work_specification['id'])
+        self.assertTrue(work_spec_object.is_ready)
+        self.assertEqual({'surls': ['srm://lofarlta/mynice_file_with_rhythm.tar.gz']}, work_spec_object.inputs)
+
diff --git a/ldvspec/lofardata/urls.py b/ldvspec/lofardata/urls.py
index 8095bd006459c5c072a298297afdd5723e33eaf4..e322ebe7a642f10b5c2e7ba11fa5f2e8be5f03e0 100644
--- a/ldvspec/lofardata/urls.py
+++ b/ldvspec/lofardata/urls.py
@@ -17,7 +17,10 @@ urlpatterns = [
     path('api/v1/insert_dataproduct/', views.InsertMultiDataproductView.as_view(), name='dataproduct-insert'),
     path('api/v1/data-location/', views.DataLocationView.as_view(), name='datalocation'),
     path('api/v1/data/<int:pk>/', views.DataProductDetailsView.as_view(), name='dataproduct-detail-view-api'),
+    path('api/v1/workspecification/', views.InsertWorkSpecification.as_view(),
+         name='workspecification'),
 
+    path('api/v1/uws/', include('uws.urls')),
     path('api/v1/openapi/', get_schema_view(
         title="LDV Specification",
         description="API description",
diff --git a/ldvspec/lofardata/views.py b/ldvspec/lofardata/views.py
index f45b6db780f2b06f9baa5e5806fc5c2338e2ed41..e3939b73fee4c795c888dd37d229f3533f9f6792 100644
--- a/ldvspec/lofardata/views.py
+++ b/ldvspec/lofardata/views.py
@@ -1,17 +1,15 @@
-import logging
-
 from django.shortcuts import render
 from django.conf import settings
+from django.contrib.auth.models import User
 
-from rest_framework import generics, pagination
-from rest_framework.views import APIView
-
-from rest_framework import status
-from rest_framework.response import Response
+from rest_framework import generics
 from django_filters import rest_framework as filters
+from rest_framework.schemas.openapi import AutoSchema
 
-from .models import DataProduct, DataProductFilter, DataLocation
-from .serializers import DataProductSerializer, DataProductFlatSerializer, DataLocationSerializer
+from .models import DataProduct, DataProductFilter, DataLocation, WorkSpecification, ATDBProcessingSite
+from .serializers import DataProductSerializer, \
+    DataProductFlatSerializer, DataLocationSerializer, \
+    WorkSpecificationSerializer
 
 
 class DynamicFilterSet(filters.FilterSet):
@@ -44,8 +42,8 @@ class DataProductFilterSet(DynamicFilterSet):
 # ---------- GUI Views -----------
 
 def index(request):
-    atdb_host = settings.ATDB_HOST
-    return render(request, "lofardata/index.html", {'atdb_host': atdb_host})
+    atdb_hosts = ATDBProcessingSite.objects.values('name', 'url')
+    return render(request, "lofardata/index.html", {'atdb_hosts': atdb_hosts})
 
 
 # ---------- REST API views ----------
@@ -72,15 +70,35 @@ class DataLocationView(generics.ListCreateAPIView):
     queryset = DataLocation.objects.all().order_by('name')
 
 
+class InsertMultiDataproductSchema(AutoSchema):
+    def get_operation_id_base(self, path, method, action):
+        return 'createDataProductMulti'
+
+
 class InsertMultiDataproductView(generics.CreateAPIView):
     """
     Add single DataProduct
     """
     queryset = DataProduct.objects.all()
     serializer_class = DataProductFlatSerializer
+    schema = InsertMultiDataproductSchema()
 
     def get_serializer(self, *args, **kwargs):
         """ if an array is passed, set serializer to many """
         if isinstance(kwargs.get('data', {}), list):
             kwargs['many'] = True
         return super().get_serializer(*args, **kwargs)
+
+
+class InsertWorkSpecification(generics.ListCreateAPIView):
+    queryset = WorkSpecification.objects.all()
+    serializer_class = WorkSpecificationSerializer
+
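+    # Staff and superusers can list every work specification; other users only their own.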
+    def get_queryset(self):
+        current_user: User = self.request.user
+        if not (current_user.is_staff or current_user.is_superuser):
+            return self.queryset.filter(created_by=current_user.id)
+        else:
+            return self.queryset
diff --git a/ldvspec/lofardata/workers/query.py b/ldvspec/lofardata/workers/query.py
new file mode 100644
index 0000000000000000000000000000000000000000..923934916c5dd5de596428eac5562dd3fa9d7659
--- /dev/null
+++ b/ldvspec/lofardata/workers/query.py
@@ -0,0 +1,31 @@
+from uws.classes import UWSJob
+from uws.client import Client
+from uws.workers import Worker
+from lofardata.models import DataProduct, WorkSpecification
+
+
+class Echo(Worker):
+    """A worker echoing all parameters"""
+
+    def __init__(self):
+        self._type = "echo"
+
+    def run(self, job: UWSJob, job_token: str, client: Client) -> None:
+        data = [{"key": p.key, "value": p.value} for p in job.parameters]
+        client.add_results(job.jobId, data, job_token)
+
+
+class PrepareWorkSpecification(Worker):
+
+    def __init__(self):
+        self._type = "query_dataproducts"
+
+    def run(self, job: UWSJob, job_token: str, client: Client) -> None:
+        # Look up the work specification referenced by the job parameters.
+        parameters = {p.key: p.value for p in job.parameters}
+        specification_id = parameters.pop('specification_id')
+        work_specification = WorkSpecification.objects.get(pk=specification_id)
+
+        # TODO: filter the matching data products and report them as results:
+        #   dataproducts = DataProduct.objects.filter(**work_specification.filters)
+        #   client.add_results(job.jobId, [], job_token)
diff --git a/ldvspec/requirements/base.txt b/ldvspec/requirements/base.txt
index b0d8418bd30fe83d55dec70a510771eeef51bf61..3276dfaea43d7450f8fe414dbe87b5561e9b8754 100644
--- a/ldvspec/requirements/base.txt
+++ b/ldvspec/requirements/base.txt
@@ -1,4 +1,4 @@
-Django==3.1.4
+Django==3.2
 djangorestframework==3.12.2
 django-filter==2.3.0
 psycopg2-binary==2.9.3
@@ -11,4 +11,5 @@ six==1.15.0
 fontawesome-free==5.15.2
 pyyaml==6.0
 uritemplate==4.1.1
-sshtunnel==0.4.0
\ No newline at end of file
+sshtunnel==0.4.0
+django-uws==0.2.dev355575