Commit 713df37a authored by Nico Vermaas

cleaning up model and serializers

adding CI/CD pipeline
parent 740b24c1
Pipeline #6983 passed with warnings
stages:
- build
- deploy
docker-build-master:
# Official docker image.
image: docker:latest
stage: build
services:
- docker:dind
before_script:
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
script:
- docker build --pull -t "$CI_REGISTRY_IMAGE" atdb
- docker push "$CI_REGISTRY_IMAGE"
only:
- master
docker-build-branch:
# Official docker image.
image: docker:latest
stage: build
services:
- docker:dind
before_script:
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
script:
- docker build --pull -t "$CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG" atdb
- docker push "$CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG"
except:
- master
# this deploys the master branch to sdc.astron.nl
# by running /docker_compose/atdb-ldv/docker-compose-cd.yml
docker-deploy-master:
image: docker:latest
stage: deploy
before_script:
# docker:latest is Alpine-based, so install the ssh client with apk rather than apt-get
- 'command -v ssh-agent >/dev/null || apk add --no-cache openssh-client'
- eval $(ssh-agent -s)
- echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
- ssh-keyscan sdc.astron.nl >> ~/.ssh/known_hosts
- chmod 644 ~/.ssh/known_hosts
script:
- ssh -o StrictHostKeyChecking=no vermaas@sdc.astron.nl "docker pull "$CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG""
- ssh -o StrictHostKeyChecking=no vermaas@sdc.astron.nl "docker-compose -p atdb -f /docker_compose/atdb-ldv/docker-compose-cd.yml up -d"
when: manual
only:
- master
\ No newline at end of file
@@ -43,6 +43,7 @@ Deployment Diagram:
> export DOCKER_COMPOSE_DIR=$DOCKER_BUILD_DIR/docker
> cd $DOCKER_BUILD_DIR
> git pull
> docker build -t atdb-ldv:latest .
> cd $DOCKER_COMPOSE_DIR
> docker-compose -p atdb up -d
version: '3.4'
networks:
atdb_network:
traefik_proxy:
external:
name: traefik_proxy
default:
driver: bridge
services:
atdb-ldv-db:
container_name: atdb-ldv-postgres
image: atdb-ldv-postgres:latest
expose:
- 5432
networks:
- traefik_proxy
- atdb_network
volumes:
- $HOME/shared:/shared
restart: always
atdb-backend:
container_name: atdb-ldv
image: git.astron.nl:5000/astron-sdc/atdb-ldv:latest
expose:
- "8000"
networks:
- traefik_proxy
- atdb_network
labels:
- "traefik.enable=true"
- "traefik.http.routers.atdb-backend.entryPoints=atdb-ldv"
- "traefik.http.routers.atdb-backend.service=atdb-backend"
- "traefik.http.routers.atdb-backend.rule=Host(`sdc.astron.nl`) && PathPrefix(`/atdb`)"
- "traefik.http.services.atdb-backend.loadbalancer.server.port=8000"
depends_on:
- atdb-ldv-db
restart: always
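Once this stack is up, Traefik should route the backend on the host and path defined by the labels above. A minimal smoke test of that route, assuming the /atdb/observations endpoint from the views further down is reachable (hostname and path come from the router rule; adjust to the actual deployment):

import requests

# host and path prefix come from the traefik router rule above
url = "https://sdc.astron.nl/atdb/observations"
response = requests.get(url, timeout=10)
response.raise_for_status()
payload = response.json()
# DRF list views return a paginated payload with a 'count' field
print(payload.get("count"), "observations")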
FROM nginx
RUN rm /etc/nginx/conf.d/default.conf
COPY atdb.conf /etc/nginx/conf.d/
# build the image like this:
# docker build -t atdb_nginx .
# run the container from here, like this:
# docker run -d --name atdb_nginx -p 81:8011 --mount type=bind,source="$(pwd)",target=/etc/nginx/conf.d/ --restart always atdb_nginx
upstream web {
ip_hash;
#server web:8010;
server 10.87.1.7:8010;
}
server {
server_name localhost;
listen 8011;
location / {
proxy_pass http://web/;
}
location /static/ {
alias /static/;
}
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
}
\ No newline at end of file
upstream web {
ip_hash;
#server web:8010;
server 192.168.22.30:8010;
}
server {
server_name localhost;
listen 8011;
location / {
proxy_pass http://web/;
}
location /static/ {
alias /static/;
}
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
}
\ No newline at end of file
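A quick local check of this proxy configuration, assuming the container was started with the docker run line shown above (host port 81 mapped to nginx's 8011; the static path is illustrative):

import requests

# '/' is proxied to the Django upstream; '/static/' is served from the alias
for path in ("/", "/static/"):
    r = requests.get(f"http://localhost:81{path}", timeout=5)
    print(path, r.status_code)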
from django.db import models
from django.urls import reverse
from django.utils.timezone import datetime
# from .services import algorithms
from django.db.models import Sum
from .services.common import timeit
# constants
datetime_format_string = '%Y-%m-%dT%H:%M:%SZ'
@@ -46,10 +44,6 @@ class TaskObject(models.Model):
my_status = models.CharField(db_index=True, max_length=50,default="defined")
node = models.CharField(max_length=10, null=True)
locality_policy = models.CharField(max_length=100, default="cold_tape")
# maximum lifetime on disk, in minutes (the default of 86400 minutes is 60 days)
max_lifetime_on_disk = models.IntegerField('max_lifetime_on_disk', default=86400)
def __str__(self):
return str(self.id)
@@ -83,52 +77,21 @@ class Observation(TaskObject):
# can be used to distinguish types of observations, like for ARTS.
process_type = models.CharField(max_length=50, default="observation")
observing_mode = models.CharField(max_length=50, default="imaging")
science_mode = models.CharField(max_length=50, default="", null=True)
# json object containing unmodelled parameters that are used by the 'executor' service
# to create the parset based on a template and these parameters
field_name = models.CharField(max_length=50, null=True)
field_ra = models.FloatField('field_ra', null=True)
field_ha = models.FloatField('field_ha', null=True)
field_dec = models.FloatField('field_dec', null=True)
field_beam = models.IntegerField('field_beam', default=0)
integration_factor = models.IntegerField('integration_factor', null=True)
central_frequency = models.FloatField('central_frequency', null=True)
control_parameters = models.CharField(max_length=255, default="unknown", null=True)
telescopes = models.CharField(max_length=100, default="all", null=True)
skip_auto_ingest = models.BooleanField(default=False)
parset_location = models.CharField(max_length=255,
default="/opt/apertif/share/parsets/parset_start_observation_atdb.template",
null=True)
delay_center_offset = models.CharField(max_length=50, null=True)
# ARTS SC1
par_file_name = models.CharField(max_length=255, default="source.par", null=True)
number_of_bins = models.IntegerField(null=True)
start_band = models.IntegerField(null=True)
end_band = models.IntegerField(null=True)
# ARTS SC4
beams = models.CharField(max_length=255, default="0..39")
quality = models.CharField(max_length=30, default="unknown")
science_observation = models.BooleanField(default=False)
filler = models.BooleanField(default=False)
ingest_progress = models.CharField(max_length=40, default="", null=True)
# several reporting queries use timestamps from the status history
# it is expensive to look for, so this is some redundancy for performance
timestamp_starting = models.DateTimeField('timestamp_starting', null=True)
timestamp_running = models.DateTimeField('timestamp_running', null=True)
timestamp_completing = models.DateTimeField('timestamp_completing', null=True)
timestamp_aborted = models.DateTimeField('timestamp_aborted', null=True)
timestamp_ingesting = models.DateTimeField('timestamp_ingesting', null=True)
timestamp_archived = models.DateTimeField('timestamp_archived', null=True)
timestamp_ingest_error = models.DateTimeField('timestamp_ingest_error', null=True)
# this translates a view-name (from urls.py) back to a URL, to avoid hardcoded URLs in the html templates
# bad : <td><a href="/atdb/observations/{{ observation.id }}/" target="_blank">{{ observation.taskID }} </a> </td>
# good: <td><a href="{{ observation.get_absolute_url }}" target="_blank">{{ observation.taskID }} </a> </td>
@@ -173,157 +136,3 @@ class DataProduct(TaskObject):
def __str__(self):
return self.filename
# --- Models for atdb_reporting ---------------------------------------
def get_timestamp_status(self, taskID, status):
"""
get the timestamp of a status for an observation with this taskID
:param taskID:
:param status:
:return:
"""
# for backward compatibility... since 26 July 2019 these timestamp fields are explicitly defined in the model,
# but old observations do not have them filled in, so for those the timestamp has to be read from the (slow) status history.
# first try the new (fast) field.
timestamp = None
if status == 'starting':
timestamp = self.timestamp_starting
elif status == 'running':
timestamp = self.timestamp_running
elif status == 'completing':
timestamp = self.timestamp_completing
elif status == 'ingesting':
timestamp = self.timestamp_ingesting
elif status == 'archived':
timestamp = self.timestamp_archived
elif status == 'aborted':
timestamp = self.timestamp_aborted
elif status == 'ingest error':
timestamp = self.timestamp_ingest_error
# then try the old (slow) status history mechanism
if timestamp is None:
queryset = Status.objects.filter(taskObject__taskID=taskID).filter(
taskObject__task_type='observation').filter(name__icontains=status).order_by('-timestamp')
if len(queryset) > 0:
observation = queryset[0]
timestamp = observation.timestamp
return timestamp
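In practice the helper reads the denormalised timestamp column first and only falls back to the status history for records predating July 2019. A hypothetical call (the taskID is borrowed from the filter examples in views.py below):

# hypothetical usage sketch
obs = Observation.objects.get(taskID='181120001')
archived_at = get_timestamp_status(obs, obs.taskID, 'archived')
if archived_at is None:
    print('no archived status recorded for task', obs.taskID)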
# only retrieve a limited number of fields (for better performance)
class TimesManager(models.Manager):
def get_queryset(self):
return super(TimesManager, self).get_queryset().only('taskID','observing_mode','endtime','starttime')
class Times(Observation):
"""
# this is a proxy model of Observation for reporting.
# What is this? : https://www.benlopatin.com/using-django-proxy-models/
"""
objects = TimesManager()
@property
def duration(self):
try:
duration = (self.endtime - self.starttime).seconds
except TypeError:
# invalid observations that lack a starttime or endtime would otherwise crash here
duration = None
return duration
@property
def total_size(self):
# sum the sizes of all dataproducts with this taskID, in MB
size = get_sum_from_dataproduct_field(self.taskID, 'size') / 1e6
return size
@property
def write_speed(self):
speed = None
if (self.total_size is not None) and (self.duration is not None):
speed = self.total_size / self.duration
return speed
@property
def timestamp_ingesting_derived(self):
timestamp = get_timestamp_status(self, self.taskID, 'ingesting')
return timestamp
@property
def timestamp_ingest_error_derived(self):
timestamp = get_timestamp_status(self, self.taskID, 'ingest error')
return timestamp
@property
def timestamp_archived_derived(self):
timestamp = get_timestamp_status(self, self.taskID, 'archived')
return timestamp
@property
def ingest_duration(self):
duration = None
# calculate the number of seconds between the two timestamps
if (self.timestamp_ingesting_derived is not None) and (self.timestamp_archived_derived is not None):
duration = (self.timestamp_archived_derived - self.timestamp_ingesting_derived).total_seconds()
# in case of an ingest error there is also a duration (albeit small)
elif (self.timestamp_ingesting_derived is not None) and (self.timestamp_ingest_error_derived is not None):
duration = (self.timestamp_ingest_error_derived - self.timestamp_ingesting_derived).total_seconds()
return duration
@property
def ingest_speed(self):
speed = None
if (self.total_size is not None) and (self.ingest_duration is not None):
speed = self.total_size / self.ingest_duration
return speed
class Meta:
proxy = True
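The proxy keeps the queryset narrow (via TimesManager) and derives the reporting numbers on access. A sketch of how the properties compose, with illustrative values; since total_size is in MB and duration in seconds, write_speed comes out in MB/s:

t = Times.objects.filter(observing_mode__icontains='imaging').first()
if t is not None and t.write_speed is not None:
    print(f"{t.taskID}: {t.total_size:.0f} MB in {t.duration} s "
          f"= {t.write_speed:.2f} MB/s")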
# only retrieve a limited number of fields (for better performance)
class TimeUsedManager(models.Manager):
def get_queryset(self):
return super(TimeUsedManager, self).get_queryset().only('taskID','observing_mode','endtime','starttime')
class TimeUsed(Observation):
"""
# this is a proxy model of Observation for reporting.
# What is this? : https://www.benlopatin.com/using-django-proxy-models/
"""
objects = TimeUsedManager()
@property
def duration(self):
try:
duration = (self.endtime - self.starttime).seconds
except TypeError:
# invalid observations that lack a starttime or endtime would otherwise crash here
duration = None
return duration
@property
def timestamp_running_derived(self):
timestamp = get_timestamp_status(self, self.taskID, 'running')
return timestamp
@property
def timestamp_aborted_derived(self):
timestamp = get_timestamp_status(self, self.taskID, 'aborted')
return timestamp
@property
def timestamp_archived_derived(self):
timestamp = get_timestamp_status(self, self.taskID, 'archived')
return timestamp
class Meta:
proxy = True
\ No newline at end of file
from rest_framework import serializers
from .models import DataProduct, Observation, Status, TaskObject, Times
from .models import DataProduct, Observation, Status, TaskObject
import logging
logger = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ class DataProductSerializer(serializers.ModelSerializer):
fields = ('id','task_type','name','filename','description',
'taskID','creationTime','size','quality',
'my_status','new_status','status_history','parent',
'data_location','irods_collection','node')
'data_location','node')
class ObservationSerializer(serializers.ModelSerializer):
@@ -51,22 +51,11 @@ class ObservationSerializer(serializers.ModelSerializer):
class Meta:
model = Observation
fields = ('id','task_type', 'name', 'process_type','taskID','beamPattern',
'field_name','field_ra','field_ha','field_dec','field_beam','integration_factor','central_frequency',
'field_name','field_ra','field_dec',
'creationTime','starttime','endtime', 'duration', 'size',
'my_status','new_status','status_history',
'generated_dataproducts','telescopes',
'data_location', 'irods_collection','node','control_parameters',
'skip_auto_ingest','observing_mode','science_mode','parset_location',
'par_file_name','number_of_bins','start_band','end_band','beams', 'delay_center_offset',
'locality_policy','max_lifetime_on_disk','quality','science_observation','filler','ingest_progress',
'timestamp_starting','timestamp_running','timestamp_completing',
'timestamp_ingesting','timestamp_archived','timestamp_aborted','timestamp_ingest_error')
'data_location', 'node','control_parameters',
'skip_auto_ingest','observing_mode','beams',
'quality','ingest_progress')
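With the trimmed field list, serialising an observation still yields a flat JSON object; a short usage sketch (the taskID is illustrative):

obs = Observation.objects.get(taskID='181120001')
data = ObservationSerializer(obs).data
print(data['taskID'], data['my_status'], data['observing_mode'])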
class TimesSerializer(serializers.ModelSerializer):
#readonly = True
class Meta:
model = Times
fields = ('taskID','observing_mode','starttime','endtime',
'duration','total_size','write_speed',
'timestamp_ingesting_derived','timestamp_ingest_error_derived','ingest_duration','ingest_speed')
#read_only_fields = fields
@@ -46,20 +46,7 @@ urlpatterns = [
views.PostDataproductsView.as_view(),
name='post-dataproducts-view'),
# --- reports ---
# get observing times and ingest times
path('times', views.GetTimesView.as_view(), name='get-times'),
path('times-drf', views.GetTimesViewDRF.as_view(), name='get-times-drf'),
path('speeds', views.ReportSpeedsView.as_view(), name='report-speeds'),
# ex: /atdb/time-used?from=2019-06-01T00:00:00Z&to=2019-06-08T00:00:00Z
path('time-used', views.ReportTimeUsedView.as_view(), name='time-used'),
# --- controller resources ---
# ex: /atdb/mark-period-as?from=2019-06-01T00:00:00Z&to=2019-06-08T00:00:00Z&type=science,system,filler
path('mark-period-as', views.MarkPeriodAsView.as_view(), name='mark-period'),
path('observations/<int:pk>/setstatus/<new_status>/<page>',
views.ObservationSetStatus,
name='observation-setstatus-view'),
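The surviving controller route can be resolved by name rather than by hand-building the URL; a hypothetical example (pk, status and page values are illustrative, and the '/atdb' prefix assumes the app's usual mount point):

from django.urls import reverse

url = reverse('observation-setstatus-view',
              kwargs={'pk': 42, 'new_status': 'archived', 'page': 1})
# e.g. '/atdb/observations/42/setstatus/archived/1'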
@@ -13,9 +13,9 @@ from django.template import loader
from django.shortcuts import render, redirect
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from .models import DataProduct, Observation, Status, Times, TimeUsed
from .models import DataProduct, Observation, Status
from django.db.models import Q
from .serializers import DataProductSerializer, ObservationSerializer, StatusSerializer, TimesSerializer
from .serializers import DataProductSerializer, ObservationSerializer, StatusSerializer
from .forms import FilterForm
from .services import algorithms
@@ -35,9 +35,7 @@ class ObservationFilter(filters.FilterSet):
'process_type': ['exact', 'in', 'icontains'], #/atdb/observations?&process_type=observation
'observing_mode': ['exact', 'in', 'icontains', 'startswith'], # /atdb/observations/?observing_mode__icontains=arts
'field_ra': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'field_ha': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'field_dec': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'science_mode': ['exact', 'in', 'icontains'], # /atdb/observations?&science_mode=TAB
'name': ['exact', 'icontains'],
'my_status': ['exact', 'icontains', 'in', 'startswith'], #/atdb/observations?&my_status__in=archived,removing
'taskID': ['gt', 'lt', 'gte', 'lte','exact', 'icontains', 'startswith','in'],
@@ -45,23 +43,10 @@
'starttime' : ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'endtime': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'data_location': ['exact', 'icontains'],
'irods_collection': ['exact', 'icontains'],
'node': ['exact', 'in'],
'skip_auto_ingest': ['exact'],
'beams': ['exact', 'icontains'],
'delay_center_offset': ['exact', 'icontains'],
'quality': ['exact', 'icontains'],
'science_observation': ['exact'],
'filler': ['exact'],
'locality_policy': ['icontains', 'exact'],
'max_lifetime_on_disk': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_starting' : ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_running': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_completing': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_aborted': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_ingesting': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_archived': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'timestamp_ingest_error': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
}
# example: /atdb/dataproducts?status__in=created,archived
@@ -81,7 +66,6 @@ class DataProductFilter(filters.FilterSet):
'parent__taskID': ['exact', 'in', 'icontains'],
'my_status': ['exact', 'icontains', 'in'],
'data_location': ['exact', 'icontains'],
'irods_collection': ['exact', 'icontains'],
'node': ['exact', 'in'],
}
@@ -188,7 +172,6 @@ def get_searched_observations(search):
observations = Observation.objects.filter(
Q(taskID__contains=search) |
Q(observing_mode__icontains=search) |
Q(science_mode__icontains=search) |
Q(my_status__icontains=search) |
Q(field_name__icontains=search)).order_by('-creationTime')
return observations
@@ -490,249 +473,3 @@ class PostDataproductsView(generics.CreateAPIView):
'taskID': taskID,
})
# --- views for atdb_reporting -------------------------
class TimesFilter(filters.FilterSet):
# http://localhost:8000/atdb/times?taskID=181120001&observing_mode__contains=imaging
# http://localhost:8000/atdb/times?taskID__contains=1811&observing_mode__contains=imaging
# A direct filter on a @property field is not possible; this simulates that behaviour
#taskID = filters.Filter(field_name="observation__taskID",lookup_expr='exact')
class Meta:
model = Times
# https://django-filter.readthedocs.io/en/master/ref/filters.html?highlight=exclude
fields = {
'starttime': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
'endtime': ['gt', 'lt', 'gte', 'lte', 'contains', 'exact'],
#'duration': ['gt', 'lt', 'gte', 'lte', 'exact'],
'taskID': ['exact', 'in', 'range', 'gt', 'lt', 'gte', 'lte','contains'],
'observing_mode': ['exact', 'in','contains'],
#'total_size': ['gt', 'lt', 'gte', 'lte', 'exact'],
}
class TimesPagination(pagination.PageNumberPagination):
page_size = 1000
class GetTimesView(generics.ListAPIView):
"""
View to show some observing and ingesting times and speeds.
Writing my own serializer to check if it is faster than DRF (no difference noted).
"""
queryset = Times.objects.order_by('-taskID')
@timeit
def list(self, request, *args, **kwargs):
"""
Override the list method so that a custom-built JSON response can be returned.
This is faster than the DRF serializer and easier to customise.
:return: a custom response with a JSON structure
"""
# execute the 'TimesFilter' on the original queryset.
my_filter = TimesFilter(request.GET, queryset=self.get_queryset())
# qs.values() would be faster than using a serializer,
# but it only works for model fields, not for properties and aggregations
# values = my_filter.qs.values('taskID','ingest_speed')
my_times = []
for rec in my_filter.qs:
#logger.info(str(my_time))
my_time = {}
my_time['taskID'] = rec.taskID
my_time['observing_mode'] = rec.observing_mode
my_time['starttime'] = rec.starttime
my_time['endtime'] = rec.endtime
my_time['duration'] = rec.duration
my_time['total_size'] = rec.total_size
my_time['write_speed'] = rec.write_speed
my_time['timestamp_ingesting'] = rec.timestamp_ingesting_derived
my_time['timestamp_archived'] = rec.timestamp_archived_derived
my_time['timestamp_ingest_error'] = rec.timestamp_ingest_error_derived
my_time['ingest_duration'] = rec.ingest_duration
my_time['ingest_speed'] = rec.ingest_speed
my_times.append(my_time)
return Response({
'count': len(my_times),
'results': my_times,
# 'values' : values
})
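A request against this view, reusing the filter examples from TimesFilter above (host and values illustrative):

import requests

params = {'taskID__contains': '1811', 'observing_mode__contains': 'imaging'}
r = requests.get('http://localhost:8000/atdb/times', params=params, timeout=10)
payload = r.json()
print(payload['count'], 'records')
for row in payload['results'][:3]:
    print(row['taskID'], row['duration'], row['write_speed'])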
class GetTimesViewDRF(generics.ListAPIView):
"""
View to show some observing and ingesting times and speeds
Using DRF serializer
"""
queryset = Times.objects.order_by('-taskID')
serializer_class = TimesSerializer
pagination_class = TimesPagination
# using the Django Filter Backend - https://django-filter.readthedocs.io/en/latest/index.html
filter_backends = (filters.DjangoFilterBackend,)
filter_class = TimesFilter
class ReportSpeedsView(generics.ListAPIView):
"""
View to show some observing and ingesting times and speeds.
Similar to GetTimesView, but with a different serialisation
(Times is faster than Speeds)
"""
queryset = Times.objects.order_by('-taskID')
@timeit
def list(self, request, *args, **kwargs):
"""
Override the list method so that a custom-built JSON response can be returned.
This is faster than the DRF serializer and easier to customise.
:return: a custom response with a JSON structure
"""
# execute the 'TimesFilter' on the original queryset.
my_filter = TimesFilter(request.GET, queryset=self.get_queryset())
# qs.values() would be faster than using a serializer,
# but it only works for model fields, not for properties and aggregations
# values = my_filter.qs.values('taskID','ingest_speed')
datapoints = []
for rec in my_filter.qs:
try:
if rec.write_speed > 0:
datapoint = {}
datapoint['taskid'] = rec.taskID
datapoint['timestamp'] = rec.starttime
datapoint['type'] = 'observing'
datapoint['duration'] = rec.duration
datapoint['timestamp_end'] = rec.starttime + datetime.timedelta(seconds=rec.duration)
datapoint['speed_bps'] = rec.write_speed * 8 / 1000
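# note: write_speed is in MB/s, so "* 8 / 1000" yields Gbit/s, not bits per second as the _bps suffix suggests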
datapoints.append(datapoint)
if rec.ingest_speed is not None:
datapoint = {}
datapoint['taskid'] = rec.taskID
#nofrag, frag = rec.timestamp_ingesting.split('.')
#timestamp = datetime.datetime.strptime(nofrag, '%Y-%m-%dT%H:%M:%S')
datapoint['timestamp'] = rec.timestamp_ingesting_derived
datapoint['type'] = 'ingesting'
datapoint['duration'] = rec.ingest_duration
datapoint['timestamp_end'] = rec.timestamp_ingesting_derived + datetime.timedelta(seconds=rec.ingest_duration)
datapoint['speed_bps'] = rec.ingest_speed * 8 / 1000
datapoints.append(datapoint)
prev_ingest_speed = datapoint['speed_bps']
if rec.timestamp_ingest_error_derived is not None:
datapoint = {}
datapoint['taskid'] = rec.taskID
datapoint['timestamp'] = rec.timestamp_ingest_error_derived
datapoint['type'] = 'ingest_error'
datapoint['speed_bps'] = prev_ingest_speed
datapoints.append(datapoint)
except Exception as err:
# an unknown error, just skip that record and continue
logger.error("ReportSpeedsView: "+str(err))
pass
sorted_datapoints = sorted(datapoints, key=lambda k: k['timestamp'])
return Response({
'datapoints':sorted_datapoints
})
# --- MarkPeriod ---
class MarkPeriodAsView(generics.ListAPIView):
"""
Mark a time range or a range of taskIDs as a certain type
"""
queryset = TimeUsed.objects.all()
@timeit
def list(self, request, *args, **kwargs):
"""
Override the list method so that a custom-built JSON response can be returned.
This is faster than the DRF serializer and easier to customise.
:return: a custom response with a JSON structure
"""
# read the arguments from the query string; query_params.get() returns None when a parameter is absent.
# the range can be given either as 'taskid_from .. taskid_to' or as 'from .. to' timestamps
taskid_from = self.request.query_params.get('taskid_from')
taskid_to = self.request.query_params.get('taskid_to')
param_from = self.request.query_params.get('from')
param_to = self.request.query_params.get('to')
quality = self.request.query_params.get('quality')
observing_mode = self.request.query_params.get('observing_mode')
type = self.request.query_params.get('type')
changed_observations = algorithms.mark_period_as(param_from, param_to, taskid_from, taskid_to, type, quality, observing_mode)
return Response({
'changed_observations': changed_observations
})
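A sketch of invoking this endpoint, following the example URL in urls.py above (host and values illustrative):

import requests

params = {
    'from': '2019-06-01T00:00:00Z',
    'to': '2019-06-08T00:00:00Z',
    'type': 'science',
}
r = requests.get('http://localhost:8000/atdb/mark-period-as',
                 params=params, timeout=30)
print(r.json()['changed_observations'])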
class ReportTimeUsedView(generics.ListAPIView):
"""
An overview of time-on-sky and time-available per observing mode.
"""
queryset = TimeUsed.objects.all()
@timeit
def list(self, request, *args, **kwargs):
"""
Override the list method so that a custom-built JSON response can be returned.
This is faster than the DRF serializer and easier to customise.
:return: a custom response with a JSON structure
"""
# read the arguments from the query
param_to = self.request.query_params['to']
param_from = self.request.query_params['from']
report_type = self.request.query_params.get('report_type', 'time_used')
time_used_data = algorithms.get_time_used_data(param_to, param_from, report_type)
return Response({
'time_used_data':time_used_data
})
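And the matching sketch for the time-used report, following the example URL in urls.py above (host illustrative; 'from' and 'to' are required by this view):

import requests

params = {'from': '2019-06-01T00:00:00Z', 'to': '2019-06-08T00:00:00Z'}
r = requests.get('http://localhost:8000/atdb/time-used',
                 params=params, timeout=30)
print(r.json()['time_used_data'])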