diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas index d5d9b7ce6e4cb7cdc2eb0b4c5fead89d55ae6a5f..e756c0d4799af4316fd89f27a7ad42227c0fc3fd 100644 --- a/Docker/lofar-ci/Dockerfile_ci_sas +++ b/Docker/lofar-ci/Dockerfile_ci_sas @@ -16,7 +16,7 @@ RUN yum erase -y postgresql postgresql-server postgresql-devel && \ cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres ENV PATH /usr/pgsql-9.6/bin:$PATH -RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate +RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy # Note: nodejs now comes with npm, do not install the npm package separately, since that will be taken from the epel repo and is conflicting. RUN echo "Installing Nodejs packages..." 
&& \ diff --git a/LTA/sip/lib/CMakeLists.txt b/LTA/sip/lib/CMakeLists.txt index a9851a135a5f82c62c0110b5d802d191a95a2fe0..7cfd5468d9f8430570aeea7bb77a6e4c52b0495e 100644 --- a/LTA/sip/lib/CMakeLists.txt +++ b/LTA/sip/lib/CMakeLists.txt @@ -14,6 +14,7 @@ set(_py_files constants.py visualizer.py query.py + station_coordinates.py ) @@ -25,6 +26,7 @@ set(resource_files python_install(${_py_files} DESTINATION lofar/lta/sip) + install(FILES ${resource_files} DESTINATION ${PYTHON_INSTALL_DIR}/lofar/lta/sip COMPONENT ${lower_package_name}) diff --git a/LTA/sip/lib/siplib.py b/LTA/sip/lib/siplib.py index bb3c6238a14a19117ee2d5379f838ddb6b04f66c..4f89a4fe91f5552972e9b43c0cc4afe903d9d9fc 100644 --- a/LTA/sip/lib/siplib.py +++ b/LTA/sip/lib/siplib.py @@ -28,6 +28,7 @@ from . import ltasip import pyxb from . import constants +from . import station_coordinates import os import uuid import xml.dom.minidom @@ -38,8 +39,6 @@ import logging logger = logging.getLogger(__name__) VERSION = "SIPlib 0.4" -d = os.path.dirname(os.path.realpath(__file__)) -STATION_CONFIG_PATH = d+'/station_coordinates.conf' ltasip.Namespace.setPrefix('sip') # todo: create docstrings for everything. @@ -144,30 +143,28 @@ class Station(): __afield1=None __afield2=None - with open(STATION_CONFIG_PATH, 'r') as f: - for line in f.readlines(): - if line.strip(): - field_coords = eval("dict("+line+")") # literal_eval does not accept dict definition via constructor. Make sure config file is not writable to prevent code execution! - for type in antennafieldtypes: - if field_coords["name"] == name+"_"+type: - __afield=AntennafieldXYZ( - type=type, + station_coords = station_coordinates.parse_station_coordinates() + for atype in antennafieldtypes: + if name+"_"+atype in station_coords.keys(): + field_coords = station_coords[name+"_"+atype] + __afield=AntennafieldXYZ( + type=atype, coordinate_system=field_coords["coordinate_system"], coordinate_unit=constants.LENGTHUNIT_M, # Does this make sense? 
I have to give a length unit according to the XSD, but ICRF should be decimal degrees?! coordinate_x=field_coords["x"], coordinate_y=field_coords["y"], coordinate_z=field_coords["z"]) - if not __afield1: - __afield1=__afield - elif not __afield2: - __afield2=__afield + if not __afield1: + __afield1=__afield + elif not __afield2: + __afield2=__afield if not __afield1: - raise Exception("no matching coordinates found for station:", name,"and fields",str(antennafieldtypes)) + raise Exception("no matching coordinates found for station:", name, "and fields", str(antennafieldtypes)) - if name.startswith( 'CS' ): + if name.startswith('CS'): sttype = "Core" - elif name.startswith( "RS" ): + elif name.startswith("RS"): sttype = "Remote" else: sttype = "International" diff --git a/LTA/sip/lib/station_coordinates.conf b/LTA/sip/lib/station_coordinates.conf index 07e488f9a72ccf960c6e6f30c9bc39823e3c7613..741cd1395f2a6a362e70335b7c97d0ac383eb746 100644 --- a/LTA/sip/lib/station_coordinates.conf +++ b/LTA/sip/lib/station_coordinates.conf @@ -154,3 +154,9 @@ coordinate_system='ITRF2005', x='3850973.9872', y='1439061.04111', z='4860478.99 coordinate_system='ITRF2005', x='3850980.8812', y='1438994.87911', z='4860498.993' , name='PL611_HBA' coordinate_system='ITRF2005', x='3551478.64311', y='1334128.4928', z='5110179.160' , name='PL612_LBA' coordinate_system='ITRF2005', x='3551481.8171', y='1334203.5728', z='5110157.410' , name='PL612_HBA' + +coordinate_system='ITRF2005', x='3801633.528060000', y='-529021.899396000', z='5076997.185' , name='IE613_LBA' +coordinate_system='ITRF2005', x='3801691.943300000', y='-528983.966429000', z='5076957.924' , name='IE613_HBA' + +coordinate_system='ITRF2005', x='3183318.032280000', y='1276777.654760000', z='5359435.077' , name='LV614_LBA' +coordinate_system='ITRF2005', x='3183249.285620000', y='1276801.742170000', z='5359469.949' , name='LV614_HBA' \ No newline at end of file diff --git a/LTA/sip/lib/station_coordinates.py
b/LTA/sip/lib/station_coordinates.py new file mode 100644 index 0000000000000000000000000000000000000000..f2952a203d2af5ee5578342eac1af1706c41662c --- /dev/null +++ b/LTA/sip/lib/station_coordinates.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 + +# This module provides functions for easy creation of a Lofar LTA SIP document. +# It builds upon a Pyxb-generated API from the schema definition, which is very clever but hard to use, since +# the arguments in class constructors and functions definitions are not verbose and there is no intuitive way +# to determine the mandatory and optional elements to create a valid SIP document. This module is designed to +# provide easy-to-use functions that bridges this shortcoming of the Pyxb API. +# +# Usage: Import module. Create an instance of Sip. +# Add elements through the Sip.add_X functions. Many require instances of other classes of the module. +# call getprettyxml() and e.g. save to disk. +# +# Note on validation: From construction through every addition, the SIP should remain valid (or throw an error +# that clearly points out where e.g. a given value does not meet the restrictions of the SIP schema. +# +# Note on code structure: This has to be seen as a compromise between elegant and maintainable code with well- +# structured inheritance close to the schema definition on the one hand, and something more straightforward to use, +# with flatter hierarchies on the other hand. +# +# Note on parameter maps: The ...Map objects are helper objects to create dictionaries for the commonly used +# constructor arguments of several other objects. This could alternatively also be implemented via inheritance from +# a supertype, and indeed is solved like this in the pyxb code. However, this then requires the use of an argument +# list pointer, which hides the list of required and optional arguments from the user. Alternatively, all arguments +# have to be mapped in all constructors repeatedly, creating lots of boilerplate code. 
This is the nicest approach +# I could think of that keeps the whole thing reasonably maintainable AND usable. + +import os +d = os.path.dirname(os.path.realpath(__file__)) +STATION_CONFIG_PATH = d+'/station_coordinates.conf' + + +def parse_station_coordinates() -> dict: + """ + :return: a dict mapping station field name, e.g. "CS002_LBA", to a dict containing ITRF coordinates + """ + station_coordinates = {} + with open(STATION_CONFIG_PATH, 'r') as f: + for line in f.readlines(): + if line.strip(): + field_coords = eval("dict(" + line + ")") # literal_eval does not accept dict definition via constructor. Make sure config file is not writable to prevent code execution! + station_coordinates[field_coords.pop("name")] = field_coords + return station_coordinates + + diff --git a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt index 47a6fc110c6e09c09bf272f1ee0f0f04a5a65407..3c5a89286a7c38d5b12fbb41aca524553cf443d7 100644 --- a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt @@ -10,6 +10,7 @@ set(_py_files validation.py subtasks.py tasks.py + conversions.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py index 50e2a205555195be071027cdc0894164c2726335..49c160a383e56cb9b54f6c44ddc0e0e400a71ad9 100644 --- a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py @@ -31,6 +31,7 @@ mapping_filterset_type_TMSS_2_SIP = { "HBA_210_250": constants.FILTERSELECTIONTYPE_210_250_MHZ } + def get_number_of_dataproducts_of_type(subtask, dataproduct_datatype): """ Retrieve the number of dataproducts of given data type in subtask: @@ -42,7 +43,7 @@ def get_number_of_dataproducts_of_type(subtask, dataproduct_datatype): subtask_outputs = list(SubtaskOutput.objects.filter(subtask_id=subtask.id)) for subtask_output in subtask_outputs: dataproducts = 
list(Dataproduct.objects.filter(producer_id=subtask_output.id, - dataformat=dataproduct_datatype)) + datatype=dataproduct_datatype)) nbr_dataproduct += len(dataproducts) return nbr_dataproduct @@ -248,8 +249,7 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct): "unknown": constants.STORAGEWRITERTYPE_UNKNOWN} try: - # todo: why is this not with the data but with the connector? The type of data should not depend on what it is used for? I don't get it... - what if we have several connectors? - dataproduct_type = type_map[dataproduct.producer.subtask.task_blueprint.specifications_template.output_connector_types.first().datatype.value] # todo: does not work on the main dataproduct? + dataproduct_type = type_map[dataproduct.datatype.value] except Exception as err: dataproduct_type = constants.DATAPRODUCTTYPE_UNKNOWN logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_type)) diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py new file mode 100644 index 0000000000000000000000000000000000000000..e851ecbe396955955f1ae9dc1f32890cb819b53d --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py @@ -0,0 +1,41 @@ +from astropy.time import Time +import astropy.units +from lofar.lta.sip import station_coordinates +from datetime import datetime +from astropy.coordinates.earth import EarthLocation + + +def local_sidereal_time_for_utc_and_station(timestamp: datetime = None, + station: str = 'CS002', + field: str = 'LBA', + kind: str = "apparent"): + """ + calculate local sidereal time for given utc time and station + :param timestamp: timestamp as datetime object + :param station: station name + :param field: antennafield, 'LBA' or 'HBA' + :param kind: 'mean' or 'apparent' + :return: + """ + if timestamp is None: + timestamp = datetime.utcnow() + station_coords = station_coordinates.parse_station_coordinates() + field_coords = 
station_coords["%s_%s" % (station, field)] + location = EarthLocation.from_geocentric(x=field_coords['x'], y=field_coords['y'], z=field_coords['z'], unit=astropy.units.m) + return local_sidereal_time_for_utc_and_longitude(timestamp=timestamp, longitude=location.lon.to_string(decimal=True), kind=kind) + + +def local_sidereal_time_for_utc_and_longitude(timestamp: datetime = None, + longitude: float = 6.8693028, + kind: str = "apparent"): + """ + :param timestamp: timestamp as datetime object + :param longitude: decimal longitude of observer location (defaults to CS002 LBA center) + :param kind: 'mean' or 'apparent' + :return: + """ + if timestamp is None: + timestamp = datetime.utcnow() + t = Time(timestamp, format='datetime', scale='utc') + return t.sidereal_time(kind=kind, longitude=longitude) + diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 607273e7c9f438c01d81c5a90d077e7e79b3bd95..f327b9bb689bf9c622289872d7ffb9688182a768 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.9 on 2020-08-19 13:24 +# Generated by Django 3.0.8 on 2020-09-09 09:23 from django.conf import settings import django.contrib.postgres.fields @@ -52,6 +52,7 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('location', models.CharField(help_text='Human-readable location of the cluster.', max_length=128)), + ('archive_site', models.BooleanField(help_text='TRUE if this cluster is an archive site, FALSE if not (f.e. 
a local cluster, or user-owned cluster).')), ], options={ 'abstract': False, @@ -288,6 +289,7 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('capacity', models.BigIntegerField(help_text='Capacity in bytes')), + ('directory', models.CharField(help_text='Root directory under which we are allowed to write our data.', max_length=1024)), ], options={ 'abstract': False, @@ -342,6 +344,7 @@ class Migration(migrations.Migration): ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')), ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')), ('filler', models.BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')), + ('archive_subdirectory', models.CharField(help_text='Subdirectory in which this project will store its data in the LTA. The full directory is constructed by prefixing with archive_location→directory.', max_length=1024)), ], options={ 'abstract': False, @@ -1044,6 +1047,11 @@ class Migration(migrations.Migration): name='resource_type', field=models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ResourceType'), ), + migrations.AddField( + model_name='project', + name='archive_location', + field=models.ForeignKey(help_text='Ingest data to this LTA cluster only (NULLable). 
NULL means: no preference.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Filesystem'), + ), migrations.AddField( model_name='project', name='cycles', @@ -1136,6 +1144,11 @@ class Migration(migrations.Migration): name='dataformat', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), ), + migrations.AddField( + model_name='dataproduct', + name='datatype', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype'), + ), migrations.AddField( model_name='dataproduct', name='feedback_template', diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index 4ac8634bab3ccf3a644f423d5fca7330ef387a2f..d658ff43c17c43157e6e149c2a24bfc8024393da 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -269,6 +269,7 @@ class Dataproduct(BasicCommon): filename = CharField(max_length=128, help_text='Name of the file (or top-level directory) of the dataproduct. Adheres to a naming convention, but is not meant for parsing.') directory = CharField(max_length=1024, help_text='Directory where this dataproduct is (to be) stored.') dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT) + datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT) deleted_since = DateTimeField(null=True, help_text='When this dataproduct was removed from disk, or NULL if not deleted (NULLable).') pinned_since = DateTimeField(null=True, help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).') specifications_doc = JSONField(help_text='Dataproduct properties (f.e. 
beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.') @@ -308,10 +309,18 @@ class DataproductTransform(BasicCommon): class Filesystem(NamedCommon): capacity = BigIntegerField(help_text='Capacity in bytes') cluster = ForeignKey('Cluster', on_delete=PROTECT, help_text='Cluster hosting this filesystem.') + directory = CharField(max_length=1024, help_text='Root directory under which we are allowed to write our data.') + + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.directory and not self.directory.endswith('/'): + raise ValueError('directory value must end with a trailing slash!') # todo: ...and needs to start with slash? + + super().save(force_insert, force_update, using, update_fields) class Cluster(NamedCommon): location = CharField(max_length=128, help_text='Human-readable location of the cluster.') + archive_site = BooleanField(help_text='TRUE if this cluster is an archive site, FALSE if not (f.e. a local cluster, or user-owned cluster).') class DataproductArchiveInfo(BasicCommon): diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index f292c06a9e03d7a0e9a3d9e44626715c30daa714..d1d4c025536364b66a070d0e9ee9ec9c11ad9f18 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -323,6 +323,16 @@ class Project(NamedCommonPK): filler = BooleanField(default=False, help_text='Use this project to fill up idle telescope time.') project_category = ForeignKey('ProjectCategory', null=True, on_delete=PROTECT, help_text='Project category.') period_category = ForeignKey('PeriodCategory', null=True, on_delete=PROTECT, help_text='Period category.') + archive_location = ForeignKey('Filesystem', null=True, on_delete=PROTECT, help_text='Ingest data to this LTA cluster only (NULLable). 
NULL means: no preference.') + archive_subdirectory = CharField(max_length=1024, help_text='Subdirectory in which this project will store its data in the LTA. The full directory is constructed by prefixing with archive_location→directory.') + + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + if self.archive_subdirectory and not self.archive_subdirectory.endswith('/'): + raise ValueError('directory value must end with a trailing slash!') + if self.archive_subdirectory and self.archive_subdirectory.startswith('/'): + raise ValueError('directory value must be a relative path (and not start with a slash)!') + + super().save(force_insert, force_update, using, update_fields) # JK, 29/07/20 - after discussion with Sander, it turns out that the ticket TMSS-277 was a misunderstanding. # 'default' does not refer to 'default values' that are supposed to be filled in by the backend. diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index 67d8048759c79fc06e7d748627d57463e8e41439..9331a7264e4d97839d16ae9d989e01cfddf54de4 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -179,7 +179,7 @@ def populate_resources(apps, schema_editor): def populate_misc(apps, schema_editor): - cluster = Cluster.objects.create(name="CEP4", location="CIT") + cluster = Cluster.objects.create(name="CEP4", location="CIT", archive_site=False) fs = Filesystem.objects.create(name="LustreFS", cluster=cluster, capacity=3.6e15) diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index ddb337c7407fb40f5f3db99fee0da27c1a6856b4..6a5db732c5e58f98caf51656155e9762bcfca881 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -546,6 +546,7 @@ def schedule_qafile_subtask(qafile_subtask: Subtask): qafile_subtask_dataproduct = Dataproduct.objects.create(filename="L%s_QA.h5" % (qa_input.producer.subtask_id, ), 
directory="/data/qa/qa_files", dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_HDF5.value), + datatype=Datatype.objects.get(value=Datatype.Choices.QUALITY.value), # todo: is this correct? producer=qafile_subtask.outputs.first(), specifications_doc="", specifications_template=DataproductSpecificationsTemplate.objects.first(), # ????? @@ -596,6 +597,7 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask): obs_subtask = qafile_subtask.predecessors.first() qaplots_subtask_dataproduct = Dataproduct.objects.create(directory="/data/qa/plots/L%s" % (obs_subtask.id, ), dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_PLOTS.value), + datatype=Datatype.objects.get(value=Datatype.Choices.QUALITY.value), # todo: is this correct? producer=qaplots_subtask.outputs.first(), specifications_doc="", specifications_template=DataproductSpecificationsTemplate.objects.first(), # ????? @@ -715,6 +717,7 @@ def schedule_observation_subtask(observation_subtask: Subtask): Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr), directory=directory, dataformat=Dataformat.objects.get(value="MeasurementSet"), + datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? producer=subtask_output, specifications_doc={"sap": [sap_nr]}, # todo: set correct value. This will be provided by the RA somehow specifications_template=dataproduct_specifications_template, @@ -795,6 +798,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): output_dp = Dataproduct.objects.create(filename=filename, directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)), dataformat=Dataformat.objects.get(value="MeasurementSet"), + datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? 
producer=pipeline_subtask_output, specifications_doc={}, specifications_template=dataproduct_specifications_template, diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index 4614c940953d2a277b00cf1eb0589ef6efb1edd5..37f7a9cd70e92da9803737dd51b8cd19577e03b9 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -5,7 +5,9 @@ from django.shortcuts import get_object_or_404, render from lofar.sas.tmss.tmss.tmssapp import models from lofar.common.json_utils import get_default_json_object_for_schema from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset - +from datetime import datetime +import dateutil.parser +from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude def subtask_template_default_specification(request, subtask_template_pk:int): subtask_template = get_object_or_404(models.SubtaskTemplate, pk=subtask_template_pk) @@ -23,11 +25,41 @@ def subtask_parset(request, subtask_pk:int): subtask = get_object_or_404(models.Subtask, pk=subtask_pk) parset = convert_to_parset(subtask) return HttpResponse(str(parset), content_type='text/plain') - + + def index(request): return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','tmss_webapp/build/index.html')) #return render(request, "../../../frontend/frontend_poc/build/index.html") + def task_specify_observation(request, pk=None): task = get_object_or_404(models.TaskDraft, pk=pk) return HttpResponse("response", content_type='text/plain') + + +def utc(request): + return HttpResponse(datetime.utcnow().isoformat(), content_type='text/plain') + + +def lst(request): + # Handling optional parameters via django paths in urls.py is a pain, we access them on the request directly instead. 
+ timestamp = request.GET.get('timestamp', None) + station = request.GET.get('station', None) + longitude = request.GET.get('longitude', None) + + # conversions + if timestamp: + timestamp = dateutil.parser.parse(timestamp) # isot to datetime + if longitude: + longitude = float(longitude) + + if station: + lst_lon = local_sidereal_time_for_utc_and_station(timestamp, station) + elif longitude: + lst_lon = local_sidereal_time_for_utc_and_longitude(timestamp, longitude) + else: + # fall back to defaults + lst_lon = local_sidereal_time_for_utc_and_station(timestamp) + + # todo: do we want to return a dict, so users can make sure their parameters were parsed correctly instead? + return HttpResponse(str(lst_lon), content_type='text/plain') \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 53146045e08986f1cb8930e993b04129df909610..9b7895326ffebfad99aa9740faa0e649fdc6f608 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -28,6 +28,8 @@ from rest_framework.documentation import include_docs_urls from drf_yasg.views import get_schema_view from drf_yasg import openapi +from datetime import datetime + # # Django style patterns # @@ -54,7 +56,9 @@ urlpatterns = [ path('docs/', include_docs_urls(title='TMSS API')), re_path(r'^swagger(?P<format>\.json|\.yaml)$', swagger_schema_view.without_ui(cache_timeout=0), name='schema-json'), path('swagger/', swagger_schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'), - path('redoc/', swagger_schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc') + path('redoc/', swagger_schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'), + path(r'util/utc', views.utc, name="system-utc"), + path(r'util/lst', views.lst, name="conversion-lst"), ] diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index b19ddcd546e283f0e176ecaf57711bb3b8b8f03c..769fce231ac3bc18470ae3c974456d4ec089ff68 100644 --- 
a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -32,6 +32,7 @@ if(BUILD_TESTING) lofar_add_test(t_adapter) lofar_add_test(t_tasks) lofar_add_test(t_scheduling) + lofar_add_test(t_conversions) # To get ctest running file(COPY testdata DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) diff --git a/SAS/TMSS/test/t_conversions.py b/SAS/TMSS/test/t_conversions.py new file mode 100755 index 0000000000000000000000000000000000000000..ccd4025f6c4c21a43d63f5ccb6a55c3b764f0963 --- /dev/null +++ b/SAS/TMSS/test/t_conversions.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +# $Id: $ + +import os +import unittest +import datetime +import logging +import requests +import dateutil.parser +import astropy.coordinates + +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) +from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude + +# Do Mandatory setup step: +# use setup/teardown magic for tmss test database, ldap server and django server +# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) +from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * + +class SiderealTime(unittest.TestCase): + + def test_local_sidereal_time_for_utc_and_longitude_returns_correct_result(self): + # test result against known correct value + lst = local_sidereal_time_for_utc_and_longitude(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0)) + self.assertEqual(str(lst), '19h09m54.9567s') + + def test_local_sidereal_time_for_utc_and_longitude_considers_timestamp(self): + # test that the results differ for different timestamps + lst1 = local_sidereal_time_for_utc_and_longitude(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0)) + lst2 = local_sidereal_time_for_utc_and_longitude(timestamp=datetime.datetime(year=2020, month=1, day=2, hour=12, minute=0, second=0)) + self.assertNotEqual(str(lst1), str(lst2)) + + def test_local_sidereal_time_for_utc_and_longitude_considers_longitude(self): + # test that the results differ for different longitudes + lst1 = local_sidereal_time_for_utc_and_longitude(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0), longitude=6.789) + lst2 = local_sidereal_time_for_utc_and_longitude(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0), longitude=6.123) + 
self.assertNotEqual(str(lst1), str(lst2)) + + def test_local_sidereal_time_for_utc_and_station_returns_correct_result(self): + # assert result against known correct value + lst = local_sidereal_time_for_utc_and_station(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0)) + self.assertEqual(str(lst), '19h09m55.0856s') + + def test_local_sidereal_time_for_utc_and_station_considers_timestamp(self): + # test that the results differ for different timestamps + lst1 = local_sidereal_time_for_utc_and_station(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0)) + lst2 = local_sidereal_time_for_utc_and_station(timestamp=datetime.datetime(year=2020, month=1, day=2, hour=12, minute=0, second=0)) + self.assertNotEqual(str(lst1), str(lst2)) + + def test_local_sidereal_time_for_utc_and_station_considers_station(self): + # test that the results differ for different stations + lst1 = local_sidereal_time_for_utc_and_station(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0), station="CS002") + lst2 = local_sidereal_time_for_utc_and_station(timestamp=datetime.datetime(year=2020, month=1, day=1, hour=12, minute=0, second=0), station="DE602") + self.assertNotEqual(str(lst1), str(lst2)) + + +class UtilREST(unittest.TestCase): + + def test_util_utc_returns_timestamp(self): + + # assert local clock differs not too much from returned TMSS system clock + r = requests.get(BASE_URL + '/util/utc', auth=AUTH) + self.assertEqual(r.status_code, 200) + returned_datetime = dateutil.parser.parse(r.content.decode('utf8')) + current_datetime = datetime.datetime.utcnow() + delta = abs((returned_datetime - current_datetime).total_seconds()) + self.assertTrue(delta < 60.0) + + def test_util_lst_returns_longitude(self): + + # assert returned value is a parseable hms value + for query in ['/util/lst', + '/util/lst?timestamp=2020-01-01T12:00:00', + '/util/lst?timestamp=2020-01-01T12:00:00&longitude=54.321', + 
'/util/lst?timestamp=2020-01-01T12:00:00&station=DE609']: + r = requests.get(BASE_URL + query, auth=AUTH) + self.assertEqual(r.status_code, 200) + lon_str = r.content.decode('utf8') + lon_obj = astropy.coordinates.Longitude(lon_str) + self.assertEqual(str(lon_obj), lon_str) + + def test_util_lst_considers_timestamp(self): + + # assert returned value matches known result for given timestamp + r = requests.get(BASE_URL + '/util/lst?timestamp=2020-01-01T12:00:00', auth=AUTH) + self.assertEqual(r.status_code, 200) + lon_str = r.content.decode('utf8') + self.assertEqual('19h09m55.0856s', lon_str) + + def test_util_lst_considers_station(self): + + # assert returned value differs when a different station is given + r1 = requests.get(BASE_URL + '/util/lst', auth=AUTH) + r2 = requests.get(BASE_URL + '/util/lst?station=DE602', auth=AUTH) + self.assertEqual(r1.status_code, 200) + self.assertEqual(r2.status_code, 200) + lon_str1 = r1.content.decode('utf8') + lon_str2 = r2.content.decode('utf8') + self.assertNotEqual(lon_str1, lon_str2) + + def test_util_lst_considers_longitude(self): + # assert returned value differs when a different longitude is given + r1 = requests.get(BASE_URL + '/util/lst', auth=AUTH) + r2 = requests.get(BASE_URL + '/util/lst?longitude=12.345', auth=AUTH) + self.assertEqual(r1.status_code, 200) + self.assertEqual(r2.status_code, 200) + lon_str1 = r1.content.decode('utf8') + lon_str2 = r2.content.decode('utf8') + self.assertNotEqual(lon_str1, lon_str2) + + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_conversions.run b/SAS/TMSS/test/t_conversions.run new file mode 100755 index 0000000000000000000000000000000000000000..d7c74389715a9cd50f3c36c5e406607f77c048f2 --- /dev/null +++ b/SAS/TMSS/test/t_conversions.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_conversions.py + diff --git a/SAS/TMSS/test/t_conversions.sh
b/SAS/TMSS/test/t_conversions.sh new file mode 100755 index 0000000000000000000000000000000000000000..c95892264d5c49a9a76e274e0b99c308fe8ae29c --- /dev/null +++ b/SAS/TMSS/test/t_conversions.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_conversions \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py index 719013734259c39a65aa0f8afc9719d3ee25658a..e874abfa07ce6c9e4f0254517e605b0c5d531c90 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py @@ -376,6 +376,15 @@ class FilesystemTest(unittest.TestCase): self.assertLess(before, entry.updated_at) self.assertGreater(after, entry.updated_at) + def test_Filesystem_raises_ValueError_on_invalid_directory_name(self): + + # setup + test_data = Filesystem_test_data(directory="/no/trailing/slash") + + # assert + with self.assertRaises(ValueError): + entry = models.Filesystem.objects.create(**test_data) + class ClusterTest(unittest.TestCase): def test_Cluster_gets_created_with_correct_creation_timestamp(self): diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/test/t_tmssapp_specification_django_API.py index e994df895e9f5167535d8981ce9ab552bc3cd69b..b2a6a26940dbc92980c3dee255527f1a0e28a837 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.py @@ -249,6 +249,19 @@ class ProjectTest(unittest.TestCase): self.assertLess(before, entry.updated_at) self.assertGreater(after, entry.updated_at) + def test_Project_raises_ValueError_on_invalid_archive_subdirectory_name(self): + + # setup + test_data_1 = Project_test_data(archive_subdirectory="no/trailing/slash") + test_data_2 = Project_test_data(archive_subdirectory="/with/leading/slash/") + + # assert + with self.assertRaises(ValueError): + entry = models.Project.objects.create(**test_data_1) + + with self.assertRaises(ValueError): + entry = 
models.Project.objects.create(**test_data_2) + class ProjectQuotaTest(unittest.TestCase): def test_ProjectQuota_prevents_missing_project(self): diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index dd093be160512794fd2c8a7025d4e8f6d0e2b5cf..dd47feef976db59124e5a732d65038a5074543ab 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -123,7 +123,7 @@ def Cycle_test_data() -> dict: "start": datetime.utcnow().isoformat(), "stop": datetime.utcnow().isoformat()} -def Project_test_data() -> dict: +def Project_test_data(archive_subdirectory="my_project/") -> dict: return { #"cycles": [models.Cycle.objects.create(**Cycle_test_data())], # ManyToMany, use set() "name": 'my_project_' + str(uuid.uuid4()), "description": 'my description ' + str(uuid.uuid4()), @@ -133,7 +133,8 @@ def Project_test_data() -> dict: "can_trigger": False, "private_data": True, "expert": True, - "filler": False} + "filler": False, + "archive_subdirectory": archive_subdirectory} def ResourceType_test_data() -> dict: return { @@ -368,7 +369,7 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat stop_time = datetime.utcnow() + timedelta(minutes=10) if cluster is None: - cluster = models.Cluster.objects.create(name="dummy cluster", location="downstairs", tags=[]) + cluster = models.Cluster.objects.create(name="dummy cluster", location="downstairs", archive_site=True, tags=[]) if state is None: state = models.SubtaskState.objects.get(value='defining') @@ -389,6 +390,7 @@ def Dataproduct_test_data(producer: models.SubtaskOutput=None, filename: str="my_file.ext", directory: str="/data/test-projects", dataformat: models.Dataformat=None, + datatype: models.Datatype=None, specifications_doc: object=None) -> dict: if producer is None: @@ -397,12 +399,16 @@ def Dataproduct_test_data(producer: models.SubtaskOutput=None, if dataformat is None: dataformat = 
models.Dataformat.objects.get(value="MeasurementSet") + if datatype is None: + datatype = models.Datatype.objects.get(value="visibilities") + if specifications_doc is None: specifications_doc={} return {"filename": filename, "directory": directory, "dataformat": dataformat, + "datatype": datatype, "deleted_since": None, "pinned_since": None, "specifications_doc": specifications_doc, @@ -430,14 +436,16 @@ def DataproductTransform_test_data() -> dict: "identity": True, "tags": ['tmss', 'testing']} -def Filesystem_test_data() -> dict: +def Filesystem_test_data(directory="/") -> dict: return {"capacity": 1111111111, "cluster": models.Cluster.objects.create(**Cluster_test_data()), + "directory": directory, "tags": ['tmss', 'testing']} def Cluster_test_data(name="default cluster") -> dict: return {"name": name, "location": "upstairs", + "archive_site": True, "tags": ['tmss', 'testing']} def DataproductArchiveInfo_test_data() -> dict: diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py index d919fbbcc46cddd25b80ccc6e091b43802775c64..fb60b94907ac79e5f2f00696c08c39de6249de38 100644 --- a/SAS/TMSS/test/tmss_test_data_rest.py +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -176,7 +176,8 @@ class TMSSRESTTestDataCreator(): "trigger_priority": 1000, "can_trigger": False, "private_data": True, - "cycles": []} + "cycles": [], + "archive_subdirectory": 'my_project/'} def ResourceType(self, description="my resource_type description"): return { @@ -444,6 +445,7 @@ class TMSSRESTTestDataCreator(): return {"name": name if name else "Cluster %s" % uuid.uuid4(), "description": 'My one cluster', "location": "upstairs", + "archive_site": False, "tags": ['tmss', 'testing']} def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None, state:str="defining"): @@ -458,7 +460,7 @@ class TMSSRESTTestDataCreator(): specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), 
'/subtask_template/') if specifications_doc is None: - specifications_doc = requests.get(specifications_template_url + 'default_specification/', auth=self.auth).content.decode('utf-8') + specifications_doc = requests.get(specifications_template_url + '/default_specification/', auth=self.auth).content.decode('utf-8') return {"start_time": datetime.utcnow().isoformat(), "stop_time": datetime.utcnow().isoformat(), @@ -480,7 +482,7 @@ class TMSSRESTTestDataCreator(): return {"subtask": subtask_url, "tags": []} - def Dataproduct(self, filename="my_filename", directory="/tmp/", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None, dataformat="MeasurementSet"): + def Dataproduct(self, filename="my_filename", directory="/tmp/", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None, dataformat="MeasurementSet", datatype="visibilities"): if specifications_template_url is None: specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/dataproduct_specifications_template/') @@ -493,6 +495,7 @@ class TMSSRESTTestDataCreator(): return {"filename": filename, "directory": directory, "dataformat": "%s/dataformat/%s" % (self.django_api_url, dataformat), + "datatype": "%s/datatype/%s" % (self.django_api_url, datatype), "deleted_since": None, "pinned_since": None, "specifications_doc": "{}", @@ -582,5 +585,6 @@ class TMSSRESTTestDataCreator(): "description": 'My one filesystem', "capacity": 1111111111, "cluster": cluster_url, + "directory": '/', "tags": ['tmss', 'testing']}