diff --git a/README.md b/README.md index faeee5c1b85b1ec371efc29944ab0ece865b80f1..66f4d98eab931efe0acad76d8c620c2aa0bc3010 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,8 @@ Deployment Diagram: > export DOCKER_BUILD_DIR=$HOME/my_docker/atdb-ldv/atdb-ldv/atdb > export DOCKER_COMPOSE_DIR=$DOCKER_BUILD_DIR/docker + > cd $DOCKER_BUILD_DIR + > git pull > cd $DOCKER_COMPOSE_DIR > docker-compose -p atdb up -d diff --git a/atdb/taskdatabase/models.py b/atdb/taskdatabase/models.py index c69d47278e5f8df01a8355c6bcdfad84974d1f53..66d4841292752340b9b1f8abbfffd05ff52976b6 100644 --- a/atdb/taskdatabase/models.py +++ b/atdb/taskdatabase/models.py @@ -111,7 +111,6 @@ class Observation(TaskObject): end_band = models.IntegerField(null=True) # ARTS SC4 - process_triggers = models.BooleanField(default=False) beams = models.CharField(max_length=255, default="0..39") quality = models.CharField(max_length=30, default="unknown") diff --git a/atdb/taskdatabase/serializers.py b/atdb/taskdatabase/serializers.py index df64b0113a6993cc89f8ee5cfb0ca23c24691416..0b0a4c740572f7ccbe3d1ed9c7698258c2ad7bd4 100644 --- a/atdb/taskdatabase/serializers.py +++ b/atdb/taskdatabase/serializers.py @@ -57,7 +57,7 @@ class ObservationSerializer(serializers.ModelSerializer): 'generated_dataproducts','telescopes', 'data_location', 'irods_collection','node','control_parameters', 'skip_auto_ingest','observing_mode','science_mode','parset_location', - 'par_file_name','number_of_bins','start_band','end_band','process_triggers','beams', 'delay_center_offset', + 'par_file_name','number_of_bins','start_band','end_band','beams', 'delay_center_offset', 'locality_policy','max_lifetime_on_disk','quality','science_observation','filler','ingest_progress', 'timestamp_starting','timestamp_running','timestamp_completing', 'timestamp_ingesting','timestamp_archived','timestamp_aborted','timestamp_ingest_error') diff --git a/atdb/taskdatabase/templates/taskdatabase/index.html b/atdb/taskdatabase/templates/taskdatabase/index.html index da4c482f493549316e7ca651fe35168aec1cf923..fa35e9aec93e2c0833530ae880926f49cfbf6c88 100644 --- a/atdb/taskdatabase/templates/taskdatabase/index.html +++ b/atdb/taskdatabase/templates/taskdatabase/index.html @@ -46,7 +46,7 @@ </div> {% include 'taskdatabase/pagination.html' %} </div> - <p class="footer"> Version 2.0.0 (11 dec 2020)</p> + <p class="footer"> Version 2.0.0 (15 dec 2020 - 08:00)</p> <script type="text/javascript"> (function(seconds) { diff --git a/atdb/taskdatabase/views.py b/atdb/taskdatabase/views.py index 7ab9e6d3955927d6bdc3e30c12ad15e953c7b2ce..4f216d30f4eb106f9985aa049a5610d402e7a32d 100644 --- a/atdb/taskdatabase/views.py +++ b/atdb/taskdatabase/views.py @@ -48,7 +48,6 @@ class ObservationFilter(filters.FilterSet): 'irods_collection': ['exact', 'icontains'], 'node': ['exact', 'in'], 'skip_auto_ingest': ['exact'], - 'process_triggers': ['exact'], 'beams': ['exact', 'icontains'], 'delay_center_offset': ['exact', 'icontains'], 'quality': ['exact', 'icontains'], diff --git a/atdb_interface_pip/README.rst b/atdb_interface_pip/README.rst deleted file mode 100644 index ee32709119ddbb4832c0f010a5ca756bc2ee71c7..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/README.rst +++ /dev/null @@ -1,34 +0,0 @@ -atdb_interface -============== -This module contains a service that run standalone and interact with the Apertif Task Database REST API. - -See 'atdb_interface -h' for help and 'atdb_interface -e' for examples. 
- -This package is required when using atdb_services - -Installing -^^^^^^^^^^ -To install a version from Nexus repository use:: - - > pip install <<nexus url>> --upgrade - or download and install the tarball, - > pip install atdb_interface.tar.gz --upgrade - -Within the development environment such as with PyCharm one can install the package within the virtualenv with which -PyCharm is configured. To avoid uninstall and install after each code change pip can install packages within development -mode:: - - (.env) > pip install -e ..project../atdb_interface_pip --upgrade - -This will install the package with soft links to the original code, such that one gets immediate refresh within PyCharm, -which is used for refactoring, code completion, imports etc. - -Uninstalling -^^^^^^^^^^^^ -Uninstall is trivial using the command (watch out for the '-'):: - - > pip uninstall atdb-interface - -or without confirmation:: - - > pip uninstall --yes atdb-interface \ No newline at end of file diff --git a/atdb_interface_pip/atdb_interface.egg-info/PKG-INFO b/atdb_interface_pip/atdb_interface.egg-info/PKG-INFO deleted file mode 100644 index b7592a3fdd2caa376bedf844d982c9f9f8e397a6..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface.egg-info/PKG-INFO +++ /dev/null @@ -1,10 +0,0 @@ -Metadata-Version: 1.0 -Name: atdb-interface -Version: 1.2.9 -Summary: ATDB interface -Home-page: https://www.astron.nl/wsrt/wiki/doku.php?id=atdb:atdb_interface -Author: Nico Vermaas - Astron -Author-email: vermaas@astron.nl -License: BSD -Description: UNKNOWN -Platform: UNKNOWN diff --git a/atdb_interface_pip/atdb_interface.egg-info/SOURCES.txt b/atdb_interface_pip/atdb_interface.egg-info/SOURCES.txt deleted file mode 100644 index e4282e3424038df43a21917665fd87e7e214632b..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface.egg-info/SOURCES.txt +++ /dev/null @@ -1,10 +0,0 @@ -README.rst -setup.py -atdb_interface/__init__.py -atdb_interface/atdb_interface.py -atdb_interface.egg-info/PKG-INFO -atdb_interface.egg-info/SOURCES.txt -atdb_interface.egg-info/dependency_links.txt -atdb_interface.egg-info/entry_points.txt -atdb_interface.egg-info/requires.txt -atdb_interface.egg-info/top_level.txt \ No newline at end of file diff --git a/atdb_interface_pip/atdb_interface.egg-info/dependency_links.txt b/atdb_interface_pip/atdb_interface.egg-info/dependency_links.txt deleted file mode 100644 index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/atdb_interface_pip/atdb_interface.egg-info/entry_points.txt b/atdb_interface_pip/atdb_interface.egg-info/entry_points.txt deleted file mode 100644 index a5448a6ff9ed0a266fb5e820b67a2412c3b60fb2..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface.egg-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -atdb_interface = atdb_interface.atdb_interface:main - diff --git a/atdb_interface_pip/atdb_interface.egg-info/requires.txt b/atdb_interface_pip/atdb_interface.egg-info/requires.txt deleted file mode 100644 index f2293605cf1b01dca72aad0a15c45b72ed5429a2..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -requests diff --git a/atdb_interface_pip/atdb_interface.egg-info/top_level.txt b/atdb_interface_pip/atdb_interface.egg-info/top_level.txt deleted file mode 100644 index 
643d97a1dd631803cab0848e4caf893c3ea0deee..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -atdb_interface diff --git a/atdb_interface_pip/atdb_interface/__init__.py b/atdb_interface_pip/atdb_interface/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/atdb_interface_pip/atdb_interface/atdb_interface.py b/atdb_interface_pip/atdb_interface/atdb_interface.py deleted file mode 100644 index 44cb7b02f5a3620e3a07fe586580e70fa63b3212..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface/atdb_interface.py +++ /dev/null @@ -1,825 +0,0 @@ -#!/usr/bin/python3 -import sys -import os -import requests -import json -import argparse -import datetime - -from pkg_resources import get_distribution -pkg_version = get_distribution('atdb_interface').version -""" -atdb_interface.py : a commandline tool to interface with the ATDB REST API. -:author Nico Vermaas - Astron -""" -LAST_UPDATE = "31 oct 2019" - -# ==================================================================== - -# The request header -ATDB_HEADER = { - 'content-type': "application/json", - 'cache-control': "no-cache", - 'authorization': "Basic YWRtaW46YWRtaW4=" -} - -DEFAULT_BACKEND_HOST = "http://atdb-test.astron.nl/atdb" -ATDB_HOST_DEV = "http://localhost:8000/atdb/" # your local development environment with Django webserver -ATDB_HOST_VM = "http://192.168.22.25/atdb" # your local Ansible/Vagrant setup for testing -ATDB_HOST_ACC = "http://192.168.22.25/atdb" # your local Ansible/Vagrant acceptance setup -ATDB_HOST_TEST = "http://atdb-test.astron.nl/atdb" # the atdb test environment -ATDB_HOST_PROD = "http://atdb.astron.nl/atdb" # the atdb production environment. - -TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" - - -class ATDB: - """ - Calibrators class, use to parse SIP and update the backend database - through REST API calls. - """ - def __init__(self, host, verbose=False): - """ - Constructor. - :param host: the host name of the backend. - :param username: The username known in Django Admin. - :param verbose: more information runtime. - :param header: Request header for Atdb REST requests with token authentication. - """ - # accept some presets to set host to dev, test, acc or prod - self.host = host - if self.host=='dev': - self.host = DEFAULT_BACKEND_HOST - elif self.host=='vm': - self.host = ATDB_HOST_VM - elif self.host=='test': - self.host = ATDB_HOST_TEST - elif self.host=='acc': - self.host = ATDB_HOST_ACC - elif self.host=='prod': - self.host = ATDB_HOST_PROD - if not self.host.endswith('/'): - self.host += '/' - - self.verbose = verbose - self.header = ATDB_HEADER - - def verbose_print(self, info_str): - """ - Print info string if verbose is enabled (default False) - :param info_str: String to print - """ - if self.verbose: - timestamp = datetime.datetime.now().strftime(TIME_FORMAT) - print(str(timestamp)+ ' - '+info_str) - - # === Backend requests ================================================================================ - @staticmethod - def reconstruct_beams_obsolete(payload_string): - """ - Normally, to translate the specification string to a json payload that the http requests understand, - all substrings like 'a,b,c' are replaced with "a","b","c". But this should not be done for the beams list, - which is given as something like beams=1..10,11,12 or beams=[1..10,11,12] in the specification. - So this function reconstructs that stting. 
(a bit ugly, but as usual time constraints force me to cut a corner). - :param payload_string: - :return: - """ - - # find the part where the 'beams' are already converted to a messy string: "beams": ""beams" : "1..10" , "11" , "12" , "new_status"", "new_status" - start = payload_string.find('"beams"') + 10 - - if (start>=10): - # if no beams were found, then ignore this step - end = payload_string.find('"new_status"') -3 - - # save the before and after parts of the string - before = payload_string[0:start] - after = payload_string[end:] - - # extract the middle part that needs to be changed: 1..10" , "11" , "12 - middle = payload_string[start:end] - - # remove all " and spaces, and surround it with "" again. - middle = middle.replace("\"","") - middle = middle.replace(" ","") - middle = '"' + middle + '"' - - # reconstruct the payload string - payload_string = before + middle + after - - return payload_string - - - def jsonifyPayload_obsolete(self, payload): - """ - {name=WSRTA180223003_B003.MS,filename=WSRTA180223003_B003.MS} => - {"name" : "WSRTA180223003_B003.MS" , "filename" : "WSRTA180223003_B003.MS"} - :param payload: - :return: payload_string - """ - - payload_string = str(payload).replace("{","{\"") - payload_string = payload_string.replace("}", "\"}") - payload_string = payload_string.replace("=", "\" : \"") - payload_string = payload_string.replace(",", "\" , \"") - - # reconstruct the lists by moving the brackets outside the double quotes - payload_string = payload_string.replace("\"[", "[\"") - payload_string = payload_string.replace("]\"", "\"]") - payload_string = payload_string.replace("/,", "/\",\"") - payload_string = payload_string.replace("u\"", "\"") - - #payload_string = json.dumps(payload) - # ugly: reconstruct beams string - payload_string = self.reconstruct_beams_obsolete(payload_string) - - self.verbose_print("payload_string: [" + payload_string+"]") - return payload_string - - def encodePayload(self, payload): - """ - - The POST body does not simply accept a payload dict, it needs to be translated to a string with some - peculiarities - :param payload: - :return: payload_string - """ - - payload_string = str(payload).replace("'","\"") - #payload_string = payload_string.replace(",", ",\n") - - # reconstruct the lists by moving the brackets outside the double quotes - payload_string = payload_string.replace("\"[", "[\"") - payload_string = payload_string.replace("]\"", "\"]") - payload_string = payload_string.replace("/,", "/\",\"") - payload_string = payload_string.replace("u\"", "\"") - - self.verbose_print("The payload_string: [" + payload_string+"]") - return payload_string - - - def GET_TaskObjectByTaskId(self, resource, taskid): - """ - Do a http GET request to the alta backend to find the Observation with the given runId - :runId runId: - """ - - url = self.host + resource - # create the querystring, external_ref is the mapping of this element to the alta datamodel lookup field - querystring = {"taskID": taskid} - - response = requests.request("GET", url, headers=self.header, params=querystring) - self.verbose_print("[GET " + response.url + "]") - - try: - json_response = json.loads(response.text) - results = json_response["results"] - taskobject = results[0] - return taskobject - except: - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - # ------------------------------------------------------------------------------# - # Main User functions # - # 
------------------------------------------------------------------------------# - - - def do_GET_ID(self, key, value): - """ - Get the id based on a field value of a resource. This is a generic way to retrieve the id. - :param resource: contains the resource, for example 'observations', 'dataproducts' - :param field: the field to search on, this will probably be 'name' or 'filename' - :param value: the value of the 'field' to search on. - :return id - """ - - # split key in resource and field - params = key.split(":") - resource = params[0] - field = params[1] - - url = self.host + resource + "?" + field + "=" + value - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - - try: - json_response = json.loads(response.text) - results = json_response["results"] - result = results[0] - id = result['id'] - return id - except: - return '-1' - #raise (Exception("ERROR: " + response.url + " not found.")) - - - def do_GET(self, key, id, taskid): - """ - Do a http GET request to the ATDB backend to find the value of one field of an object - :param key: contains the name of the resource and the name of the field separated by a colon. - :param id: the database id of the object. - :param taskid (optional): when the taskid (of an activity) is known it can be used instead of id. - """ - - # split key in resource and field - params = key.split(":") - resource = params[0] - field = params[1] - - if taskid!=None: - taskObject = self.GET_TaskObjectByTaskId(resource, taskid) - id = taskObject['id'] - - if id==None: - # give up and throw an exception. - raise (Exception("ERROR: no valid 'id' or 'taskid' provided")) - - url = self.host + resource + "/" + str(id) + "/" - self.verbose_print(('url: ' + url)) - - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - - try: - json_response = json.loads(response.text) - value = json_response[field] - return value - except Exception as err: - self.verbose_print("Exception : " + str(err)) - raise ( - Exception("ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_GET_LIST_page(self, key, query, page): - """ - get a list of objects that match the query. (oiginal do_GET_LIST function was left intact for - backward compatibility reasons). - The backend is paginated, so you may want to indicate which page you want to retrieve. - The function returns a list of values of the field indicated in 'key'. - The function also returns the total count, so you can calculate the number of pages to retrieve - :param key: - :param query: - :param page: - :return: - """ - - self.verbose_print("do_GET_LIST(" + key + "," + query + ")") - # split key in resource and field - params = key.split(":") - resource = params[0] - field = params[1] - - url = self.host + resource + "?" 
+ str(query) - if (page != None): - url = url + "&page=" + str(page) - - # self.verbose_print("url = " + url) - - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - - try: - json_response = json.loads(response.text) - results = json_response["results"] - # results = json.loads(response.text) - # loop through the list of results and extract the requested field (probably taskID), - # and add it to the return list. - list = [] - for result in results: - value = result[field] - list.append(value) - - count = json_response["count"] - - try: - # return the next page number, if available - next = json_response["next"] - pos_start = next.find("page") - pos_end = next.find("&",pos_start) - page_number = next[pos_start+5:pos_end] - - except: - page_number = 0 - - return list, count, page_number - - except Exception as err: - self.verbose_print("Exception : " + str(err)) - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - # python atdb_interface.py -o GET_LIST --key observations:taskID --query status=valid - def do_GET_LIST(self, key, query): - - self.verbose_print("do_GET_LIST(" + key + "," + query + ")") - # split key in resource and field - params = key.split(":") - resource = params[0] - field = params[1] - - url = self.host + resource + "?" + str(query) - - # self.verbose_print("url = " + url) - - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - - try: - json_response = json.loads(response.text) - results = json_response["results"] - #results = json.loads(response.text) - # loop through the list of results and extract the requested field (probably taskID), - # and add it to the return list. 
- list = [] - for result in results: - value = result[field] - list.append(value) - - return list - - except Exception as err: - self.verbose_print("Exception : " + str(err)) - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_GET_NextTaskID(self, timestamp, taskid_postfix=""): - """ - :param timestamp: timestamp on which the taskid is based - :param taskid_postfix: optional addition to the taskid, - like when taskid_postfix="_IMG" the taskid will become "190405001_IMG" - :return: taskid - """ - - self.verbose_print("do_GET_NextTaskID(" + str(timestamp) + ")") - - # construct the url - url = self.host + "get_next_taskid?timestamp=" + str(timestamp)+"&taskid_postfix="+taskid_postfix - - # do the request to the ATDB backend - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - - # parse the response - try: - json_response = json.loads(response.text) - taskID = json_response["taskID"] - return taskID - except Exception as err: - self.verbose_print("Exception : " + str(err)) - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_GET_Observation(self, taskid): - """ - Do a http request to the ATDB backend get all the observation parameters in the response - :param taskid - """ - self.verbose_print("do_GET_Observation(" + taskid + ")") - - # construct the url - url = self.host + "observations?taskID=" + str(taskid) - - # do the request to the ATDB backend - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - - # parse the response - try: - json_response = json.loads(response.text) - results = json_response["results"] - observation = results[0] - return observation - except Exception as err: - self.verbose_print("Exception : " + str(err)) - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - - def do_GET_NextObservation(self, my_status, observing_mode, datawriter): - """ - Do a http request to the ATDB backend get the next observation of a given status and observing_mode - :param my_status: status to search for (probably 'scheduled') - :param observing_mode: imaging or arts - :param taskid (optional): when the taskid (of an activity) is known it can be used instead of id. 
- """ - self.verbose_print("do_GET_NextObservation(" + my_status + "," + observing_mode + "," + datawriter + ")") - - # construct the url - url = self.host + "get_next_observation?my_status=" + str(my_status) + "&observing_mode=" + str(observing_mode) + "&datawriter=" + str(datawriter) - - # do the request to the ATDB backend - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - - # parse the response - try: - json_response = json.loads(response.text) - taskID = json_response["taskID"] - minutes_left = json_response["minutes_left"] - return taskID, minutes_left - except Exception as err: - self.verbose_print("Exception : " + str(err)) - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_PUT(self, key='observations', id=None, value=None, taskid=None): - """ - PUT a value to an existing field of a resource (table). - :param key: contains the name of the resource and the name of the field separated by a dot. observations.description - :param id: the database id of the object. - :param value: the value that has to be PUT in the key. If omitted, an empty put will be done to trigger the signals. - :param taskid (optional): when the taskid of an observation is known it can be used instead of id. - """ - - # split key in resource and field - if key.find(':')>0: - params = key.split(":") - resource = params[0] - field = params[1] - else: - resource = key - field = None - - if taskid!=None: - taskObject = self.GET_TaskObjectByTaskId(resource, taskid) - id = taskObject['id'] - - url = self.host + resource + "/" + str(id) + "/" - if id==None: - raise (Exception("ERROR: no valid 'id' or 'taskid' provided")) - - payload = {} - if field!=None: - payload[field]=value - payload = self.encodePayload(payload) - try: - response = requests.request("PUT", url, data=payload, headers=self.header) - self.verbose_print("[PUT " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - except: - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - # do_PUT_LIST(key = observations:new_status, taskid = 180223003, value = valid) - def do_PUT_LIST(self, key='dataproducts', taskid=None, value=None): - """ - PUT a value to an existing field of resource (table). - :param key: contains the name of the resource and the name of the field separated by a colon. observations:new_status - :param value: the value that has to be PUT in the key. If omitted, an empty put will be done to trigger the signals. 
- :param taskid: the value is PUT to all objects with the provided taskid - """ - - # split key in resource and field - if key.find(':')>0: - params = key.split(":") - resource = params[0] - field = params[1] - else: - resource = key - field = None - - get_key = resource+':id' - get_query= 'taskID='+taskid - ids = self.do_GET_LIST(get_key,get_query) - - for id in ids: - url = self.host + resource + "/" + str(id) + "/" - self.verbose_print(('url: ' + url)) - - payload = {} - if field!=None: - payload[field]=value - payload = self.encodePayload(payload) - try: - response = requests.request("PUT", url, data=payload, headers=self.header) - self.verbose_print("[PUT " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - except: - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_POST_obsolete(self, resource, payload): - """ - POST a payload to a resource (table). This creates a new object (observation or dataproduct) - This is the old function, left for backward compatibility. Use 'do_POST_json()' now. - :param resource: contains the resource, for example 'observations', 'dataproducts' - :param payload: the contents of the object to create in json format - """ - - url = self.host + resource + '/' - self.verbose_print(('payload: ' + payload)) - - payload = self.jsonifyPayload_obsolete(payload) - try: - response = requests.request("POST", url, data=payload, headers=self.header) - self.verbose_print("[POST " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - if not (response.status_code==200 or response.status_code==201): - raise Exception() - except Exception: - raise (Exception("ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_POST_json(self, resource, payload): - """ - POST a payload to a resource (table). This creates a new object (observation or dataproduct) - This function replaces the old do_POST function that still needed to convert the json content in a very ugly - :param resource: contains the resource, for example 'observations', 'dataproducts' - :param payload: the contents of the object to create in json format - """ - - url = self.host + resource + '/' - self.verbose_print(('payload: ' + payload)) - - try: - response = requests.request("POST", url, data=payload, headers=self.header) - self.verbose_print("[POST " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - if not (response.status_code==200 or response.status_code==201): - raise Exception() - except Exception: - raise (Exception("ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - def do_POST_dataproducts(self, taskid, dataproducts): - """ - POST (create) a batch of dataproducts for the (observation) with the given taskid. - This is done with a custom made http request to the ATDB backend - :param taskid: taskid of the observation - :param dataproducts: json list of dataproducts to be added to the provided taskid - """ - - # is 'dataproducts' a valid list of dataproducts? 
- try: - number_of_dataproducts = len(dataproducts) - self.verbose_print("do_POST_dataproducts(" + taskid + "," + str(number_of_dataproducts) + ")") - except Exception as err: - raise (Exception( - "ERROR: " + str(err))) - - # construct the url - url = self.host + "post_dataproducts?taskID=" + str(taskid) - - # encode the dictonary as proper json - payload = self.encodePayload(dataproducts) - try: - # do a POST request to the 'post_dataproducts' resource of the ATDB backend - response = requests.request("POST", url, data=payload, headers=self.header) - self.verbose_print("[POST " + response.url + "]") - - # if anything went wrong, throw an exception. - if not (response.status_code==200 or response.status_code==201): - raise Exception(str(response.status_code) + " - " + str(response.reason)) - except Exception as err: - raise (Exception("ERROR: " + str(err))) - - # if it has all succeeded, give back the taskid as an indication of success - return taskid - - - def do_DELETE(self, resource, id): - """ - Do a http DELETE request to the ATDB backend - """ - if id == None: - raise (Exception("ERROR: no valid 'id' provided")) - - # if a range of ID's is given then do multiple deletes - if (str(id).find('..')>0): - self.verbose_print("Deleting " + str(id) + "...") - s = id.split('..') - start = int(s[0]) - end = int(s[1]) + 1 - else: - # just a single delete - start = int(id) - end = int(id) + 1 - - for i in range(start,end): - url = self.host + resource + "/" + str(i) + "/" - - try: - response = requests.request("DELETE", url, headers=self.header) - self.verbose_print("[DELETE " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - except: - raise (Exception("ERROR: deleting " + url + "failed." + response.url)) - - - def do_setquality(self, key='observations', id=None, value=None, taskid=None): - """ - PUT a value to the quality field and call the setquality functionality to transmit the value to ALTA. - The interface of this function is as close as possible to the regular do_PUT function to avoid confusion. - :param key: contains the name of the resource and the name of the field separated by a dot. observations.description - :param id: the database id of the object. - :param value: the value that has to be PUT in the key. If omitted, an empty put will be done to trigger the signals. - :param taskid (optional): when the taskid of an observation is known it can be used instead of id. - """ - - # split key in resource and field - if key.find(':') > 0: - params = key.split(":") - resource = params[0] - field = params[1] - else: - resource = key - field = "quality" - - if taskid != None: - taskObject = self.GET_TaskObjectByTaskId(resource, taskid) - id = taskObject['id'] - - # this operation requires to http requests... 
- - # the first request is a PUT to ATDB to set the Quality field - url = self.host + resource + "/" + str(id) + "/" - if id==None: - raise (Exception("ERROR: no valid 'id' or 'taskid' provided")) - - payload = {} - if field!=None: - payload[field]=value - payload = self.encodePayload(payload) - try: - response = requests.request("PUT", url, data=payload, headers=self.header) - self.verbose_print("[PUT " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - except: - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str(response.content))) - - - # the second request is a GET to ATDB's 'setquality' resource, which will trigger a call to ALTA - url = self.host + resource + "/" + str(id) + "/setquality/" + value + "/1" - - try: - response = requests.request("GET", url, headers=self.header) - self.verbose_print("[GET " + response.url + "]") - self.verbose_print("Response: " + str(response.status_code) + ", " + str(response.reason)) - except: - raise (Exception( - "ERROR: " + str(response.status_code) + ", " + str(response.reason) + ', ' + str( - response.content))) - - - -# ------------------------------------------------------------------------------# -# Module level functions # -# ------------------------------------------------------------------------------# -def exit_with_error(message): - """ - Exit the code for an error. - :param message: the message to print. - """ - print(message) - sys.exit(-1) - - -def get_arguments(parser): - """ - Gets the arguments with which this application is called and returns - the parsed arguments. - If a parfile is give as argument, the arguments will be overrided - The args.parfile need to be an absolute path! - :param parser: the argument parser. - :return: Returns the arguments. - """ - args = parser.parse_args() - if args.parfile: - args_file = args.parfile - if os.path.exists(args_file): - parse_args_params = ['@' + args_file] - # First add argument file - # Now add command-line arguments to allow override of settings from file. - for arg in sys.argv[1:]: # Ignore first argument, since it is the path to the python script itself - parse_args_params.append(arg) - print(parse_args_params) - args = parser.parse_args(parse_args_params) - else: - raise (Exception("Can not find parameter file " + args_file)) - return args -# ------------------------------------------------------------------------------# -# Main # -# ------------------------------------------------------------------------------# - - -def main(): - """ - The main module. - """ - parser = argparse.ArgumentParser(fromfile_prefix_chars='@') - parser.add_argument("-v","--verbose", default=False, help="More information at run time.",action="store_true") - parser.add_argument("--host", nargs="?", default='test', help="Presets are 'dev', 'vm', 'test', 'acc', 'prod'. Otherwise give a full url like https://atdb.astron.nl/atdb") - parser.add_argument("--version", default=False, help="Show current version of this program", action="store_true") - parser.add_argument("--operation","-o", default="GET", help="GET, GET_ID, GET_LIST, POST, PUT, DELETE. 
Note that these operations will only work if you have the proper rights in the ALTA user database.") - parser.add_argument("--id", default=None, help="id of the object to PUT to.") - parser.add_argument("-t", "--taskid", nargs="?", default=None, help="Optional taskID which can be used instead of '--id' to lookup Observations or Dataproducts.") - parser.add_argument("--key", default="observations.title", help="resource.field to PUT a value to. Example: observations.title") - parser.add_argument("--query", "-q", default="taskID=180223003", help="Query to the REST API") - parser.add_argument("--value", default="", help="value to PUT in the resource.field. If omitted it will PUT the object without changing values, but the built-in 'signals' will be triggered.") - parser.add_argument("--payload", "-p", default="{}", help="Payload in json for the POST operation. To create new Observations or Dataproducts. (see examples)") - parser.add_argument("--show_examples", "-e", default=False, help="Show some examples",action="store_true") - parser.add_argument('--parfile', nargs='?', type=str, help='Parameter file') - - args = get_arguments(parser) - try: - atdb = ATDB(args.host, args.verbose) - - if (args.show_examples): - - print('atdb_interface.py version = '+ pkg_version + " (last updated " + LAST_UPDATE + ")") - print('---------------------------------------------------------------------------------------------') - print() - print('--- basic examples --- ') - print() - print("Show the 'status' for Observation with taskID 180720003") - print("> atdb_interface -o GET --key observations:my_status --taskid 180223003") - print() - print("GET the ID of Observation with taskID 180223003") - print("> atdb_interface -o GET_ID --key observations:taskID --value 180223003") - print() - print("GET the ID of Dataproduct with name WSRTA180223003_ALL_IMAGE.jpg") - print("> atdb_interface -o GET_ID --key dataproducts:name --value WSRTA180223003_ALL_IMAGE.jpg") - print() - print("GET the 'status' for Dataproduct with ID = 45") - print("> atdb_interface -o GET --key dataproducts:my_status --id 45") - print() - print("PUT the 'status' of dataproduct with ID = 45 on 'copied'") - print("> atdb_interface -o PUT --key dataproducts:new_status --id 45 --value copied") - print() - print("PUT the 'status' of observation with taskID 180720003 on 'valid'") - print("> atdb_interface -o PUT --key observations:new_status --value valid --taskid 180223003") - print() - print("DELETE dataproduct with ID = 46 from the database (no files will be deleted).") - print("> atdb_interface -o DELETE --key dataproducts --id 46") - print() - print("DELETE dataproducts with ID's ranging from 11..15 from the database (no files will be deleted).") - print("> atdb_interface -o DELETE --key dataproducts --id 11..15 -v") - print() - print('--- advanced examples --- ') - print() - print("GET_LIST of taskIDs for observations with status = 'valid'") - print("> atdb_interface -o GET_LIST --key observations:taskID --query status=valid") - print() - print("GET_LIST of IDs for dataproducts with status = 'invalid'") - print("> atdb_interface -o GET_LIST --key dataproducts:id --query status=invalid") - print() - print("PUT the field 'new_status' on 'valid' for all dataproducts with taskId = '180816001'") - print("> atdb_interface -o PUT_LIST --key dataproducts:new_status --taskid 180816001 --value valid") - print('---------------------------------------------------------------------------------------------') - return - - if (args.version): - print('--- 
atdb_interface.py version = '+ pkg_version + " (last updated " + LAST_UPDATE + ") ---") - return - - if (args.operation=='GET'): - result = atdb.do_GET(key=args.key, id=args.id, taskid=args.taskid) - print(result) - - if (args.operation == 'GET_ID'): - result = atdb.do_GET_ID(key=args.key, value=args.value) - print(result) - - if (args.operation == 'GET_LIST'): - result = atdb.do_GET_LIST(key=args.key, query=args.query) - print(result) - - if (args.operation=='PUT_LIST'): - atdb.do_PUT_LIST(key=args.key, taskid=args.taskid, value=args.value) - - if (args.operation=='PUT'): - atdb.do_PUT(key=args.key, id=args.id, value=args.value, taskid=args.taskid) - - if (args.operation=='SET_QUALITY'): - atdb.do_setquality(key=args.key, id=args.id, value=args.value, taskid=args.taskid) - - if (args.operation=='POST'): - atdb.do_POST_json(resource=args.key, payload=args.payload) - - if (args.operation=='DELETE'): - atdb.do_DELETE(resource=args.key, id=args.id) - - except Exception as exp: - exit_with_error(str(exp)) - - sys.exit(0) - - -if __name__ == "__main__": - main() - diff --git a/atdb_interface_pip/atdb_interface/examples/arg00_post_new_dataproducts b/atdb_interface_pip/atdb_interface/examples/arg00_post_new_dataproducts deleted file mode 100644 index 3f4815928f420aefda2ecc65562afd1b7a312641..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/atdb_interface/examples/arg00_post_new_dataproducts +++ /dev/null @@ -1,3 +0,0 @@ ---operation=POST ---key=dataproducts ---payload={name=WSRTA180223003_B003.MS,filename=WSRTA180223003_B003.MS,description=WSRTA180223003_B003.MS,dataproduct_type=visibility,taskID=180223003,size=54321,quality=raw,new_status=defined,new_location=datawriter} diff --git a/atdb_interface_pip/build.sh b/atdb_interface_pip/build.sh deleted file mode 100644 index ac4426a806466c4cb66d82f9f0a104c5e0b1485a..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/build.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -# This script make a source distribution for the pip installable package in the current folder -echo "Build a source distribution for ATDB interface" -python --version -# Explicit give format otherwise a zip is created (Windows?) 
-python setup.py sdist --formats=gztar - -# Next command will not close the window, can be handy if something goes wrong -#exec $SHELL \ No newline at end of file diff --git a/atdb_interface_pip/dist/atdb_interface-1.2.8.tar.gz b/atdb_interface_pip/dist/atdb_interface-1.2.8.tar.gz deleted file mode 100644 index a431d44d98b0b9097fe70af666c42518a3342e01..0000000000000000000000000000000000000000 Binary files a/atdb_interface_pip/dist/atdb_interface-1.2.8.tar.gz and /dev/null differ diff --git a/atdb_interface_pip/dist/atdb_interface-1.2.9.tar.gz b/atdb_interface_pip/dist/atdb_interface-1.2.9.tar.gz deleted file mode 100644 index e27e0114c2d8a7aa96fe2c2352fd73bdd183e974..0000000000000000000000000000000000000000 Binary files a/atdb_interface_pip/dist/atdb_interface-1.2.9.tar.gz and /dev/null differ diff --git a/atdb_interface_pip/setup.py b/atdb_interface_pip/setup.py deleted file mode 100644 index 6fdd44c1972583cab22039b195da0e7ff4cbf6b6..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/setup.py +++ /dev/null @@ -1,21 +0,0 @@ -from setuptools import setup, find_packages - -def readme(): - with open('README.rst') as f: - return f.read() - -setup(name='atdb_interface', - version='1.2.9', - description='ATDB interface', - url='https://www.astron.nl/wsrt/wiki/doku.php?id=atdb:atdb_interface', - author='Nico Vermaas - Astron', - author_email='vermaas@astron.nl', - license='BSD', - install_requires=['requests'], - packages=find_packages(), - entry_points={ - 'console_scripts': [ - 'atdb_interface=atdb_interface.atdb_interface:main', - ], - }, - ) \ No newline at end of file diff --git a/atdb_interface_pip/upload_to_nexus.sh b/atdb_interface_pip/upload_to_nexus.sh deleted file mode 100644 index a5b6f8f0487549889274a86933f27ad658a46752..0000000000000000000000000000000000000000 --- a/atdb_interface_pip/upload_to_nexus.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -##---------------------------------------------------------------------------------------------------------------------# -##! \brief Description: This script uploads the created atdb_interface artifact to the Nexus repository -##! In: $1 [Optional] Additional Artifact version tag which can be anything -##! Out: None -##! Returns: None -##! Preconditions: -##! - build should be done /dist/atdb_interface-[version].tar.gz is available -##! Postconditions: -##! - artifact uploaded to https://support.astron.nl/nexus/content/repositories/snapshots/nl/astron/atdb/ATDB_interface-[version][additional version tag].tar.gz -##! Examples: .\upload_to_nexus -##! .\upload_to_nexus 20180913 -##! .\upload_to_nexus test -##---------------------------------------------------------------------------------------------------------------------# - -VERSION=$(python setup.py --version) -ARTIFACT_NAME="ATDB_interface" -ARTIFACT_BUILD="/dist/atdb_interface-${VERSION}.tar.gz" - -ARTIFACT_UPLOAD_BASE_PATH="https://support.astron.nl/nexus/content/repositories/snapshots/nl/astron/atdb/" -if [[ $# -eq 1 ]]; then - ARTIFACT_VERSION="-${VERSION}-${1}" -else - ARTIFACT_VERSION="-${VERSION}" -fi - -ARTIFACT_UPLOAD_PATH="${ARTIFACT_UPLOAD_BASE_PATH}${ARTIFACT_NAME}${ARTIFACT_VERSION}.tar.gz" -ARTIFACT_BUILD_PATH="$(pwd)${ARTIFACT_BUILD}" - -echo "Upload ${ARTIFACT_BUILD_PATH} to $ARTIFACT_UPLOAD_PATH" -curl --insecure --upload-file ${ARTIFACT_BUILD_PATH} -u upload:upload ${ARTIFACT_UPLOAD_PATH} - -# Next command will not close the window, can be handy if something goes wrong -exec $SHELL \ No newline at end of file
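
Dropping process_triggers from the Observation model (models.py, serializers.py and views.py above) also implies a schema migration that is not shown in this change set; it would need to be applied before the docker-compose restart described in the README. A minimal sketch of what Django's makemigrations would generate for this removal — the migration number and the dependency entry below are placeholders, not part of this diff:

    # atdb/taskdatabase/migrations/00XX_remove_observation_process_triggers.py
    # Hypothetical migration sketch; the actual filename/number and the
    # dependency depend on the local migration history of the taskdatabase app.
    from django.db import migrations


    class Migration(migrations.Migration):

        dependencies = [
            ('taskdatabase', '0001_initial'),  # placeholder: the latest applied migration
        ]

        operations = [
            # Removes the ARTS SC4 'process_triggers' boolean from Observation,
            # matching the field deleted from models.py in this change.
            migrations.RemoveField(
                model_name='observation',
                name='process_triggers',
            ),
        ]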
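
With the atdb_interface_pip package deleted above, a call such as "atdb_interface -o GET --key observations:my_status --taskid 180223003" no longer has a pip-installable CLI behind it. The same lookup can be done directly against the ATDB REST API that the deleted module wrapped. A rough sketch, reusing the test host, request header and observations?taskID= query exactly as they appeared in the removed atdb_interface.py (the helper name is ours, and the basic-auth value is the placeholder credential from that file):

    # Hedged sketch: query the ATDB REST API directly, mirroring the deleted
    # do_GET / do_GET_Observation helpers from atdb_interface.py.
    import requests

    ATDB_HOST = "http://atdb-test.astron.nl/atdb/"   # test host from the deleted module
    HEADER = {
        "content-type": "application/json",
        "cache-control": "no-cache",
        "authorization": "Basic YWRtaW46YWRtaW4=",    # placeholder credentials from the deleted module
    }


    def get_observation_field(taskid, field):
        """Return one field of the observation with the given taskID, or None if not found."""
        response = requests.get(ATDB_HOST + "observations",
                                headers=HEADER,
                                params={"taskID": taskid})
        response.raise_for_status()
        results = response.json().get("results", [])
        return results[0].get(field) if results else None


    if __name__ == "__main__":
        # e.g. the status of observation 180223003, as in the old CLI examples
        print(get_observation_field("180223003", "my_status"))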