diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1aec4a366ca15499a8ac53c887bf7e9135c9c944..717a5f86b3d62945959cb3cfa012c5e5816a13c8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,7 +1,9 @@ stages: - prepare - build + - dockerize - unit_test + - deploy # - integration_test # @@ -39,6 +41,46 @@ build_RAServices: - build/gnucxx11_opt - install/*.ztar +build_TMSS: + stage: build + image: ci_raservices:latest + script: + - PACKAGE=TMSS + - echo "Building $PACKAGE..." + - mkdir -p build/gnucxx11_opt + - cd build/gnucxx11_opt + - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DUSE_LOG4CPLUS=false ../.. + - make -j 6 + - make install + dependencies: + - prepare_RAServices + artifacts: + expire_in: 6 hours + paths: + - build/gnucxx11_opt + +# +# DOCKERIZE +# + +dockerize_TMSS: + stage: dockerize + script: + - cd build/gnucxx11_opt + - ls * + - docker build -t tmss_django:$CI_COMMIT_SHORT_SHA -f docker/Dockerfile-tmss . + - cd ../.. + - cd SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc + - docker build -t tmss_testprovider:$CI_COMMIT_SHORT_SHA -f dockerfiles/oidc_testprovider . + - docker login -u $CI_NEXUS_REGISTRY_USERNAME -p $CI_NEXUS_REGISTRY_PASSWORD $CI_NEXUS_REGISTRY + - docker tag tmss_django:$CI_COMMIT_SHORT_SHA nexus.cep4.control.lofar:18080/tmss_django:$CI_COMMIT_SHORT_SHA + - docker push nexus.cep4.control.lofar:18080/tmss_django:$CI_COMMIT_SHORT_SHA + - docker tag tmss_testprovider:$CI_COMMIT_SHORT_SHA nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA + - docker push nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA + - docker logout $CI_NEXUS_REGISTRY + dependencies: + - build_TMSS + # # UNIT TEST STAGE # @@ -83,4 +125,50 @@ unit_test_RAServices: # paths: # - build/gnucxx11_opt/Testing/Temporary/LastTest.log +deploy-tmss-test: + stage: deploy + before_script: + - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client git -y )' + - eval $(ssh-agent -s) + - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - + - mkdir -p ~/.ssh + - chmod 700 ~/.ssh + - ssh-keyscan scu199.control.lofar >> ~/.ssh/known_hosts + - chmod 644 ~/.ssh/known_hosts + script: + - cd SAS/TMSS + - ssh lofarsys@scu199.control.lofar "docker-compose -f docker-compose-scu199.yml down" + - scp docker-compose-scu199.yml lofarsys@scu199.control.lofar:~/ + - ssh lofarsys@scu199.control.lofar "docker pull ${CI_NEXUS_REGISTRY}/tmss_testprovider:$CI_COMMIT_SHORT_SHA" + - ssh lofarsys@scu199.control.lofar "docker pull ${CI_NEXUS_REGISTRY}/tmss_django:$CI_COMMIT_SHORT_SHA" + - ssh lofarsys@scu199.control.lofar "docker tag ${CI_NEXUS_REGISTRY}/tmss_testprovider:$CI_COMMIT_SHORT_SHA ${CI_NEXUS_REGISTRY}/tmss_testprovider:latest" + - ssh lofarsys@scu199.control.lofar "docker tag ${CI_NEXUS_REGISTRY}/tmss_django:$CI_COMMIT_SHORT_SHA ${CI_NEXUS_REGISTRY}/tmss_django:latest" + - ssh lofarsys@scu199.control.lofar "docker-compose -f docker-compose-scu199.yml up -d" + dependencies: + - build_TMSS + when: manual +deploy-tmss-ua: + stage: deploy + before_script: + - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client git -y )' + - eval $(ssh-agent -s) + - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - + - mkdir -p ~/.ssh + - chmod 700 ~/.ssh + - ssh-keyscan tmss-ua.control.lofar >> ~/.ssh/known_hosts + - chmod 644 ~/.ssh/known_hosts + script: + - cd SAS/TMSS + - ssh lofarsys@tmss-ua.control.lofar "docker-compose -f docker-compose-ua.yml down" + - scp docker-compose-ua.yml 
lofarsys@tmss-ua.control.lofar:~/ + - ssh lofarsys@tmss-ua.control.lofar "docker pull ${CI_NEXUS_REGISTRY}/tmss_testprovider:$CI_COMMIT_SHORT_SHA" + - ssh lofarsys@tmss-ua.control.lofar "docker pull ${CI_NEXUS_REGISTRY}/tmss_django:$CI_COMMIT_SHORT_SHA" + - ssh lofarsys@tmss-ua.control.lofar "docker tag ${CI_NEXUS_REGISTRY}/tmss_testprovider:$CI_COMMIT_SHORT_SHA ${CI_NEXUS_REGISTRY}/tmss_testprovider:latest" + - ssh lofarsys@tmss-ua.control.lofar "docker tag ${CI_NEXUS_REGISTRY}/tmss_django:$CI_COMMIT_SHORT_SHA ${CI_NEXUS_REGISTRY}/tmss_django:latest" + - ssh lofarsys@tmss-ua.control.lofar "docker-compose -f docker-compose-ua.yml up -d" + dependencies: + - build_TMSS + when: manual + only: + - "master" diff --git a/CAL/CalibrationCommon/lib/coordinates.py b/CAL/CalibrationCommon/lib/coordinates.py index 2adda837e878380f5a30f3595c8351c02c4326a4..0a9762d674f196e9bb405b7e397885fdf934793e 100644 --- a/CAL/CalibrationCommon/lib/coordinates.py +++ b/CAL/CalibrationCommon/lib/coordinates.py @@ -8,7 +8,11 @@ from astropy.utils.data import download_file from astropy.utils import iers from astropy.time import Time -iers.IERS.iers_table = iers.IERS_A.open(download_file(iers.IERS_A_URL, cache=True)) + +try: + iers.IERS.iers_table = iers.IERS_A.open(download_file(iers.IERS_A_URL, cache=True)) +except Exception: + iers.IERS.iers_table = iers.IERS_A.open(download_file(iers.IERS_A_URL_MIRROR, cache=True)) def mjd_to_astropy_time(mjd_time_seconds) -> Time: diff --git a/CAL/CalibrationCommon/lib/datacontainers/holography_dataset.py b/CAL/CalibrationCommon/lib/datacontainers/holography_dataset.py index cfdbb8b6e023401780dfbd29ecf70c8afc1fe119..dcadc629b6ea8a8ec86839249b6bebbebea19164 100644 --- a/CAL/CalibrationCommon/lib/datacontainers/holography_dataset.py +++ b/CAL/CalibrationCommon/lib/datacontainers/holography_dataset.py @@ -93,10 +93,14 @@ class HolographyDataset(): # array(3,3), translation matrix for # (RA, DEC) <-> (l, m) conversion self._rotation_matrix = None - + # str, filter name + self._filter = None + # str, antenna set + self._antenna_set = None # list of beamlet numbers self._beamlets = list() - + # list of used antennas + self._used_antennas = list() # The central beam or beamlet for each frequency self._central_beamlets = dict() self._calibration_tables = dict() @@ -158,6 +162,18 @@ class HolographyDataset(): def target_station_position(self) -> List[Union[str, float, float]]: return self._target_station_position + @property + def antenna_set(self): + return self._antenna_set + + @property + def used_antennas(self): + return self._used_antennas + + @property + def filter(self): + return self._filter + @property def source_name(self) -> str: return self._source_name @@ -372,6 +388,10 @@ class HolographyDataset(): frequency_string, station_name) continue + if beamlet not in ho.ms_for_a_given_beamlet_number: + logger.error('missing beamlet %s for station %s - beamlets present are: %s', beamlet, + station_name, sorted(ho.ms_for_a_given_beamlet_number.keys())) + raise ValueError('missing beamlet %s for station %s' % (beamlet, station_name)) reference_station_names, cross_correlation = \ ho.ms_for_a_given_beamlet_number[ beamlet].read_cross_correlation_time_flags_lm_per_station_name( @@ -423,20 +443,28 @@ class HolographyDataset(): frequencies = set() sas_ids = set() rcu_list = set() + filterset = set() + antenna_set = set() start_mjd = None end_mjd = None used_antennas = set() + if len(list_of_hbs_ho_tuples) == 0: + raise ValueError('list_of_hbs_ho_tuples is empty: no specification/observation pairs to process') for hbs, ho in list_of_hbs_ho_tuples: 
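+            # each (hbs, ho) tuple pairs a holography beam specification with the observation data collected for it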
target_stations.update(hbs.target_station_names) if station_name in hbs.target_station_names: beam_specifications = hbs.get_beam_specifications_per_station_name(station_name) + if len(beam_specifications) == 0: + logger.error('beam spec not found for %s', station_name) + raise ValueError('Input data incomplete') for beam_specification in beam_specifications: rcu_list.update(beam_specification.rcus_involved) used_antennas.update(beam_specification.antennas_used) mode.add(beam_specification.rcus_mode) - + filterset.add(beam_specification.filter) + antenna_set.add(beam_specification.antenna_set) source_name.add(ho.source_name) source_position.add( (beam_specification.station_pointing['ra'], @@ -456,7 +485,8 @@ class HolographyDataset(): self._target_station_name = station_name reference_stations.update(hbs.reference_station_names) try: - single_beamlet = int(beam_specification.beamlets) + # MOD 1000 in case of the HBA_ONE specification + single_beamlet = int(beam_specification.beamlets) % 1000 except ValueError as e: logger.exception('Target station specification incorrect') raise e @@ -486,6 +516,16 @@ class HolographyDataset(): else: raise ValueError('Multiple source name are not supported') + if len(antenna_set) == 1: + self._antenna_set = antenna_set.pop() + else: + raise ValueError('Multiple antenna sets are not supported') + + if len(filterset) == 1: + self._filter = filterset.pop() + else: + raise ValueError('Multiple filters are not supported') + if station_name not in target_stations: logger.error('Station %s was not involved in the observation.' ' The target stations for this observation are %s', @@ -500,6 +540,8 @@ class HolographyDataset(): self._sas_ids = list(sas_ids) self._reference_stations = list(reference_stations) self._rcu_list = list(rcu_list) + self._used_antennas = list(used_antennas) + return target_stations, used_antennas, virtual_pointing def _collect_from_observation(self, station_name, list_of_hbs_ho_tuples, virtual_pointing, used_antennas): @@ -512,7 +554,7 @@ class HolographyDataset(): beamlet_string = str(beamlet) ra, dec, _ = virtual_pointing[(frequency, beamlet)] if isnan(ra) or isnan(dec): - logger.error('skipping pointing %s for frequency %s malformed : %s', + logger.warning('skipping pointing %s for frequency %s malformed : %s', beamlet_string, frequency_string, virtual_pointing[(frequency, beamlet)]) # skip if the pointing is ill specified continue @@ -714,6 +756,11 @@ class HolographyDataset(): result._antenna_field_position = f.attrs[HDS_ANTENNA_FIELD_POSITION].tolist() result._reference_stations = bytestring_list_to_string(list(f[HDS_REFERENCE_STATION])) + if float(result._version) >= 1.1: + result._antenna_set = f.attrs[HDS_ANTENNASET] + result._filter = f.attrs[HDS_FILTER] + result._used_antennas = f.attrs[HDS_USEDANTENNAS] + result._frequencies = list(f[HDS_FREQUENCY]) if HDS_CALIBRATION_TABLES in f: @@ -795,11 +842,15 @@ class HolographyDataset(): f.attrs[HDS_ROTATION_MATRIX] = self._rotation_matrix f.attrs[HDS_ANTENNA_FIELD_POSITION] = self._antenna_field_position f.attrs[HDS_BEAMLETS] = self._beamlets + f.attrs[HDS_FILTER] = self._filter + f.attrs[HDS_ANTENNASET] = self._antenna_set + f.attrs[HDS_USEDANTENNAS] = self._used_antennas # Store the list of reference stations and _frequencies. We just # want to keep 'em around for quick reference. 
f[HDS_REFERENCE_STATION] = to_numpy_array_string(self._reference_stations) f[HDS_FREQUENCY] = self._frequencies + f.create_group(HDS_CALIBRATION_TABLES) for mode in self._calibration_tables: self._calibration_tables[mode].store_to_hdf(f, diff --git a/CAL/CalibrationCommon/lib/datacontainers/holography_dataset_definitions.py b/CAL/CalibrationCommon/lib/datacontainers/holography_dataset_definitions.py index 21ed4aa8ca0247e98d0d8717cc895d450e2f4235..dc43f17f1509efcf9d5af8f3342aad78fe3beca0 100644 --- a/CAL/CalibrationCommon/lib/datacontainers/holography_dataset_definitions.py +++ b/CAL/CalibrationCommon/lib/datacontainers/holography_dataset_definitions.py @@ -1,6 +1,6 @@ import numpy -HOLOGRAPHY_DATA_SET_VERSION = 1.0 +HOLOGRAPHY_DATA_SET_VERSION = 1.1 # Allowed HDS keywords # @@ -22,6 +22,9 @@ HDS_BEAMLETS = "Beamlets" HDS_CENTRAL_BEAMLETS = "Central_beamlets" HDS_CALIBRATION_TABLES = "Calibration_tables" HDS_DERIVED_CAL_TABLES = "Derived_calibration_tables" +HDS_FILTER = 'Filter' +HDS_ANTENNASET = 'AntennaSet' +HDS_USEDANTENNAS = 'Antennas' # GROUP "RA DEC" HDS_SPECIFIED_RA_DEC = "Specified_RA_DEC" diff --git a/CAL/CalibrationCommon/lib/datacontainers/holography_specification.py b/CAL/CalibrationCommon/lib/datacontainers/holography_specification.py index 22c52b88141c5612fb31d2d1de3cef5d22065ddb..8e0c28351cee2463e22647faae3cc6be5081ee2e 100644 --- a/CAL/CalibrationCommon/lib/datacontainers/holography_specification.py +++ b/CAL/CalibrationCommon/lib/datacontainers/holography_specification.py @@ -17,6 +17,21 @@ MODE_TO_COMPONENT = { 7: 'HBA' } +_FILTER_TO_MODE = { + '210_250': 7, + '110_190': 5, + '170_230': 6 +} + + +def filter_antenna_set_to_mode(filter, antenna_set): + if antenna_set.startswith('HBA'): + return _FILTER_TO_MODE[filter] + elif antenna_set.startswith('LBA'): + raise NotImplementedError() + else: + raise ValueError('antenna_set %s unrecognized' % antenna_set) + def antenna_id_from_rcu_type(rcu, type): """ @@ -62,6 +77,8 @@ class HolographyStationBeamSpecification(object): station_pointing = () virtual_pointing = () station_type = () + filter = '' + antenna_set = '' def _parse_row(row): @@ -71,12 +88,14 @@ def _parse_row(row): :type row: dict[str, str] """ beam_specification = HolographyStationBeamSpecification() + beam_specification.antenna_set = row['antenna_set'] + beam_specification.filter = row['filter'] - beam_specification.rcus_mode = int(row['rcus_mode']) + beam_specification.rcus_mode = filter_antenna_set_to_mode(beam_specification.filter, beam_specification.antenna_set) beam_specification.sub_band_ids = [int(sub_band) for sub_band in row['sub_band'].split(',')] - beam_specification.mode_description = row['mode_description'] + beam_specification.mode_description = "_".join([beam_specification.antenna_set, beam_specification.filter]) beam_specification.rcus_involved = _parse_integer_range_or_list( row['rcus']) beam_specification.beamlets = row['beamlets'] @@ -162,20 +181,21 @@ def _split_line(line): the end_date, the rcu_mode, and the beam_switch_delay :rtype: dict """ - range_regex = '(\d*\:\d*)|(\d*)' + range_regex = '([^\s]*)' ra_dec_regex = '(\d*\.\d*|nan),(-?\d*\.\d*|nan),(\w*)' regex = r'^(?P<station_name>\w*)\s*' \ - r'(?P<mode_description>\w*)\s*' \ + r'(?P<antenna_set>\w*)\s*' \ r'(?P<sub_band>[\d,]*)\s*' \ r'(?P<beamlets>{range_regex})\s*' \ r'(?P<rcus>{range_regex})\s*' \ - r'(?P<rcus_mode>(\d*))\s*' \ + r'(?P<filter>(\w*))\s*' \ r'(?P<virtual_pointing>{ra_dec_regex})\s*' \ r'(?P<station_pointing>{ra_dec_regex})'.format(range_regex=range_regex, 
ra_dec_regex=ra_dec_regex) match = re.match(regex, line) if match is None: raise ValueError('Cannot parse line {}'.format(line)) + return match.groupdict() @@ -197,17 +217,27 @@ def _parse_integer_range_or_list(string_to_be_parsed): ex "1,2,3,4,5" -> [1, 2, 3, 4, 5] "1:4" -> [1, 2, 3, 4] + "1:3,10:12" -> [1, 2, 3, 10, 11, 12] :param string_to_be_parsed: the string representing a list of int or a range :return: a list of int :rtype: list(int) """ - if ':' in string_to_be_parsed: + if ',' in string_to_be_parsed and ':' in string_to_be_parsed: + fragments = string_to_be_parsed.split(',') + return_value = [] + for fragment in fragments: + if ':' in fragment: + expanded_range = _parse_integer_range_or_list(fragment) + return_value += expanded_range + else: + return_value.append(int(fragment)) + elif ':' in string_to_be_parsed: try: start, end = map(int, string_to_be_parsed.split(':')) except ValueError as e: raise ValueError('Cannot parse string %s expected [start]:[end] -> %s' % (string_to_be_parsed, e)) - return_value = [x for x in range(start, end)] + return_value = [x for x in range(start, end + 1)] elif ',' in string_to_be_parsed: try: return_value = list(map(int, string_to_be_parsed.split(','))) diff --git a/CAL/CalibrationProcessing/bin/holography_process.py b/CAL/CalibrationProcessing/bin/holography_process.py index 23f398b57e185de832753739bf5862e2955ff9e4..bd3e9cbcaca4f79f9282cd6833d78c2da7dc6010 100644 --- a/CAL/CalibrationProcessing/bin/holography_process.py +++ b/CAL/CalibrationProcessing/bin/holography_process.py @@ -10,6 +10,7 @@ from lofar.calibration.processing.averaging import average_data,\ from lofar.calibration.processing.interpolate import derive_interpolation_parameters from lofar.calibration.processing.normalize import normalize_beams_by_central_one +from lofar.calibration.processing.inspect import compute_illumination, compute_beam_shape from lofar.calibration.processing.decorators import log_step_execution @@ -174,6 +175,28 @@ def interpolate_gains_step(dataset, input_datatable): return output_datable +@log_step_execution('compute illumination', logger) +@save_in_case_of_exception() +def illumination_step(dataset): + """ + Compute the illumination of the array + :param dataset: Input dataset + :type dataset: datacontainers.HolographyDataset + """ + compute_illumination(dataset) + + +@log_step_execution('compute beam shape', logger) +@save_in_case_of_exception() +def beam_shape_step(dataset): + """ + Compute the reconstructed beam shape + :param dataset: Input dataset + :type dataset: datacontainers.HolographyDataset + """ + compute_beam_shape(dataset) + + @log_step_execution('save', logger) @save_in_case_of_exception() def store_step(dataset, filepath): @@ -203,6 +232,9 @@ def execute_processing(arguments): gains = compute_gains_step(dataset, station_averaged_data) interpolate_gains_step(dataset, gains) + illumination_step(dataset) + beam_shape_step(dataset) + store_step(dataset, arguments.output_path) diff --git a/CAL/CalibrationProcessing/lib/processing/averaging.py index fa0d46727cc04e1bbe4788956aa803d549e5b73b..76ffc07b6aa59cfee01087e969a5c6da9741ab1e 100644 --- a/CAL/CalibrationProcessing/lib/processing/averaging.py +++ 
b/CAL/CalibrationProcessing/lib/processing/averaging.py @@ -495,7 +495,7 @@ def weighted_average_dataset_per_station(dataset, input_data_table): polarization) result_per_beam['l'] = numpy.average(average_per_beam_station['l']) result_per_beam['m'] = numpy.average(average_per_beam_station['m']) - result_per_beam['flag'] = numpy.average(average_per_beam_station['flag']) + result_per_beam['flag'] = numpy.average(average_per_beam_station['flag']) >= 1.0 result_per_beam['t'] = numpy.average(average_per_beam_station['t']) result_per_frequency[beam_str] = dict(mean=result_per_beam) diff --git a/CAL/CalibrationProcessing/lib/processing/inspect.py b/CAL/CalibrationProcessing/lib/processing/inspect.py index c8b5d982f7f37eea90e54cd53a4c795d951eb505..ac62778365c14767d82e2fba9107e04fcc5a6096 100644 --- a/CAL/CalibrationProcessing/lib/processing/inspect.py +++ b/CAL/CalibrationProcessing/lib/processing/inspect.py @@ -4,7 +4,6 @@ import matplotlib.pyplot as plt import os from lofar.calibration.common.datacontainers.holography_dataset import HolographyDataset from matplotlib import cm -from matplotlib.figure import Figure from scipy.constants import c as light_speed from numpy.fft import fft2, fftshift @@ -14,6 +13,10 @@ from numba import jit import numpy from matplotlib.ticker import AutoMinorLocator +from scipy.ndimage.filters import gaussian_filter +import logging + +logger = logging.getLogger(__name__) def __set_minor_ticks_for_canvas(canvas): @@ -21,10 +24,14 @@ def __set_minor_ticks_for_canvas(canvas): canvas.yaxis.set_minor_locator(AutoMinorLocator()) +plt.rcParams['figure.autolayout'] = True + +__DEFAULT_SAMPLING = 250 __LIST_OR_NPARRAY = typing.Union[typing.List[float], numpy.ndarray] __DATATABLE_TYPE = typing.Dict[ str, typing.Dict[str, typing.Dict[str, typing.Dict[str, numpy.ndarray]]]] __MHZ_IN_HZ = 1.e6 +__MAX_SIZE_LOFAR_ARRAY_IN_M = 30 DEFAULT_FORMAT = 'png' @@ -35,7 +42,7 @@ def __save_figure_to_path(figure, path, title, format=DEFAULT_FORMAT): figure.savefig(full_path, format=format) -@jit() +@jit def dft2_numba(x: numpy.ndarray, y: numpy.ndarray, l: numpy.ndarray, @@ -43,7 +50,6 @@ def dft2_numba(x: numpy.ndarray, freq_hz, v: numpy.ndarray, fourier_sign): - arg_factor = (fourier_sign * 2. 
* numpy.pi * freq_hz / light_speed) result = numpy.zeros((len(x), len(y)), dtype=numpy.complex64) x_down = x.astype(numpy.float32) @@ -61,20 +67,19 @@ def dft2_numba_loop(x, y, l, m, v, arg_factor, result): y_len = len(y) x_len = len(x) k_len = len(v) - phases = numpy.zeros((x_len, y_len, k_len)) + for i in numba.prange(y_len): for j in range(x_len): - for k in range(v.shape[0]): - phases[i, j, k] = arg_factor * (x[j] * l[k] + y[i] * m[k]) + for k in range(k_len): + result[i, j] += v[k] * numpy.exp(1.j * arg_factor * (x[j] * l[k] + y[i] * m[k])) - real_factor = numpy.cos(phases) - imag_factor = numpy.sin(phases) - for i in numba.prange(y_len): - for j in range(x_len): - for k in range(v.shape[0]): +def compute_antennas_field(dset: HolographyDataset): + antenna_positions = dset.antenna_field_position() + antenna_offset = numpy.mean(antenna_positions, axis=0) - antenna_positions + rotated_antenna_offset = numpy.dot(dset.rotation_matrix(), antenna_offset.T).T.squeeze() - result[i, j] += v[k] * (real_factor[i, j, k] + 1.j * imag_factor[i, j, k]) + return rotated_antenna_offset def complex_value_to_color(complex_array: numpy.ndarray, abs_max=None, abs_min=None, log=True): @@ -101,8 +106,6 @@ def complex_value_to_color(complex_array: numpy.ndarray, abs_max=None, abs_min=N colors = color_map(phase_array) colors[:, 3] = norm_abs_array - #colors[:, 1] *= norm_abs_array - #colors[:, 2] *= norm_abs_array old_shape = list(complex_array.shape) new_shape = old_shape + [4] @@ -138,8 +141,6 @@ def _grid_visibilities_lm_plane(l, m, v, sampling): for l_i, m_i, v_i in zip(l_indexes, m_indexes, v): vis[l_i, m_i] = v_i - from scipy.ndimage.filters import gaussian_filter - sigma = [5, 5] vis.real = gaussian_filter(vis.real, sigma) vis.imag = gaussian_filter(vis.imag, sigma) @@ -155,7 +156,6 @@ def _fft_visibilities_lm_plane(l, m, v, sampling): def _dft_visibilities_lm_plane(l, m, v, sampling, frequency, array_size): - x = numpy.linspace(-array_size, array_size, sampling) y = numpy.linspace(-array_size, array_size, sampling) l = numpy.array(l) @@ -166,18 +166,117 @@ ... return fft_vis +def _dft_gains_station_plane(x, y, g, sampling, frequency): + m = numpy.linspace(-1, 1, sampling) + l = numpy.linspace(-1, 1, sampling) + x = numpy.array(x) + y = numpy.array(y) + g = numpy.array(g) + fft_gain = dft2_numba(l, m, x, y, frequency, g, -1) + l_array, m_array = numpy.meshgrid(l, m) + fft_gain[numpy.where((l_array ** 2. + m_array ** 2) > 1)] = 0. 
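+    # directions with l**2 + m**2 > 1 lie outside the visible sky, so those samples are zeroed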
+ + return fft_gain + + +def compute_illumination_per_frequency_polarization(l, m, v, frequency: float, sampling=__DEFAULT_SAMPLING, + array_size=__MAX_SIZE_LOFAR_ARRAY_IN_M): + fft_vis = _dft_visibilities_lm_plane(l=l, m=m, v=v, + sampling=sampling, + frequency=frequency, + array_size=array_size) + return fft_vis + + +def compute_illumination(hds: HolographyDataset, sampling=__DEFAULT_SAMPLING, + antenna_array_size_in_m=__MAX_SIZE_LOFAR_ARRAY_IN_M): + input_datatable = hds.derived_data['STATION_AVERAGED']['all'] + ILLUMINATION_TAB_NAME = 'ILLUMINATION' + + hds.derived_data[ILLUMINATION_TAB_NAME] = dict() + + output_datatable = hds.derived_data[ILLUMINATION_TAB_NAME] + + output_datatable['array_size'] = antenna_array_size_in_m + output_datatable['sampling'] = sampling + + for polarization in ('XX', 'XY', 'YX', 'YY'): + + illumination_per_polarization = dict() + output_datatable[polarization] = illumination_per_polarization + + for frequency_string in input_datatable: + data_per_frequency = input_datatable[frequency_string] + l_m_v = [ + (data_per_frequency[beam_string]['mean']['l'][0], + data_per_frequency[beam_string]['mean']['m'][0], + data_per_frequency[beam_string]['mean'][polarization][0]) + for beam_string in data_per_frequency] + l, m, v = list(zip(*l_m_v)) + illumination = compute_illumination_per_frequency_polarization(l, m, v, + float(frequency_string), + sampling=sampling, + array_size=antenna_array_size_in_m) + logger.debug('computed illumination for freq %s and polarization %s', frequency_string, polarization) + illumination_per_polarization[frequency_string] = illumination + return hds + + +def compute_beam_shape(hds: HolographyDataset, sampling=__DEFAULT_SAMPLING): + BEAMSHAPE_TAB_NAME = 'BEAM_SHAPE' + input_datatable = hds.derived_data['GAINS']['all'] + antenna_field = compute_antennas_field(hds) + + hds.derived_data[BEAMSHAPE_TAB_NAME] = dict() + + output_datatable = hds.derived_data[BEAMSHAPE_TAB_NAME] + + output_datatable['sampling'] = sampling + + for polarization in ('XX', 'XY', 'YX', 'YY'): + + beam_shape_per_frequency = dict() + output_datatable[polarization] = beam_shape_per_frequency + + for frequency_string in input_datatable: + gains_per_antenna = input_datatable[frequency_string][polarization]['gains'] + frequency = float(frequency_string) + + x, y, g = antenna_field[:, 0], antenna_field[:, 1], gains_per_antenna[:] + dft = _dft_gains_station_plane(x, y, g, __DEFAULT_SAMPLING, + float(frequency)) ## GET ONLY ON THE LM OF THE BEAMS + + logger.debug('computed beam shape for freq %s and polarization %s', frequency_string, polarization) + beam_shape_per_frequency[frequency_string] = dft + return hds + + def _plot_complex_image(canvas, image, extent=None, log=False, abs_max=None, abs_min=None): color_mapped_vis = complex_value_to_color(image, log=log, abs_max=abs_max, abs_min=abs_min) canvas.imshow(color_mapped_vis, extent=extent, origin='lower', resample=True) -def _plot_station_averaged_visibilities_lm_plane_single_frequency_scatter(figure: Figure, l_m_v): +def _plot_station_averaged_visibilities_lm_plane_single_frequency_scatter(target_station, + reference_station, + frequency, + l_m_v, + save_to=None): + frequency_in_mhz = frequency / __MHZ_IN_HZ + l, m, v, flagged = list(zip(*l_m_v)) for index, polarization in enumerate(('XX', 'XY', 'YX', 'YY')): + figure_title = 'OBSERVED_BEAM_VISIBILITIES_{}_{}_{}-{:4.2f}_MHz.{}'.format(target_station, + reference_station, + polarization, + frequency_in_mhz, + DEFAULT_FORMAT) + figure = plt.figure(figure_title) + + 
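+            # one figure per polarization; the figure title doubles as the name of the image file written below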
figure.suptitle(figure_title.replace('_', ' ')) + v_pol = numpy.array(list(map(lambda x: x[polarization], v))) - canvas = figure.add_subplot(2, 2, index + 1) - canvas.set_title(polarization) + canvas = figure.add_subplot(1, 1, 1) canvas.set_xlabel('l') canvas.set_ylabel('m') canvas.set_xlim(-1, 1) @@ -186,17 +285,33 @@ def _plot_station_averaged_visibilities_lm_plane_single_frequency_scatter(figure v_intensity = numpy.abs(v_pol) canvas.scatter(l, m, c=v_intensity, s=5, vmin=0, vmax=1, cmap='viridis') + if save_to: + __save_figure_to_path(figure, save_to, figure_title) + plt.close(figure) + -def _plot_station_averaged_visibilities_lm_plane_single_frequency(figure: Figure, l_m_v, +def _plot_station_averaged_visibilities_lm_plane_single_frequency(frequency, + reference_station, + target_station, + l_m_v, sampling=512, abs_min=None, - abs_max=None): + abs_max=None, + save_to=None): + frequency_in_mhz = frequency / __MHZ_IN_HZ l, m, v, flagged = list(zip(*l_m_v)) for index, polarization in enumerate(('XX', 'XY', 'YX', 'YY')): + figure_title = 'GRIDDED_VISIBILITIES_{}_{}_{}-{:4.2f}_MHz.{}'.format(reference_station, + target_station, + polarization, + frequency_in_mhz, + DEFAULT_FORMAT) + figure = plt.figure(figure_title) + + figure.suptitle(figure_title.replace('_', ' ')) v_pol = numpy.array(list(map(lambda x: x[polarization], v))) - canvas = figure.add_subplot(2, 2, index + 1) - canvas.set_title(polarization) + canvas = figure.add_subplot(1, 1, 1) canvas.set_xlabel('l') canvas.set_ylabel('m') canvas.set_xlim(-1, 1) @@ -205,15 +320,32 @@ def _plot_station_averaged_visibilities_lm_plane_single_frequency(figure: Figure vis = _grid_visibilities_lm_plane(l, m, v_pol, sampling) _plot_complex_image(canvas, vis, extent=[-1, 1, -1, 1], log=True, abs_min=abs_min, abs_max=abs_max) + if save_to: + __save_figure_to_path(figure, save_to, figure_title) + plt.close(figure) + -def _plot_station_averaged_visibilities_station_plane_single_frequency(figure: Figure, l_m_v, +def _plot_station_averaged_visibilities_station_plane_single_frequency(target_station, + reference_station, + l_m_v, frequency: float, - sampling=50, + antenna_field, + sampling=__DEFAULT_SAMPLING, abs_min=None, - abs_max=None): + abs_max=None, + save_to=None): + frequency_in_mhz = frequency / __MHZ_IN_HZ l, m, v, flagged = list(zip(*l_m_v)) for index, polarization in enumerate(('XX', 'XY', 'YX', 'YY')): + figure_title = 'MEASURED_ILLUMINATION_{}_{}_{}-{:4.2f}_MHz.{}'.format(target_station, + reference_station, + polarization, + frequency_in_mhz, + DEFAULT_FORMAT) + figure = plt.figure(figure_title) + figure.suptitle(figure_title.replace('_', ' ')) + v_pol = numpy.array(list(map(lambda x: x[polarization], v))) array_size = 30 @@ -222,7 +354,7 @@ def _plot_station_averaged_visibilities_station_plane_single_frequency(figure: F frequency=frequency, array_size=array_size) - canvas = figure.add_subplot(2, 2, index + 1) + canvas = figure.add_subplot(1, 1, 1) canvas.set_xlim(-array_size, array_size) canvas.set_ylim(-array_size, array_size) @@ -234,19 +366,25 @@ def _plot_station_averaged_visibilities_station_plane_single_frequency(figure: F abs_max=abs_max) plt.minorticks_on() canvas.imshow(color_mapped_fft[::, ::], origin='lower', extent=[-1 * array_size, - 1 * array_size, - -1 * array_size, - 1 * array_size], + 1 * array_size, + -1 * array_size, + 1 * array_size], resample=True) - canvas.set_title(polarization) + plt.scatter(antenna_field[:, 0], antenna_field[:, 1], marker='o', s=100, + facecolor='none', edgecolors='k') + if save_to: + 
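+                # save the figure and close it, so that figures do not accumulate in memory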
__save_figure_to_path(figure, save_to, figure_title) + plt.close(figure) -def _plot_station_averaged_visibilities_lm_plane_datatable(data_table: __DATATABLE_TYPE, - central_beamlets, save_to=None): +def _plot_station_averaged_visibilities_lm_plane_datatable(data_table: __DATATABLE_TYPE, + central_beamlets, + target_station, + antenna_array_offset, + save_to=None): for station, data_per_station in data_table.items(): for frequency_str, data_per_frequency in data_per_station.items(): - central_beam = data_per_frequency[central_beamlets[frequency_str]]['mean'] l_m_v = [(data_per_beam['mean']['l'][0] - central_beam['l'][0], @@ -259,48 +397,27 @@ def _plot_station_averaged_visibilities_lm_plane_datatable(data_table: __DATATAB for beam, data_per_beam in data_per_frequency.items()] frequency = float(frequency_str) - frequency_in_mhz = frequency / __MHZ_IN_HZ + _plot_station_averaged_visibilities_lm_plane_single_frequency(frequency=frequency, + target_station=target_station, + reference_station=station, + l_m_v=l_m_v, + save_to=save_to) - figure_title = 'VISIBILITIES_{}-{:4.2f}_MHz.{}'.format(station, - frequency_in_mhz, - DEFAULT_FORMAT) - figure = plt.figure(figure_title) + _plot_station_averaged_visibilities_lm_plane_single_frequency_scatter(frequency=frequency, + target_station=target_station, + reference_station=station, + l_m_v=l_m_v, + save_to=save_to) - figure.suptitle(figure_title.replace('_', ' ')) + _plot_station_averaged_visibilities_station_plane_single_frequency(target_station=target_station, + reference_station=station, l_m_v=l_m_v, + frequency=frequency, + save_to=save_to, + antenna_field=antenna_array_offset) - _plot_station_averaged_visibilities_lm_plane_single_frequency(figure, l_m_v) - if save_to: - __save_figure_to_path(figure, save_to, figure_title) - plt.close(figure) - - figure_title = 'ABS_VISIBILITIES_{}-{:4.2f}_MHz.{}'.format(station, - frequency_in_mhz, - DEFAULT_FORMAT) - figure = plt.figure(figure_title) - - figure.suptitle(figure_title.replace('_', ' ')) - - _plot_station_averaged_visibilities_lm_plane_single_frequency_scatter(figure, l_m_v) - - if save_to: - __save_figure_to_path(figure, save_to, figure_title) - plt.close(figure) - - figure_title = 'FFT_VISIBILITIES_{}-{:4.2f}_MHz.{}'.format(station, - frequency_in_mhz, - DEFAULT_FORMAT) - figure = plt.figure(figure_title) - figure.suptitle(figure_title.replace('_', ' ')) - _plot_station_averaged_visibilities_station_plane_single_frequency(figure, l_m_v, - frequency) - - if save_to: - __save_figure_to_path(figure, save_to, figure_title) - plt.close(figure) - - -def _plot_gains_as_frequency(data_table: __DATATABLE_TYPE, target_station, save_to=None): +def _plot_gains_as_frequency(data_table: __DATATABLE_TYPE, target_station, antenna_field, antenna_list, sampling=__DEFAULT_SAMPLING, + save_to=None): for station, data_per_station in data_table.items(): opened_figures = set() @@ -316,21 +433,65 @@ def _plot_gains_as_frequency(data_table: __DATATABLE_TYPE, target_station, save_ frequencies = numpy.array(frequencies) gains = numpy.stack(gains, axis=1) - for antenna_id, gains_per_antenna in enumerate(gains): - figure_name = 'gains_{:}_antenna_{:}'.format(target_station, - antenna_id) - figure_title = 'GAINS {:} - ANTENNA {:}'.format(target_station, antenna_id) + for antenna_idx, gains_per_antenna in enumerate(gains): + antenna_id = antenna_list[antenna_idx] + figure_name = 'gains_{:}_antenna_{:}_{:}.{}'.format(target_station, + antenna_id, + polarization, + DEFAULT_FORMAT) + figure_title = 'GAINS {:} - ANTENNA {:} - 
POL {:}'.format(target_station, antenna_id, polarization) figure = plt.figure(figure_name) figure.suptitle(figure_title) opened_figures.add(figure_name) - canvas = figure.add_subplot(2, 2, polarization_index + 1) - canvas.set_title(polarization) + canvas = figure.add_subplot(1, 1, 1) canvas.set_ylim(-numpy.pi, numpy.pi) canvas.set_xlabel('frequency (MHz)') canvas.set_ylabel('Phase') plt.minorticks_on() canvas.plot(frequencies / __MHZ_IN_HZ, numpy.angle(gains_per_antenna), '+-') + + for freq_id, frequency in enumerate(frequencies): + figure_name = 'gains_map_{}_frequency_{:}_{}'.format(target_station, + frequency, polarization) + figure_title = 'gains map {} Freq={:.2f}MHz'.format(target_station, float(frequency) / __MHZ_IN_HZ) + + figure = plt.figure(figure_name) + figure.suptitle(figure_title) + opened_figures.add(figure_name) + canvas = figure.add_subplot(1, 1, 1) + canvas.set_aspect('equal') + canvas.set_xlabel('x [m]') + canvas.set_ylabel('y [m]') + plt.tight_layout() + plt.minorticks_on() + plt.scatter(antenna_field[:, 0], antenna_field[:, 1], c=numpy.angle(gains[:, freq_id]), vmax=numpy.pi, + vmin=-numpy.pi) + + figure_name = 'BEAM_RECONSTRUCTION_{}_frequency_{:}_{:}.{:}'.format(target_station, + frequency, polarization, + DEFAULT_FORMAT) + figure_title = 'BEAM SHAPE_{}_Freq={:.2f}MHz_Pol_{:}'.format(target_station, + float(frequency) / __MHZ_IN_HZ, + polarization) + + figure = plt.figure(figure_name) + figure.suptitle(figure_title) + opened_figures.add(figure_name) + canvas = figure.add_subplot(1, 1, 1) + canvas.set_xlabel('l') + canvas.set_ylabel('m') + x, y, g = antenna_field[:, 0], antenna_field[:, 1], gains[:, freq_id] + dft = _dft_gains_station_plane(x, y, g, __DEFAULT_SAMPLING, + float(frequency)) ## GET ONLY ON THE LM OF THE BEAMS + color_mapped_fft = complex_value_to_color(dft, log=False) + plt.minorticks_on() + canvas.imshow(color_mapped_fft[::, ::], origin='lower', extent=[-1, + 1, + -1, + 1]) + plt.tight_layout() + if save_to: for figure_name in opened_figures: figure = plt.figure(figure_name) @@ -349,17 +510,21 @@ def __apply_to_datatable(dset: HolographyDataset, derived_datatable_name, functi def plot_station_averaged_visibilities_lm_plane(dset: HolographyDataset, derived_datatable_name='STATION_AVERAGED', save_to=None): + antenna_field = compute_antennas_field(dset) __apply_to_datatable(dset, derived_datatable_name, _plot_station_averaged_visibilities_lm_plane_datatable, - dset.central_beamlets(), save_to=save_to) + dset.central_beamlets(), + dset.target_station_name, + antenna_field, save_to=save_to) def plot_gains_per_antenna(dset: HolographyDataset, derived_datatable_name='GAINS', save_to=None): + antenna_field = compute_antennas_field(dset) __apply_to_datatable(dset, derived_datatable_name, _plot_gains_as_frequency, - dset.target_station_name, save_to=save_to) + dset.target_station_name, antenna_field, dset.used_antennas, save_to=save_to) def plot_gains_fit_per_antenna(dset: HolographyDataset, @@ -382,35 +547,118 @@ def plot_gains_fit_per_antenna(dset: HolographyDataset, frequencies = numpy.array(frequencies) gains = numpy.stack(gains, axis=1) - for antenna, antenna_data in enumerate(gains): + for antenna_idx, antenna_data in enumerate(gains): + antenna = dset.used_antennas[antenna_idx] amplitude_par = interpolation_parameters[station][polarization][str(antenna)]['amplitude']['parameters'] phase_par = \ - interpolation_parameters[station][polarization][str(antenna)]['phase'][ - 'parameters'] + interpolation_parameters[station][polarization][str(antenna)]['phase'][ 
+ 'parameters'] - figure_name = 'station_%s_antenna_%s' % (station, antenna) + figure_name = 'station_%s_antenna_%s_polarization_%s.%s' % (station, + antenna, + polarization, + DEFAULT_FORMAT) figure = plt.figure(figure_name) opened_figures.add(figure_name) - canvas = figure.add_subplot(2, 2, polarization_index + 1) + canvas = figure.add_subplot(2, 1, 1) __set_minor_ticks_for_canvas(canvas) - canvas.set_title(polarization) canvas.set_xlabel('frequency (MHz)') canvas.set_ylabel('Amplitude') - canvas.plot(frequencies/__MHZ_IN_HZ, numpy.abs(antenna_data), '+') - canvas.plot(frequencies/__MHZ_IN_HZ, amplitude_par['m'] * frequencies + amplitude_par['q']) + canvas.plot(frequencies / __MHZ_IN_HZ, numpy.abs(antenna_data), '+') + canvas.plot(frequencies / __MHZ_IN_HZ, amplitude_par['m'] * frequencies + amplitude_par['q']) plt.minorticks_on() - canvas = figure.add_subplot(2, 2, polarization_index + 3) + canvas = figure.add_subplot(2, 1, 2) __set_minor_ticks_for_canvas(canvas) - canvas.set_title(polarization) canvas.set_xlabel('frequency (MHz)') canvas.set_ylabel('Phase') canvas.set_ylim(-numpy.pi, numpy.pi) plt.minorticks_on() - canvas.plot(frequencies/__MHZ_IN_HZ, numpy.angle(antenna_data), '+') - canvas.plot(frequencies/__MHZ_IN_HZ, phase_par['m'] * frequencies + phase_par['q']) + canvas.plot(frequencies / __MHZ_IN_HZ, numpy.angle(antenna_data), '+') + canvas.plot(frequencies / __MHZ_IN_HZ, phase_par['m'] * frequencies + phase_par['q']) plt.tight_layout() + + if save_to: + for figure_name in opened_figures: + figure = plt.figure(figure_name) + if save_to: + __save_figure_to_path(figure, save_to, figure_name) + plt.close(figure) + + +def plot_abs_visibilities(figure, data_table): + pols = ('XX', 'XY', 'YX', 'YY') + abs_data = numpy.stack([numpy.abs(data_table[pol]) for pol in pols], axis=0) + max_value = numpy.max(abs_data) * 10 + min_value = numpy.min(abs_data) / 10 + + for pol_i, pol in enumerate(pols): + canvas = figure.add_subplot(2, 2, pol_i + 1) + canvas.set_ylim([min_value, max_value]) + abs_per_pol = abs_data[pol_i, :] + time = data_table['t'] + + flag = data_table['flag'] + flagged_points = time[flag] + for flagged_point in flagged_points: + canvas.axvline(flagged_point, color='red') + canvas.plot(time, abs_per_pol, '+-') + plt.xticks(rotation=45) + plt.semilogy() + plt.xlabel('t') + plt.ylabel('$|V|$') + plt.minorticks_on() + plt.tight_layout() + + +def plot_phase_visibilities(figure, data_table): + pols = ('XX', 'XY', 'YX', 'YY') + angle_data = numpy.stack([numpy.angle(data_table[pol]) for pol in pols], axis=0) + max_value = numpy.pi + .5 + min_value = -numpy.pi - .5 + + for pol_i, pol in enumerate(pols): + canvas = figure.add_subplot(2, 2, pol_i + 1) + canvas.set_ylim([min_value, max_value]) + ang_per_pol = angle_data[pol_i, :] + + time = data_table['t'] + plt.xticks(rotation=45) + flag = data_table['flag'] + flagged_points = time[flag] + for flagged_point in flagged_points: + canvas.axvline(flagged_point, color='red') + + canvas.plot(time, ang_per_pol, '+-') + plt.xlabel('t') + plt.ylabel('$\\phi$') + + plt.minorticks_on() + plt.tight_layout() + + +def plot_raw_uv(dset: HolographyDataset, selected_stations=None, save_to=None): + datatable = dset.data + selected_stations = selected_stations if selected_stations else datatable.keys() + central_beamlet_number_per_frequency = dset.central_beamlets + opened_figures = [] + for station_name in selected_stations: + for frequency_str in datatable[station_name]: + title = 'RAWUV_AMPL_%s_%s' % (station_name, frequency_str) + figure = plt.figure(title) + opened_figures.append(title) + 
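+            # the raw UV inspection plots are made for the central beamlet of each frequency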
vis_per_frequency_station = datatable[station_name][frequency_str] + vis_central_beam = vis_per_frequency_station[central_beamlet_number_per_frequency[frequency_str]] + plot_abs_visibilities(figure, vis_central_beam) + + title = 'RAWUV_PHASE_%s_%s' % (station_name, frequency_str) + figure = plt.figure(title) + opened_figures.append(title) + vis_per_frequency_station = datatable[station_name][frequency_str] + vis_central_beam = vis_per_frequency_station[central_beamlet_number_per_frequency[frequency_str]] + plot_phase_visibilities(figure, vis_central_beam) + if save_to: for figure_name in opened_figures: figure = plt.figure(figure_name) diff --git a/CAL/CalibrationProcessing/lib/processing/solver.py b/CAL/CalibrationProcessing/lib/processing/solver.py index 09e0462f776f2d8d324c846175c27dcc73ebacee..458fcd94e760bfe390b32dac491474999c93710f 100644 --- a/CAL/CalibrationProcessing/lib/processing/solver.py +++ b/CAL/CalibrationProcessing/lib/processing/solver.py @@ -195,6 +195,7 @@ def __remove_flagged_data(visibilities, matrix, flags): matrix_new[index, :] = matrix[index, :] return visibilities, numpy.array(matrix_new) + def solve_gains_per_datatable(dataset, datatable, **kwargs): """ Solve for the gains the given datatable diff --git a/CAL/Docker/HolographyBase/Dockerfile b/CAL/Docker/HolographyBase/Dockerfile index 58828f618122e8720aa60b5a4dadc3e1893cef77..c7b36c4f10d43f480127d9fa5ac7b1cdcb13911b 100644 --- a/CAL/Docker/HolographyBase/Dockerfile +++ b/CAL/Docker/HolographyBase/Dockerfile @@ -20,7 +20,7 @@ RUN apt-get update && apt-get -y install \ RUN pip3 install dataclasses \ scipy==1.3.1 \ numpy==1.17.0 \ - astropy==3.2.1 \ + astropy==3.2.3 \ h5py==2.9.0 \ emcee==2.2.1 \ numba==0.45.1 \ @@ -93,11 +93,25 @@ RUN mkdir /root/src/dysco && \ make -j 5 && \ make install +RUN mkdir /root/src/stman && \ + cd /root/src/stman && \ + wget https://github.com/lofar-astron/LofarStMan/archive/master.tar.gz && \ + tar -xvf master.tar.gz && \ + rm master.tar.gz && \ + mkdir build && \ + cd build && \ + cmake -DCMAKE_CXX_FLAGS="${CXX_FLAGS}" -DPORTABLE=True -DCMAKE_INSTALL_PREFIX=/opt/stman/ -DCASACORE_ROOT_DIR=/opt/casacore/ /root/src/stman/LofarStMan-master/ && \ + make -j 5 && \ + make install + + FROM base ENV PYTHONPATH=/opt/dysco/lib/python3.6/site-packages:/opt/dysco/lib64/python3.6/site-packages:${PYTHONPATH} COPY --from=base-build /opt/casacore/ /opt/casacore/ COPY --from=base-build /opt/python-casacore/ /opt/python-casacore/ COPY --from=base-build /opt/dysco/ /opt/dysco/ +COPY --from=base-build /opt/stman/ /opt/stman/ + ## Installing convenience packages RUN apt install -y \ diff --git a/CAL/Docker/HolographyDeploy/Dockerfile b/CAL/Docker/HolographyDeploy/Dockerfile index f3ebc351b2cd7606c1ada6691b662197cbc0999c..ca29545328eca4b31615bb4d8ab9a69d2051fc21 100644 --- a/CAL/Docker/HolographyDeploy/Dockerfile +++ b/CAL/Docker/HolographyDeploy/Dockerfile @@ -3,7 +3,7 @@ COPY --from=holography-build:latest /opt/lofar /opt/lofar COPY ./entrypoint.sh /entrypoint.sh -ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/casacore/lib/:/opt/dysco/lib +ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/casacore/lib/:/opt/dysco/lib:/opt/stman/lib ENV PYTHONPATH=$PYTHONPATH:/opt/python-casacore/lib/python3.6/site-packages/ diff --git a/CMake/LofarMacros.cmake b/CMake/LofarMacros.cmake index 5533655f66f415316e85c0ba4a95b5ec8d7665da..62530bb75cfa086ad898d4a98e88bffb3fa17712 100644 --- a/CMake/LofarMacros.cmake +++ b/CMake/LofarMacros.cmake @@ -284,6 +284,46 @@ if(NOT DEFINED LOFAR_MACROS_INCLUDED) endmacro(lofar_add_data_files) + # 
-------------------------------------------------------------------------- + # lofar_add_docker_files([name1 [name2 ..]]) + # + # Add docker files (architecture-independent data) that need to be + # installed into the <prefix>/docker directory. Also create a symbolic link + # in <build-dir>/docker to each of these files. The file names may contain + # a relative(!) path. + # + # The mentioned files are installed in the same relative path as provided, + # that is: + # lofar_add_docker_files(foo/bar) + # installs "docker/foo/bar". To override this behaviour use: + # lofar_add_docker_files(foo/bar DESTINATION .) + # installs "docker/bar". + # -------------------------------------------------------------------------- + macro(lofar_add_docker_files) + string(REGEX REPLACE ";?DESTINATION.*" "" _src_names "${ARGN}") + string(REGEX MATCH "DESTINATION;.*" _destination "${ARGN}") + string(REGEX REPLACE "^DESTINATION;" "" _destination "${_destination}") + string(TOLOWER ${PACKAGE_NAME} lower_package_name) + foreach(_src_name ${_src_names}) + if(_destination MATCHES ".+") + get_filename_component(_src_filename ${_src_name} NAME) + set(_dest_name ${_destination}/${_src_filename}) + else(_destination MATCHES ".+") + set(_dest_name ${_src_name}) + endif(_destination MATCHES ".+") + + get_filename_component(_abs_name ${_src_name} ABSOLUTE) + get_filename_component(_dest_path ${_dest_name} PATH) + + file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/docker/${_dest_path}) + execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink + ${_abs_name} ${CMAKE_BINARY_DIR}/docker/${_dest_name}) + install(FILES ${_src_name} + DESTINATION docker/${_dest_path} + COMPONENT ${lower_package_name}) + endforeach(_src_name ${_src_names}) + endmacro(lofar_add_docker_files) + # -------------------------------------------------------------------------- # lofar_add_test(name [source ...] [DEPENDS depend ...]) # diff --git a/LCS/PyCommon/CMakeLists.txt b/LCS/PyCommon/CMakeLists.txt index 126bec92a1ca358382db30f4f921e6348e3a695d..9b653127fbad894a805502c0367ff1beac65de0c 100644 --- a/LCS/PyCommon/CMakeLists.txt +++ b/LCS/PyCommon/CMakeLists.txt @@ -26,6 +26,7 @@ set(_py_files flask_utils.py subprocess_utils.py xmlparse.py + json_utils.py test_utils.py) python_install(${_py_files} DESTINATION lofar/common) diff --git a/LCS/PyCommon/datetimeutils.py b/LCS/PyCommon/datetimeutils.py index 02edc9cd4f1500fb14d1f2d3344e1366da738b75..25733ba9d4be41e7e2064609bb8778a38b89a7ac 100644 --- a/LCS/PyCommon/datetimeutils.py +++ b/LCS/PyCommon/datetimeutils.py @@ -63,10 +63,14 @@ def format_timedelta(td): return '-' + str(-td) return str(td) -def parseDatetime(date_time): +def parseDatetime(date_time: str) -> datetime: """ Parse the datetime format used in LOFAR parsets. """ return datetime.strptime(date_time, ('%Y-%m-%d %H:%M:%S.%f' if '.' in date_time else '%Y-%m-%d %H:%M:%S')) +def formatDatetime(timestamp: datetime) -> str: + """ Format the timestamp as used in LOFAR parsets. """ + return timestamp.strftime(('%Y-%m-%d %H:%M:%S' if timestamp.microsecond == 0 else '%Y-%m-%d %H:%M:%S.%f')) + MDJ_EPOCH = datetime(1858, 11, 17, 0, 0, 0) def to_modified_julian_date(timestamp): diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..aa98861b84a92ad04ef782723add4023b7a3715a --- /dev/null +++ b/LCS/PyCommon/json_utils.py @@ -0,0 +1,49 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. 
Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from jsonschema import validators, Draft6Validator +from copy import deepcopy + +def _extend_with_default(validator_class): + '''see: https://python-jsonschema.readthedocs.io/en/stable/faq/#why-doesn-t-my-schema-s-default-property-set-the-default-on-my-instance''' + validate_properties = validator_class.VALIDATORS["properties"] + + def set_defaults(validator, properties, instance, schema): + for property, subschema in properties.items(): + if "default" in subschema: + instance.setdefault(property, subschema["default"]) + for error in validate_properties( + validator, properties, instance, schema, + ): + yield error + + return validators.extend( + validator_class, {"properties" : set_defaults}, + ) + + +_DefaultValidatingDraft6Validator = _extend_with_default(Draft6Validator) + +def get_default_json_object_for_schema(schema: dict) -> dict: + '''return a valid json object for the given schema with all properties with their default values''' + return add_defaults_to_json_object_for_schema({}, schema) + +def add_defaults_to_json_object_for_schema(json_object: dict, schema: dict) -> dict: + '''return a copy of the json object with defaults filled in according to the schema for all the missing properties''' + copy_of_json_object = deepcopy(json_object) + _DefaultValidatingDraft6Validator(schema).validate(copy_of_json_object) + return copy_of_json_object diff --git a/LCS/PyCommon/test/CMakeLists.txt b/LCS/PyCommon/test/CMakeLists.txt index 361ff4368e57f2253124baa60cc613e24c3c0502..57ade8b7a0b8a757cdf6f5a8ce4775f2aec99747 100644 --- a/LCS/PyCommon/test/CMakeLists.txt +++ b/LCS/PyCommon/test/CMakeLists.txt @@ -27,6 +27,7 @@ IF(BUILD_TESTING) lofar_add_test(t_methodtrigger) lofar_add_test(t_util) lofar_add_test(t_test_utils) + lofar_add_test(t_json_utils) lofar_add_test(t_cep4_utils) lofar_add_test(t_postgres) ENDIF() \ No newline at end of file diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py index c108713b30a1ac8dd632db729e20d9fcff74efef..6ff9dc07e9f2bfdd4a811547e956cac24de87baf 100755 --- a/LCS/PyCommon/test/postgres.py +++ b/LCS/PyCommon/test/postgres.py @@ -29,6 +29,7 @@ import testing.postgresql from lofar.common.dbcredentials import Credentials from lofar.common.postgres import PostgresDatabaseConnection from lofar.common.testing.dbcredentials import TemporaryCredentials +from lofar.common.util import find_free_port class PostgresTestDatabaseInstance(): ''' A helper class which instantiates a running postgres server (not interfering with any other test/production postgres servers) Derive your own sub-class and implement apply_database_schema with your own sql schema to setup your type of database. 
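+        A free port at or above preferred_port is picked via find_free_port, so multiple test instances can run side by side.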
''' - def __init__(self, user: str = 'test_user') -> None: + def __init__(self, user: str = 'test_user', preferred_port: int=5444) -> None: self._postgresql = None self.tmp_creds = TemporaryCredentials(user=user) + self.tmp_creds.dbcreds.port = preferred_port def __enter__(self): '''create/instantiate the postgres server''' @@ -66,7 +68,9 @@ class PostgresTestDatabaseInstance(): '''instantiate the isolated postgres server''' logger.info('creating test-database instance...') - self._postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True)() + factory = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) + factory.settings['port'] = find_free_port(self.tmp_creds.dbcreds.port) + self._postgresql = factory() # fill credentials with the dynamically created postgress instance (e.g. port changes for each time) dsn = self._postgresql.dsn() diff --git a/LCS/PyCommon/test/t_json_utils.py b/LCS/PyCommon/test/t_json_utils.py new file mode 100755 index 0000000000000000000000000000000000000000..831e7e610de4658c03266c764ae1acc702708956 --- /dev/null +++ b/LCS/PyCommon/test/t_json_utils.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
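+# exercises lofar.common.json_utils: jsonschema validation that fills in schema defaults, for flat and for nested objects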
+ +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(process)s %(threadName)s %(levelname)s %(message)s', level=logging.DEBUG) + +import unittest +from lofar.common.json_utils import get_default_json_object_for_schema + +class TestJSONUtils(unittest.TestCase): + def test_empty_schema_yields_empty_object(self): + schema = {} + json = get_default_json_object_for_schema(schema) + self.assertEqual({}, json) + + def test_schema_for_object_with_plain_properties(self): + schema = {"type":"object", + "default": {}, + "properties": { + "prop_a": { "type": "integer", "default": 42}, + "prop_b": {"type": "number", "default": 3.14} + } + } + + json = get_default_json_object_for_schema(schema) + self.assertEqual({"prop_a": 42, + "prop_b": 3.14}, json) + + def test_schema_for_object_with_nested_objects(self): + schema = {"type":"object", + "default": {}, + "properties": { + "sub_a": {"type": "object", + "default": {}, + "properties": { + "prop_a": {"type": "integer", "default": 42}, + "prop_b": {"type": "number", "default": 3.14} + } + }, + "prop_a": { "type": "integer", "default": 42}, + "prop_b": {"type": "number", "default": 3.14} + } + } + + json = get_default_json_object_for_schema(schema) + self.assertEqual({"sub_a": {"prop_a": 42, + "prop_b": 3.14}, + "prop_a": 42, + "prop_b": 3.14}, json) + +if __name__ == '__main__': + unittest.main() diff --git a/LCS/PyCommon/test/t_json_utils.run b/LCS/PyCommon/test/t_json_utils.run new file mode 100755 index 0000000000000000000000000000000000000000..4ef087e0334221bf9ec7a2cca2a4344047664805 --- /dev/null +++ b/LCS/PyCommon/test/t_json_utils.run @@ -0,0 +1,5 @@ +#!/bin/bash + +source python-coverage.sh +python_coverage_test "*json_utils*" t_json_utils.py + diff --git a/LCS/PyCommon/test/t_json_utils.sh b/LCS/PyCommon/test/t_json_utils.sh new file mode 100755 index 0000000000000000000000000000000000000000..3152fa0d6ed297257abb4959116c25107b5f15d1 --- /dev/null +++ b/LCS/PyCommon/test/t_json_utils.sh @@ -0,0 +1,2 @@ +#!/bin/sh +./runctest.sh t_json_utils diff --git a/LCS/PyCommon/util.py b/LCS/PyCommon/util.py index 20b44d9339dbcd51e5684b02b883349a71b983e8..fb7ad5090e01591c729c52f54451ecb583e28577 100644 --- a/LCS/PyCommon/util.py +++ b/LCS/PyCommon/util.py @@ -204,8 +204,10 @@ def find_free_port(preferred_port: int=0): s.bind(('', preferred_port)) except OSError as e: if e.errno==98: # OSError: [Errno 98] Address already in use - # OS will find a available port with bind option 0 - s.bind(('', 0)) + return find_free_port(preferred_port+1) + # + # # OS will find a available port with bind option 0 + # s.bind(('', 0)) else: raise diff --git a/LCS/pyparameterset/src/__init__.py b/LCS/pyparameterset/src/__init__.py index 0309b12e714afe059a475d0711d96e1ccb1580a4..3f55a4550ae93e414a20015ddadb343173d5d4e6 100755 --- a/LCS/pyparameterset/src/__init__.py +++ b/LCS/pyparameterset/src/__init__.py @@ -153,12 +153,6 @@ class parameterset(PyParameterSet): def adoptDict(self, parms): for (k,v) in list(parms.items()): - # str(container) calls __repr__ on its items, which ends - # badly for us for lists of unicode strings ([u"a"] -> ['ua']). - # We thus stringify the items first. 
- if isinstance(v, list): - v = [str(x) for x in v] - self.replace (str(k), str(v)) # k, v always type string @staticmethod diff --git a/LCU/StationTest/rspctlprobe.py b/LCU/StationTest/rspctlprobe.py index e3acfa3d77b9c90eaf89a9a664ff20ad2f67d845..1d254e6ad388452474771e25d72e34e82f82573f 100755 --- a/LCU/StationTest/rspctlprobe.py +++ b/LCU/StationTest/rspctlprobe.py @@ -113,7 +113,7 @@ def issue_rspctl_command(cmd): out, err = proc.communicate() if proc.returncode == 0: - out = list(filter(None, out.decode().split('\n')))[1:] + out = list(filter(None, out.decode().split('\n'))) return out, err else: raise Exception("Program failed with error: \n" + @@ -143,7 +143,7 @@ def parse_clock_output(out, err): :return: the int value of the clock in Mhz :rtype: int """ - match = re.search("\s*Sample frequency: clock=(\d{3})MHz\s*", out) + match = re.search("\s*Sample frequency: clock=(\d{3})MHz\s*", out[0]) if match: return int(match.group(1)) else: @@ -210,7 +210,7 @@ def parse_rcu_output(out, err): the delay and the attenuation :rtype: dict """ - rcu_values = [_f for _f in out.split('\n') if _f] # It filters empty strings + rcu_values = out[1:] rcu_by_id = {} # list of RCUs listed by ID for rcu_value in rcu_values: @@ -245,7 +245,7 @@ def query_rcu_mode(): return parse_rcu_output(out, err) # -------Subbands -def parse_subbands_output(rcu_values, err): +def parse_subbands_output(out, err): """ Parses the output of rspctl --subbands @@ -264,7 +264,7 @@ def parse_subbands_output(rcu_values, err): the delay and the attenuation :rtype: dict """ - + rcu_values = out[1:] rcu_by_id = {} i_row = 0 @@ -306,7 +306,7 @@ def query_sub_bands_mode(): return parse_subbands_output(out, err) # -------XCSub bands -def parse_xcsub_bands_output(rcu_values, err): +def parse_xcsub_bands_output(out, err): """ Parses the output of rspctl --xcsubbands @@ -342,7 +342,7 @@ def parse_xcsub_bands_output(rcu_values, err): :return: a dict indexed by the rcu board id containing the list of xcsub bands used :rtype: dict """ - + rcu_values= out[1:] rcu_by_id = {} i_row = 0 @@ -387,7 +387,7 @@ def query_xcsub_bands_mode(): return parse_xcsub_bands_output(out, err) # -------Spectral inversion -def parse_spinv_output(board_values, err): +def parse_spinv_output(out, err): """ Parses the output of rspctl --spinv @@ -426,7 +426,7 @@ def parse_spinv_output(board_values, err): the delay and the attenuation :rtype: dict """ - + board_values = out[1:] rcu_by_id = {} for board_value in board_values: temp = board_value.split(":") @@ -738,6 +738,10 @@ def setup_logging(): format = '%(asctime)s - %(name)s: %(message)s', datefmt = "%m/%d/%Y %I:%M:%S %p", level = logging.DEBUG) + + +__MODE_NOT_SET_DEFAULT = -2 + def init(): """ @@ -758,7 +762,7 @@ def setup_command_argument_parser(): parser.add_argument('--wait', type = int, default = [0], nargs = '+') parser.add_argument('--xcsubband', type = str, default = "") parser.add_argument('--loops', type = int, default = 1) - parser.add_argument('--mode', type = int, default = -2) + parser.add_argument('--mode', type = int, default = __MODE_NOT_SET_DEFAULT) return parser def parse_and_execute_command_arguments(): @@ -802,7 +806,9 @@ def parse_and_execute_command_arguments(): mode = program_arguments.mode, add_options = options, output_directory = program_arguments.directory) - set_mode(0) # SWITCH BACK TO MODE 0 AT THE END + if program_arguments.mode != __MODE_NOT_SET_DEFAULT: + # SWITCH BACK TO MODE 0 AT THE END IF MODE SWITCH WAS SET + set_mode(0) except Exception as e: logger.error('error executing 
rspctl: %s', e) logger.error('traceback \n%s', traceback.format_exc()) diff --git a/LTA/ltastorageoverview/test/test_lso_webservice.py b/LTA/ltastorageoverview/test/test_lso_webservice.py index bbd18d7c636d2fcd07fa77c1f25c6900efeab7c4..6b2a545f26b75ba7021ac3b1e0338b94bde881bd 100755 --- a/LTA/ltastorageoverview/test/test_lso_webservice.py +++ b/LTA/ltastorageoverview/test/test_lso_webservice.py @@ -28,7 +28,13 @@ from io import StringIO from lofar.lta.ltastorageoverview import store from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin from lofar.lta.ltastorageoverview.webservice import webservice as webservice -from flask_testing import LiveServerTestCase as FlaskLiveTestCase + +try: + from flask_testing import LiveServerTestCase as FlaskLiveTestCase +except ImportError: + # some platforms/configurations cannot import flask/werkzeug + # exit with special code for now... until we understand why. + exit(3) import logging from lofar.common.test_utils import unit_test, integration_test diff --git a/QA/QA_Service/bin/qa_webservice b/QA/QA_Service/bin/qa_webservice index 6b46dc901ba09fd3f8fa90882025a6f88c47dc4b..4aa9dade16d9470b125ae9241aca419fdd12886b 100755 --- a/QA/QA_Service/bin/qa_webservice +++ b/QA/QA_Service/bin/qa_webservice @@ -18,13 +18,12 @@ # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. from lofar.common.cep4_utils import * -from lofar.common import isProductionEnvironment, isTestEnvironment - -from subprocess import call +from subprocess import check_call, CalledProcessError import socket import logging +import time import signal - +from lofar.common import isProductionEnvironment, isTestEnvironment logger = logging.getLogger(__name__) def kill_zombies(): @@ -32,7 +31,10 @@ cmd = ['pkill', '-9', '-f "python /opt/adder/cluster/webservice.py"'] cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) logger.info('killing any lingering qa webservice service on cep4 head node: %s', ' '.join(cmd)) + # check_call (unlike call) raises CalledProcessError on a non-zero exit code + try: + check_call(cmd) + except CalledProcessError as e: + logger.warning(e) if __name__ == '__main__': @@ -55,10 +57,17 @@ signal.signal(s, signal_handler) - cmd = ['python', '/opt/adder/cluster/webservice.py'] - cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest', ['/data/qa', '/data/share', '/data/scratch', '/data/parsets']) - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + while True: + try: + cmd = ['python', '/opt/adder/cluster/webservice.py'] + cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest', ['/qa', '/data/share', '/data/scratch', '/data/parsets']) + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('starting webservice on cep4 head node: %s', ' '.join(cmd)) - logger.info('starting webservice on cep4 head node: %s', ' '.join(cmd)) + exit(check_call(cmd)) + except CalledProcessError as e: + logger.warning(e) + logger.info("trying to start again in 60 seconds") + time.sleep(60) - exit(call(cmd)) diff --git a/SAS/QPIDInfrastructure/amqp-infra-setup.sh b/SAS/QPIDInfrastructure/amqp-infra-setup.sh index 499c7c1928b6395f662295f49a3d1940c168ed74..6bfd5f0477d6019e37d63e7d61494511d39bbfc2 100755 --- a/SAS/QPIDInfrastructure/amqp-infra-setup.sh +++ b/SAS/QPIDInfrastructure/amqp-infra-setup.sh @@ -29,7 +29,6 @@ if $PROD; then CCU=ccu001.control.lofar MCU=mcu001.control.lofar SCU=scu001.control.lofar - LEXAR=lexar003.offline.lofar MOM_SYSTEM=lcs023.control.lofar MOM_INGEST=lcs029.control.lofar @@ -37,7 +36,6 @@ else 
CCU=ccu199.control.lofar MCU=mcu199.control.lofar SCU=scu199.control.lofar - LEXAR=lexar004.offline.lofar MOM_SYSTEM=lcs028.control.lofar MOM_INGEST=lcs028.control.lofar @@ -55,8 +53,11 @@ qpid-config -b $CCU add queue mom.command --durable qpid-config -b $MOM_SYSTEM add queue mom.command --durable qpid-config -b $CCU add queue mom.importxml --durable qpid-config -b $MOM_SYSTEM add queue mom.importxml --durable +qpid-config -b $MOM_SYSTEM add queue ${PREFIX}mom.task.feedback.dataproducts --durable +qpid-config -b $MOM_SYSTEM add queue ${PREFIX}mom.task.feedback.processing --durable qpid-config -b $MOM_SYSTEM add queue mom-otdb-adapter.importxml --durable qpid-config -b $CCU add queue mom-otdb-adapter.importxml --durable +qpid-route -d route add $MOM_SYSTEM $CCU mom-otdb-adapter.importxml '#' qpid-config -b $MOM_SYSTEM add exchange topic ${PREFIX}lofar.mom.bus --durable qpid-config -b $MOM_INGEST add exchange topic ${PREFIX}lofar.mom.bus --durable qpid-config -b $MOM_SYSTEM add exchange topic ${PREFIX}lofar.mom.command --durable @@ -73,9 +74,6 @@ qpid-route -d route add $MOM_SYSTEM $SCU ${PREFIX}lofar.mom.command '#' qpid-route -d route del $SCU $MOM_SYSTEM ${PREFIX}lofar.mom.notification '#' qpid-route -d dynamic del $SCU $MOM_SYSTEM ${PREFIX}lofar.mom.notification qpid-route -d route add $SCU $MOM_SYSTEM ${PREFIX}lofar.mom.notification '#' -qpid-route -d route del $SCU $LEXAR ${PREFIX}lofar.lta.ingest.notification '#' -qpid-route -d dynamic del $SCU $LEXAR ${PREFIX}lofar.lta.ingest.notification -qpid-route -d route add $SCU $LEXAR ${PREFIX}lofar.lta.ingest.notification '#' qpid-route -d queue del $CCU $MCU '' '${PREFIX}lofar.task.specification.system' qpid-route -d queue add $CCU $MCU '' '${PREFIX}lofar.task.specification.system' qpid-route -d queue del $MOM_SYSTEM $CCU '' '${PREFIX}mom.task.specification.system' diff --git a/SAS/TMSS/CMakeLists.txt b/SAS/TMSS/CMakeLists.txt index a3675526ac19179e8416d82a5d116a3f98ca24d3..b41d67a1d9df1b8a505a793a8485e69c78e30df9 100644 --- a/SAS/TMSS/CMakeLists.txt +++ b/SAS/TMSS/CMakeLists.txt @@ -1,10 +1,10 @@ -lofar_package(TMSS 0.1 DEPENDS PyCommon) +lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset) add_subdirectory(src) add_subdirectory(bin) add_subdirectory(test) add_subdirectory(frontend) - +lofar_add_docker_files(docker-compose-tmss.yml) diff --git a/SAS/TMSS/bin/CMakeLists.txt b/SAS/TMSS/bin/CMakeLists.txt index ccfbaf738589fdf79df74f52cdcaec6e49c30e5a..447e457176ebedda5204f02f318a9f3cf22fb8fd 100644 --- a/SAS/TMSS/bin/CMakeLists.txt +++ b/SAS/TMSS/bin/CMakeLists.txt @@ -2,3 +2,4 @@ lofar_add_bin_scripts(tmss) lofar_add_bin_scripts(tmss_test_database) lofar_add_bin_scripts(tmss_test_ldap) lofar_add_bin_scripts(tmss_test_environment) +lofar_add_bin_scripts(tmss_manage_django) diff --git a/SAS/TMSS/bin/tmss_manage_django b/SAS/TMSS/bin/tmss_manage_django new file mode 100755 index 0000000000000000000000000000000000000000..08d8fce5623e9ffab5c7485504b31df6c03cc61c --- /dev/null +++ b/SAS/TMSS/bin/tmss_manage_django @@ -0,0 +1,24 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + + +from lofar.sas.tmss.manage import main + +if __name__ == "__main__": + main() diff --git a/SAS/TMSS/docker-compose-scu199.yml b/SAS/TMSS/docker-compose-scu199.yml new file mode 100644 index 0000000000000000000000000000000000000000..f6ed576c99a0da4d4ae87346861832e0619ecfa6 --- /dev/null +++ b/SAS/TMSS/docker-compose-scu199.yml @@ -0,0 +1,18 @@ +# This docker-compose is used to run TMSS together with a test OpenID Connect server on the test system (scu199) +version: "3" +services: + web: + image: nexus.cep4.control.lofar:18080/tmss_django:latest + restart: on-failure + env_file: + - ./.env + command: bash -c 'source /opt/lofar/lofarinit.sh && python3 lib64/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8008' + ports: + - "8008:8008" + testprovider: + image: nexus.cep4.control.lofar:18080/tmss_testprovider:latest + restart: unless-stopped + env_file: + - ./.env + ports: + - "8088:8088" \ No newline at end of file diff --git a/SAS/TMSS/docker-compose-tmss.yml b/SAS/TMSS/docker-compose-tmss.yml new file mode 100644 index 0000000000000000000000000000000000000000..51cf7f82a12570cacb097fb5e4d314f1d4b65a80 --- /dev/null +++ b/SAS/TMSS/docker-compose-tmss.yml @@ -0,0 +1,38 @@ +# This docker-compose is an initial setup for running TMSS for development purposes. +# It does not work at the moment because of code dependencies that are higher up in the tree. +version: "3" +services: + tmss_web: + build: + context: .. 
+ dockerfile: docker/Dockerfile-tmss + restart: on-failure + env_file: + - ../.env + command: bash -c 'source /opt/lofar/lofarinit.sh && python3 lib/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8008' + volumes: + - ..:/opt/lofar + ports: + - "8008:8008" + depends_on: + - tmss_db + tmss_db: + image: "postgres:10.3-alpine" + restart: unless-stopped + env_file: + - ../.env + ports: + - "5435:5432" + volumes: + - ./postgres/data:/var/lib/postgresql/data + tmss_testprovider: + build: + context: ../ + dockerfile: docker/oidc/docker-test-mozilla-django-oidc/dockerfiles/oidc_testprovider + restart: unless-stopped + env_file: + - ../.env + ports: + - "8088:8088" + volumes: + - ../../../SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider:/code diff --git a/SAS/TMSS/docker-compose-ua.yml b/SAS/TMSS/docker-compose-ua.yml new file mode 100644 index 0000000000000000000000000000000000000000..92097328364f16615a1a435ad1d3a6b61a046f43 --- /dev/null +++ b/SAS/TMSS/docker-compose-ua.yml @@ -0,0 +1,18 @@ +# This docker-compose is used to run TMSS together with a test OpenID Connect server on the User Acceptance system (tmss-ua) +version: "3" +services: + web: + image: nexus.cep4.control.lofar:18080/tmss_django:latest + restart: on-failure + env_file: + - ./.env + command: bash -c 'source /opt/lofar/lofarinit.sh && python3 lib/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8008' + ports: + - "8008:8008" + testprovider: + image: nexus.cep4.control.lofar:18080/tmss_testprovider:latest + restart: unless-stopped + env_file: + - ./.env + ports: + - "8088:8088" diff --git a/SAS/TMSS/docker/tmss-testenv/docker-compose.yml b/SAS/TMSS/docker/tmss-testenv/docker-compose.yml index a0aff9ea3abb8d5d73918ad6015defefb1fe6c47..2aeb413ed581870c6ef1943e7425fc1e1df9bf61 100644 --- a/SAS/TMSS/docker/tmss-testenv/docker-compose.yml +++ b/SAS/TMSS/docker/tmss-testenv/docker-compose.yml @@ -1,4 +1,4 @@ -version: '3' +version: "2" services: tmss_test: build: @@ -8,6 +8,11 @@ services: ports: - "8008:8008" - "3003:3003" + networks: + - "tmss" + environment: + - OIDC_ENDPOINT_HOST=172.25.0.10 + - OIDC_RP_CLIENT_ID=1 tmss_test_nginx: build: context: . 
@@ -15,4 +20,23 @@ services: container_name: tmss_test_nginx ports: - "5005:5005" - + networks: + - "tmss" + tmss_test_oidc: + build: + context: ../../test/oidc/docker-test-mozilla-django-oidc/ + dockerfile: dockerfiles/oidc_testprovider + container_name: tmss_test_oidc + ports: + - "8088:8088" + networks: + tmss: + ipv4_address: 172.25.0.10 +networks: + tmss: + driver: bridge + ipam: + driver: default + config: + - subnet: 172.25.0.0/16 + gateway: 172.25.0.1 diff --git a/SAS/TMSS/docker/tmss-testenv/nginx.conf b/SAS/TMSS/docker/tmss-testenv/nginx.conf index 66d9a68ca353a00841ade2a9273ac7c607648007..2bf331144da5267e98697b7e033f5d9476d8c4cc 100644 --- a/SAS/TMSS/docker/tmss-testenv/nginx.conf +++ b/SAS/TMSS/docker/tmss-testenv/nginx.conf @@ -40,5 +40,9 @@ http { proxy_pass http://tmss_test:8008; } + location /openid/ { + proxy_pass http://tmss_test_oidc:8088; + } + } } \ No newline at end of file diff --git a/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile b/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile index 51319605fd0a2d14e5ff3e6e7a5849f23768b2a2..43fb3c6b42a3bc27f9f9777b0417320dd0caea8d 100644 --- a/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile +++ b/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile @@ -21,7 +21,7 @@ ENV PATH=$PATH:/usr/pgsql-9.4/bin/ RUN echo "Checking out code base" && \ git clone https://git.astron.nl/ro/lofar.git && \ cd lofar && \ - git checkout TMSS-146 && \ + git checkout master && \ . CMake/gen_LofarPackageList_cmake.sh && \ PACKAGE=TMSS && \ VARIANT=gnucxx11_opt && \ diff --git a/SAS/TMSS/src/CMakeLists.txt b/SAS/TMSS/src/CMakeLists.txt index ec457abffce2e5d12a3b73ed7fdd88e4868343cd..fd5a8389a74c27f43c3def1fadb5a87813d9212f 100644 --- a/SAS/TMSS/src/CMakeLists.txt +++ b/SAS/TMSS/src/CMakeLists.txt @@ -16,6 +16,7 @@ find_python_module(coreapi REQUIRED) # sudo apt-get install python3-c find_python_module(django_jsonforms REQUIRED) # pip3 install django-jsonforms find_python_module(django_json_widget REQUIRED) # pip3 install django-json-widget find_python_module(jsoneditor REQUIRED) # pip3 install django-jsoneditor +find_python_module(jsonschema REQUIRED) # pip3 install jsonschema # modules for swagger API export find_python_module(drf_yasg REQUIRED) # pip install drf-yasg @@ -32,4 +33,7 @@ python_install(${_py_files} lofar_add_bin_scripts(migrate_momdb_to_tmss.py) +lofar_add_docker_files(Dockerfile-tmss) + add_subdirectory(tmss) +add_subdirectory(templates) diff --git a/SAS/TMSS/src/Dockerfile-tmss b/SAS/TMSS/src/Dockerfile-tmss new file mode 100644 index 0000000000000000000000000000000000000000..8af44538aa0aaa703b74711bf979caaeef442e4e --- /dev/null +++ b/SAS/TMSS/src/Dockerfile-tmss @@ -0,0 +1,30 @@ +# Use an official Python runtime as a parent image +FROM python:3.6 + +RUN apt-get -y update && apt-get -y upgrade + +# LOFAR checkout and compile dependencies +RUN apt-get -y update && apt-get -y install make cmake g++ subversion python3 git + +# LOFAR build dependencies +RUN apt-get -y update && apt-get -y install liblog4cplus-dev python3-dev libldap2-dev libsasl2-dev +RUN apt-get -y update && apt-get -y install python3-pip && pip3 install django djangorestframework django-filter django-auth-ldap coreapi python-ldap-test django-jsonforms django-json-widget "git+git://github.com/nnseva/django-jsoneditor.git" psycopg2-binary markdown ldap3 drf-yasg flex swagger-spec-validator mozilla_django_oidc + +# Adding backend directory to make absolute filepaths consistent across services +WORKDIR /opt/lofar + +ENV LOFARROOT=/opt/lofar + +# Add the rest 
of the code +COPY ./installed /opt/lofar + +# Make port 8000 available for the app +EXPOSE 8000 + +RUN sed -i "s/lfr_root=.*/lfr_root=\/opt\/lofar/g" /opt/lofar/lofarinit.sh + +# Be sure to use 0.0.0.0 for the host within the Docker container, +# otherwise the browser won't be able to find it +# CMD python3 manage.py runserver 0.0.0.0:8000 +# CMD bash -c 'source /opt/lofar/lofarinit.sh && tmss' +CMD bash -c 'source /opt/lofar/lofarinit.sh && python3 lib/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8000' \ No newline at end of file diff --git a/SAS/TMSS/src/manage.py b/SAS/TMSS/src/manage.py index d21a556e0474f6ee732b3c2834ef35d2e1546cad..d828e31836196fec9ecad451f58233b07319c055 100755 --- a/SAS/TMSS/src/manage.py +++ b/SAS/TMSS/src/manage.py @@ -15,13 +15,13 @@ def subscribe_to_signals(): signal.signal(s, signal_handler) -if __name__ == "__main__": +def main(): # do subscribe to more signals than django does for proper exits during testing if os.environ.get('TMSS_RAISE_ON_SIGNALS', "False").lower() in ["true", "1", "on"]: subscribe_to_signals() # normal django startup. Specify the DJANGO_SETTINGS_MODULE, and run it. - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tmss.settings") + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lofar.sas.tmss.tmss.settings") try: from django.core.management import execute_from_command_line @@ -32,3 +32,6 @@ if __name__ == "__main__": "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/SAS/TMSS/src/remakemigrations.py b/SAS/TMSS/src/remakemigrations.py index 9decd35f5d7e8d193060f2528c332df7bedd8a61..18fa92e29f2e09649bcdce1499673063dcd4d526 100755 --- a/SAS/TMSS/src/remakemigrations.py +++ b/SAS/TMSS/src/remakemigrations.py @@ -5,13 +5,14 @@ from glob import glob import subprocess as sp import logging + logger = logging.getLogger(__file__) here = os.path.dirname(__file__) relapath = '/tmss/tmssapp/migrations/' template = """ from django.db import migrations -from ..populate import * +from lofar.sas.tmss.tmss.tmssapp.populate import * class Migration(migrations.Migration): diff --git a/SAS/TMSS/src/templates/CMakeLists.txt b/SAS/TMSS/src/templates/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..f9bad3202d61419814c3857778c4d6900d74e556 --- /dev/null +++ b/SAS/TMSS/src/templates/CMakeLists.txt @@ -0,0 +1,7 @@ +set(_template_files + josdejong_jsoneditor_widget.html) + +python_install(${_template_files} + DESTINATION lofar/sas/tmss/templates) + +add_subdirectory(rest_framework) diff --git a/SAS/TMSS/src/templates/__react_jsonschema_form_widget__schema_uri.html b/SAS/TMSS/src/templates/__react_jsonschema_form_widget__schema_uri.html deleted file mode 100644 index 0a19594dadca5cc20a0a9be13c071518733152bb..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/templates/__react_jsonschema_form_widget__schema_uri.html +++ /dev/null @@ -1,87 +0,0 @@ - -<!-- EXTERNAL RESOURCES --> -<!--<link rel="stylesheet" id="theme" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">--> -<script src="https://unpkg.com/react@16/umd/react.development.js"></script> -<script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script> -<script src="https://unpkg.com/babel-standalone@6.15.0/babel.min.js"></script> -<script src="https://unpkg.com/react-jsonschema-form/dist/react-jsonschema-form.js"></script> - - -<!-- LOGIC --> -<script type="text/babel"> - - const 
{ Component } = React; - const { render } = ReactDOM; - const {default: Form} = JSONSchemaForm; - const log = (type) => console.log.bind(console, type); - - // Read current document provided by Django. Change 'null' default to {} - var formData = {{ field.value|safe }}; - if (formData === "null" || formData === null){ - var formData = {} - } - console.log("Initial data: " + JSON.stringify(formData)); - - // Updates the hidden input that Django will read the modified JSON from - function set_return_doc(doc){ - document.getElementById("helper_input").value = JSON.stringify(doc.formData); - // console.log(JSON.stringify(doc.formData)); - } - - // Default live validation looks pretty nice in the demo, does not show here for some reason. - // Custom validation function works as such: - function validate(formData, errors) { - // todo: check formData against schema by some external script or so - // if(valid) ( - errors.relative_starttime.addError("The horror!"); - // } - return errors; - } - - // This let's you change the appearance of the individual fields. - // Haven't found a global switch for fields of certain type yet. - const uiSchema = { - relative_starttime: { "ui:widget": "range" } - }; - - // Read schema uri that was specified by Serializer - const template_schema_uri = "{{field.style.template_schema_uri}}"; - console.log('Loading template from ' + template_schema_uri); - - // Get the schema, then render the form with it - $.get(template_schema_uri, function( template ) { - const schema = template['schema']; - console.log('Schema is: ' + JSON.stringify(schema)); - console.log('Rendering JSON form'); - render(( - <Form schema={schema} - uiSchema={uiSchema} - formData={formData} - liveValidate={true} - //validate={validate} // does not work?! - onChange={set_return_doc} - onSubmit={log("submitted")} - onError={log("errors")} /> - ), document.getElementById("app")); - }); - console.log('Rendering done'); - -</script> - - -<!-- WIDGET HTML --> -<div class="form-group {% if field.errors %}has-error{% endif %}"> - - <label class="col-sm-2 control-label"> - {% if field.label %}{{ field.label }}{% endif %} - </label> - - <input id="helper_input" name="{{ field.name }}" type="hidden" class="form-control" {% if field.value != None %} value="{{ field.value|safe }}"{% endif %}"> - - <div class="col-sm-10"> - <div id="app"></div> - </div> - - <!--<pre> {% debug %} </pre>--> - -</div> \ No newline at end of file diff --git a/SAS/TMSS/src/templates/json_editor.html b/SAS/TMSS/src/templates/json_editor.html deleted file mode 100644 index 1ee2e451068a5ee14f72a59cac2e9230cf31da8a..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/templates/json_editor.html +++ /dev/null @@ -1,31 +0,0 @@ -<!DOCTYPE html> -<html> -<head> - <meta charset="utf-8"/> - <script src="https://cdn.jsdelivr.net/npm/@json-editor/json-editor/dist/jsoneditor.min.js"></script> -</head> -<body> - -<div id='app'></div> - -<script> - var element = document.getElementById('app'); - var schema = { - "type": "array", - "title": "Numbers", - "items": { - "type": "number", - "format": "number", - "title": "Number" - } - }; - var editor = new JSONEditor(element, { - schema: schema - }); - document.querySelector('.get-value').addEventListener('click', function () { - debug.value = JSON.stringify(editor.getValue()); - }); -</script> - -</body> -</html> diff --git a/SAS/TMSS/src/templates/json_editor_widget.html b/SAS/TMSS/src/templates/json_editor_widget.html deleted file mode 100644 index 
a8aeb6196e5d3d041a05fd9049f94a48f510d929..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/templates/json_editor_widget.html +++ /dev/null @@ -1,65 +0,0 @@ - -<!-- EXTERNAL RESOURCES --> -<!--<link rel="stylesheet" id="theme" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">--> -<!--<script src="https://unpkg.com/react@16/umd/react.development.js"></script>--> -<!--<script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script>--> -<!--<script src="https://unpkg.com/babel-standalone@6.15.0/babel.min.js"></script>--> -<script src="https://cdn.jsdelivr.net/npm/@json-editor/json-editor/dist/jsoneditor.min.js"></script> - -<!-- WIDGET HTML --> -<div class="form-group {% if field.errors %}has-error{% endif %}"> - - <label class="col-sm-2 control-label"> - {% if field.label %}{{ field.label }}{% endif %} - </label> - - <input id="helper_input" name="{{ field.name }}" type="hidden" class="form-control" {% if field.value != None %} value="{{ field.value|safe }}"{% endif %}"> - - <div class="col-sm-10"> - <div id="app"></div> - </div> - - <!--<pre> {% debug %} </pre>--> - -</div> - - - -<!-- LOGIC --> -<script type="text/javascript"> - - // Read current document provided by Django. Change 'null' default to {} - var formData = {{ field.value|safe }}; - if (formData === "null" || formData === null){ - var formData = {} - } - console.log("Initial data: " + JSON.stringify(formData)); - - // Read schema (added to widget style by Serializer) - var schema = {{ field.style.schema|safe }}; - console.log('Schema is: ' + JSON.stringify(schema)); - - // render the widget - console.log('Rendering JSON form'); - - JSONEditor.defaults.options.theme = 'bootstrap3'; - JSONEditor.defaults.options.iconlib = "bootstrap3"; - - var element = document.getElementById('app'); - var editor = new JSONEditor(element,{ - schema: schema - }); - - editor.setValue(formData); - console.log('Rendering done'); - - // Updates the hidden input that Django will read the modified JSON from - editor.on('change',function() { - var value = editor.getValue(); - document.getElementById("helper_input").value = JSON.stringify(value); - //console.log(JSON.stringify(value)); - }); - - console.log(value) - -</script> diff --git a/SAS/TMSS/src/templates/react_jsonschema_form.html b/SAS/TMSS/src/templates/react_jsonschema_form.html deleted file mode 100644 index f80d7b18fe3b25f071a396d62a1b88a42e5f0882..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/templates/react_jsonschema_form.html +++ /dev/null @@ -1,606 +0,0 @@ -<html> -<head> - <title>TMSS JSON EDITOR TEST</title> - <meta charset="utf-8"> - <link rel="stylesheet" id="theme" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css"> - <script src="https://unpkg.com/react@16/umd/react.development.js"></script> - <script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script> - <script src="https://unpkg.com/babel-standalone@6.15.0/babel.min.js"></script> - <script src="https://unpkg.com/react-jsonschema-form/dist/react-jsonschema-form.js"></script> -</head> -<body> -<div id="app"></div> -<script type="text/javascript"> -const schema = { - "$id": "http://example.com/example.json", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "definitions": { - "pointing": { - "type": "object", - "additionalProperties": false, - "format": "grid", - "properties": { - "direction_type": { - "type": "string", - "title": "Reference frame", - "description": "", - "default": 
"J2000", - "enum": [ - "J2000", - "SUN", - "MOON", - "MERCURY", - "VENUS", - "MARS", - "JUPITER", - "SATURN", - "URANUS", - "NEPTUNE", - "PLUTO" - ] - }, - "angle1": { - "type": "number", - "title": "Angle 1", - "description": "First angle (f.e. RA)", - "default": 0 - }, - "angle2": { - "type": "number", - "title": "Angle 2", - "description": "Second angle (f.e. DEC)", - "default": 0 - }, - "distance": { - "type": "number", - "title": "Distance", - "description": "", - "default": 0, - "minimum": 0, - "required": false - } - } - }, - "beamformer_settings": { - "oneOf": [ - { - "type": "object", - "title": "Disabled", - "additionalProperties": false, - "format": "grid", - "properties": {} - }, - { - "type": "object", - "title": "Enabled", - "additionalProperties": false, - "format": "grid", - "properties": { - "enabled": { - "type": "boolean", - "format": "checkbox", - "title": "Enabled", - "description": "", - "default": true, - "options": { - "hidden": true - }, - "enum": [ - true - ] - }, - "storage_cluster": { - "type": "string", - "title": "Storage cluster", - "description": "Which cluster to write the data to", - "default": "CEP4", - "enum": [ - "CEP4", - "DragNet" - ] - }, - "channels_per_subband": { - "type": "integer", - "title": "Channels/subband", - "description": "Number of frequency bands per subband", - "default": 1, - "enum": [ - 1, - 16, - 32, - 64, - 128 - ] - }, - "stokes": { - "type": "string", - "title": "Stokes parameters", - "description": "Which stokes to produce", - "default": "I", - "enum": [ - "I", - "IQUV", - "XXYY" - ] - }, - "integration_factor": { - "type": "integer", - "title": "Integration factor (samples)", - "description": "Number of samples to integrate", - "default": 1, - "minimum": 1 - }, - "subbands_per_file": { - "type": "integer", - "title": "Subbands/file", - "description": "Fewer creates more but smaller files", - "default": 512, - "minimum": 1, - "maximum": 512 - } - } - } - ] - } - }, - "format": "grid", - "additionalProperties": false, - "properties": { - "relative_starttime": { - "type": "number", - "title": "Relative starttime (seconds)", - "description": "Starttime with respect to other observations in this scheduling unit", - "default": 0, - "minimum": 0 - }, - "duration": { - "type": "number", - "title": "Duration (seconds)", - "description": "Duration of this observation", - "default": 60, - "minimum": 0 - }, - "station_mode": { - "type": "object", - "title": "Station mode", - "format": "grid", - "additionalProperties": false, - "properties": { - "bitmode": { - "type": "integer", - "title": "Bits/sample", - "description": "Sample sensitivity. 
Fewer bits allow more subbands.", - "default": 8, - "enum": [ - 4, - 8, - 16 - ] - }, - "clockMHz": { - "type": "integer", - "title": "Clock (MHz)", - "description": "Sample clock", - "default": 200, - "enum": [ - 160, - 200 - ] - }, - "antenna_type": { - "type": "string", - "title": "Antenna type", - "description": "Determines frequency range and offset of subbands", - "default": "HBA", - "enum": [ - "LBA", - "HBA" - ] - }, - "filter": { - "type": "string", - "title": "Band-pass filter", - "description": "Must match antenna type and clock", - "default": "HBA_110_190", - "enum": [ - "LBA_10_70", - "LBA_30_70", - "LBA_10_90", - "LBA_30_90", - "HBA_110_190", - "HBA_170_230", - "HBA_210_250" - ] - } - } - }, - "stations": { - "type": "object", - "title": "Station list", - "additionalProperties": false, - "properties": { - "station_list": { - "title": "Station list", - "oneOf": [ - { - "type": "array", - "title": "Fixed list", - "format": "checkbox", - "additionalItems": false, - "additionalProperties": false, - "items": { - "type": "string", - "enum": [ - "CS001", - "CS002", - "CS003", - "CS004", - "CS005", - "CS006", - "CS007", - "CS011", - "CS013", - "CS017", - "CS021", - "CS024", - "CS026", - "CS028", - "CS030", - "CS031", - "CS032", - "CS101", - "CS103", - "CS201", - "CS301", - "CS302", - "CS401", - "CS501", - "RS104", - "RS106", - "RS205", - "RS208", - "RS210", - "RS305", - "RS306", - "RS307", - "RS310", - "RS406", - "RS407", - "RS409", - "RS410", - "RS503", - "RS508", - "RS509", - "DE601", - "DE602", - "DE603", - "DE604", - "DE605", - "FR606", - "SE607", - "UK608", - "DE609", - "PL610", - "PL611", - "PL612", - "IE613" - ], - "title": "Station", - "description": "" - }, - "minItems": 1, - "uniqueItems": true - }, - { - "title": "Dynamic list", - "type": "array", - "format": "tabs", - "additionalItems": false, - "items": { - "type": "object", - "format": "grid", - "title": "Station set", - "headerTemplate": "{{ self.group }}", - "additionalProperties": false, - "properties": { - "group": { - "type": "string", - "title": "Group/station", - "description": "Which (group of) station(s) to search in", - "default": "LOFAR" - }, - "min_stations": { - "type": "integer", - "title": "Minimum nr of stations", - "description": "Number of stations to use within group/station", - "default": 1, - "minimum": 0 - } - } - } - } - ] - }, - "antenna_set": { - "type": "string", - "title": "Antenna set", - "description": "Must match antenna type", - "default": "HBA_DUAL", - "enum": [ - "HBA_DUAL", - "HBA_DUAL_INNER", - "HBA_JOINED", - "HBA_JOINED_INNER", - "HBA_ONE", - "HBA_ONE_INNER", - "HBA_ZERO", - "HBA_ZERO_INNER", - "LBA_INNER", - "LBA_OUTER", - "LBA_SPARSE_EVEN", - "LBA_SPARSE_ODD" - ] - }, - "analog_pointing": { - "title": "Analog pointing", - "description": "HBA only", - "$ref": "#/definitions/pointing", - "required": [] - } - } - }, - "beams": { - "type": "array", - "title": "Beams", - "format": "tabs", - "additionalItems": false, - "items": { - "title": "Beam", - "headerTemplate": "{{ i0 }} - {{ self.name }}", - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "title": "Name/target", - "description": "Identifier for this beam", - "default": "" - }, - "digital_pointing": { - "title": "Digital pointing", - "$ref": "#/definitions/pointing" - }, - "subbands": { - "type": "array", - "title": "Subband list", - "format": "table", - "additionalItems": false, - "items": { - "type": "integer", - "title": "Subband", - "minimum": 0 - } - }, - "correlator": { 
- "type": "object", - "title": "Correlator", - "additionalProperties": false, - "required": [], - "properties": { - "pointing": { - "$ref": "#/definitions/pointing" - } - } - }, - "coherent_beamformer": { - "type": "object", - "title": "Coherent Beamformer", - "additionalProperties": false, - "properties": { - "pointings": { - "type": "array", - "format": "tabs", - "title": "Pointings", - "additionalItems": false, - "items": { - "$ref": "#/definitions/pointing", - "title": "Pointing", - "headerTemplate": "{{ i0 }} - ({{ self.angle1 }}, {{ self.angle2 }})" - } - }, - "rings": { - "type": "object", - "title": "TAB ring configuration", - "format": "grid", - "additionalProperties": false, - "properties": { - "number": { - "type": "integer", - "title": "Number of rings", - "description": "", - "default": 0, - "minimum": 0, - "maximum": 10 - }, - "size": { - "type": "number", - "title": "Distance between rings (degrees)", - "description": "RA/DEC distance", - "default": 1, - "minimum": 0 - } - } - } - } - } - } - } - }, - "COBALT": { - "type": "object", - "title": "COBALT correlator/beamformer", - "additionalProperties": false, - "properties": { - "blocksize": { - "type": "integer", - "title": "Block size (samples)", - "description": "Size of blocks COBALT works on, must be a multiple of all processing requirements", - "default": 196608, - "minimum": 97656, - "maximum": 292968, - "required": [] - }, - "delay_compensation": { - "type": "boolean", - "format": "checkbox", - "title": "Apply delay compensation", - "description": "Compensate for geometric and clock differences", - "default": false, - "required": [] - }, - "bandpass_correction": { - "type": "boolean", - "format": "checkbox", - "title": "Apply band-pass correction", - "description": "Compensate for differences in station sensitivity within a subband", - "default": false, - "required": [] - }, - "correlator": { - "title": "Correlator", - "oneOf": [ - { - "type": "object", - "title": "Enabled", - "format": "grid", - "additionalProperties": false, - "properties": { - "enabled": { - "type": "boolean", - "format": "checkbox", - "title": "Enabled", - "description": "", - "default": true, - "options": { - "hidden": true - }, - "enum": [ - true - ] - }, - "storage_cluster": { - "type": "string", - "title": "Storage cluster", - "description": "Which cluster to write the data to", - "default": "CEP4", - "enum": [ - "CEP4", - "DragNet" - ] - }, - "channels_per_subband": { - "type": "integer", - "title": "Channels/subband", - "description": "Number of frequency bands per subband", - "default": 64, - "minimum": 1, - "enum": [ - 1, - 8, - 16, - 32, - 64, - 128, - 256, - 512, - 1024 - ] - }, - "blocks_per_integration": { - "type": "integer", - "title": "Blocks per integration", - "description": "Number of blocks to integrate", - "default": 0, - "minimum": 0, - "required": [] - }, - "integrations_per_block": { - "type": "integer", - "title": "Integrations per block", - "description": "Number of integrations to fit within each block", - "default": 0, - "minimum": 0, - "required": [] - }, - "integration_time": { - "type": "number", - "title": "Integration time (seconds)", - "description": "Desired integration period", - "default": 1, - "minimum": 0.1 - } - } - }, - { - "type": "object", - "title": "Disabled", - "additionalProperties": false, - "format": "grid", - "properties": {} - } - ] - }, - "beamformer": { - "type": "object", - "title": "Beamformer", - "additionalProperties": false, - "properties": { - "station_subset": { - "type": "string", - 
"title": "Station subset", - "description": "Subset filter of stations to use for beamforming", - "default": "", - "required": [] - }, - "coherent": { - "title": "Coherent beamformer", - "$ref": "#/definitions/beamformer_settings" - }, - "incoherent": { - "title": "Incoherent beamformer", - "$ref": "#/definitions/beamformer_settings" - }, - "flys_eye": { - "title": "Fly's Eye", - "$ref": "#/definitions/beamformer_settings" - } - } - } - } - } - } -}; -</script> -<script crossorigin type="text/babel"> - const { Component } = React; -const { render } = ReactDOM; -const {default: Form} = JSONSchemaForm; - -const log = (type) => console.log.bind(console, type); - -const onSubmit = ({formData}) => console.log("Data submitted: " + JSON.stringify(formData)); - -const formData = {"relative_starttime":"bla","duration":120,"station_mode":{"bitmode":4, "clockMHz":200,"antenna_type":"HBA","filter":"HBA_110_190"},"stations":{"antenna_set":"HBA_DUAL","analog_pointing":{"direction_type":"MOON","angle1":0,"angle2":0,"distance":0}},"COBALT":{"blocksize":196608,"delay_compensation":false,"bandpass_correction":false,"beamformer":{"station_subset":""}}}; - -render(( - <Form schema={schema} - formData={formData} - onChange={log("changed")} - onSubmit={onSubmit} - onError={log("errors")} /> -), document.getElementById("app")); -</script> -</body> -</html> - diff --git a/SAS/TMSS/src/templates/react_jsonschema_form_widget.html b/SAS/TMSS/src/templates/react_jsonschema_form_widget.html deleted file mode 100644 index c249ad6c4e9b0f38f0f71e5ae32c54757698fe12..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/templates/react_jsonschema_form_widget.html +++ /dev/null @@ -1,83 +0,0 @@ - -<!-- EXTERNAL RESOURCES --> -<!--<link rel="stylesheet" id="theme" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">--> -<script src="https://unpkg.com/react@16/umd/react.development.js"></script> -<script src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script> -<script src="https://unpkg.com/babel-standalone@6.15.0/babel.min.js"></script> -<script src="https://unpkg.com/react-jsonschema-form/dist/react-jsonschema-form.js"></script> - - -<!-- LOGIC --> -<script type="text/babel"> - - const { Component } = React; - const { render } = ReactDOM; - const {default: Form} = JSONSchemaForm; - const log = (type) => console.log.bind(console, type); - - // Read current document provided by Django. Change 'null' default to {} - var formData = {{ field.value|safe }}; - if (formData === "null" || formData === null){ - var formData = {} - } - console.log("Initial data: " + JSON.stringify(formData)); - - // Updates the hidden input that Django will read the modified JSON from - function set_return_doc(doc){ - document.getElementById("helper_input").value = JSON.stringify(doc.formData); - // console.log(JSON.stringify(doc.formData)); - } - - // Default live validation looks pretty nice in the demo, does not show here for some reason. - // Custom validation function works as such: - function validate(formData, errors) { - // todo: check formData against schema by some external script or so - // if(valid) ( - errors.relative_starttime.addError("The horror!"); - // } - return errors; - } - - // This let's you change the appearance of the individual fields. - // Haven't found a global switch for fields of certain type yet. 
- const uiSchema = { - relative_starttime: { "ui:widget": "range" } - }; - - // Read schema (added to widget style by Serializer) - const schema = {{ field.style.schema|safe }}; - console.log('Schema is: ' + JSON.stringify(schema)); - - //Render the form - console.log('Rendering JSON form'); - render(( - <Form schema={schema} - uiSchema={uiSchema} - formData={formData} - liveValidate={true} - //validate={validate} // does not work?! - onChange={set_return_doc} - onSubmit={log("submitted")} - onError={log("errors")} /> - ), document.getElementById("app")); - console.log('Rendering done'); - -</script> - - -<!-- WIDGET HTML --> -<div class="form-group {% if field.errors %}has-error{% endif %}"> - - <label class="col-sm-2 control-label"> - {% if field.label %}{{ field.label }}{% endif %} - </label> - - <input id="helper_input" name="{{ field.name }}" type="hidden" class="form-control" {% if field.value != None %} value="{{ field.value|safe }}"{% endif %}"> - - <div class="col-sm-10"> - <div id="app"></div> - </div> - - <!--<pre> {% debug %} </pre>--> - -</div> \ No newline at end of file diff --git a/SAS/TMSS/src/templates/rest_framework/CMakeLists.txt b/SAS/TMSS/src/templates/rest_framework/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b8a613d8ce88fa1ed844c18143f74b4656e9b9d --- /dev/null +++ b/SAS/TMSS/src/templates/rest_framework/CMakeLists.txt @@ -0,0 +1,5 @@ +set(_template_files + api.html) + +python_install(${_template_files} + DESTINATION lofar/sas/tmss/templates/rest_framework) diff --git a/SAS/TMSS/src/templates/rest_framework/api.html b/SAS/TMSS/src/templates/rest_framework/api.html index 36a74512e955026bed46b1785651d0632f2df408..b48c8a16e7d750ef39dd6d2f7c282689150a8a1f 100644 --- a/SAS/TMSS/src/templates/rest_framework/api.html +++ b/SAS/TMSS/src/templates/rest_framework/api.html @@ -9,10 +9,11 @@ {% block userlinks %} {% if request.user.is_authenticated %} - <p>Current user: {{ request.user.email }}</p> + <p>Current user: {{ request.user.email }}</p> + <form action="{% url 'oidc_logout' %}" method="post">{% csrf_token %} + <input type="submit" value="logout"> + </form> {% else %} <a href="{% url 'oidc_authentication_init' %}">Login</a> {% endif %} {% endblock %} - - diff --git a/SAS/TMSS/src/tmss/CMakeLists.txt b/SAS/TMSS/src/tmss/CMakeLists.txt index f0786d66c89c5f3980f4034cb64fa408084c00e1..a38c2b149ed20a69a4ae3376365d869db9c1990e 100644 --- a/SAS/TMSS/src/tmss/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/CMakeLists.txt @@ -6,6 +6,7 @@ set(_py_files settings.py urls.py wsgi.py + exceptions.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..7d100f0daa3413dd501f51e2c66c46032022e81c --- /dev/null +++ b/SAS/TMSS/src/tmss/exceptions.py @@ -0,0 +1,9 @@ + +class TMSSException(Exception): + pass + +class SpecificationException(TMSSException): + pass + +class ConversionException(TMSSException): + pass diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index af27ed44e3b73401302dd6ccb13e26d7d4f2797a..1cad3faa07a41a9fdfe8cf0018aacef2be6e86fc 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -68,17 +68,15 @@ LOGGING = { } # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = '-&$!kx$_0)u1x#zk9w^^81hfssaover2(8wdq_8n8n3u(8=-9n' # todo: set something new here for production !!! - +SECRET_KEY = os.getenv('SECRET_KEY', '-&$!kx$_0)u1x#zk9w^^81hfssaover2(8wdq_8n8n3u(8=-9n') # todo: set something new here for production !!! # SECURITY WARNING: don't run with debug turned on in production! -DEBUG = True - -ALLOWED_HOSTS = [] +DEBUG = bool(int(os.getenv('DEBUG', True))) +ALLOWED_HOSTS = os.getenv('ALLOWED_HOSTS').split(',') if os.getenv('ALLOWED_HOSTS') else [] # Application definition @@ -147,23 +145,36 @@ WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application' # port = 5432 # user = <your_tmss_user_name> # password = <your_tmss_password> - -creds_name = os.environ.get('TMSS_DBCREDENTIALS', 'tmss') -django_db_credentials = dbcredentials.DBCredentials().get(creds_name) -logger.debug("TMSS Django settings: Using dbcreds '%s' for django database: %s", - creds_name, django_db_credentials.stringWithHiddenPassword()) - DATABASES = { 'default': { # Postgres: 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': django_db_credentials.database, - 'USER': django_db_credentials.user, - 'PASSWORD': django_db_credentials.password, - 'HOST': django_db_credentials.host, - 'PORT': django_db_credentials.port, - } + 'NAME': '', + 'USER': '', + 'PASSWORD': '', + 'HOST': '', + 'PORT': '', } +} + +if "POSTGRES_HOST" in os.environ.keys(): + DATABASES['default']['HOST'] = os.getenv('POSTGRES_HOST') + DATABASES['default']['PORT'] = os.getenv('POSTGRES_PORT') + DATABASES['default']['NAME'] = os.getenv('POSTGRES_DB') + DATABASES['default']['USER'] = os.getenv('POSTGRES_USER') + DATABASES['default']['PASSWORD'] = os.getenv('POSTGRES_PASSWORD') +else: + from lofar.common import dbcredentials, isDevelopmentEnvironment + + creds_name = os.environ.get('TMSS_DBCREDENTIALS', 'tmss') + django_db_credentials = dbcredentials.DBCredentials().get(creds_name) + logger.debug("TMSS Django settings: Using dbcreds '%s' for django database: %s", + creds_name, django_db_credentials.stringWithHiddenPassword()) + DATABASES['default']['HOST'] = django_db_credentials.host + DATABASES['default']['PORT'] = django_db_credentials.port + DATABASES['default']['NAME'] = django_db_credentials.database + DATABASES['default']['USER'] = django_db_credentials.user + DATABASES['default']['PASSWORD'] = django_db_credentials.password REST_FRAMEWORK = { # Authentication, see also if-tree below for Open-ID vs LDAP configuration @@ -186,29 +197,23 @@ if "OIDC_RP_CLIENT_ID" in os.environ.keys(): INSTALLED_APPS.append('mozilla_django_oidc') # Load after auth REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'].append('mozilla_django_oidc.contrib.drf.OIDCAuthentication') + REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'].append('rest_framework.authentication.SessionAuthentication') REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'].append('rest_framework.permissions.IsAuthenticated') # OPEN-ID CONNECT - OIDC_DRF_AUTH_BACKEND = 'mozilla_django_oidc.auth.OIDCAuthenticationBackend' - # For talking to PYOP Identity Provider: - #OIDC_VERIFY_SSL = False # TODO: Remove for production! 
- #OIDC_RP_CLIENT_ID = os.environ.get('OIDC_RP_CLIENT_ID', 'a0PF52uaIQxu') # Secret, do not put real credentials on Git - #OIDC_RP_CLIENT_SECRET = os.environ.get('OIDC_RP_CLIENT_SECRET', '93863d5aad044c0baaf1f0021b2542db') # Secret, do not put real credentials on Git - #OIDC_OP_AUTHORIZATION_ENDPOINT = "https://localhost:9090/authentication" - #OIDC_OP_TOKEN_ENDPOINT = "https://localhost:9090/token" - #OIDC_OP_USER_ENDPOINT = "https://localhost:9090/userinfo" - - # For talking to Mozilla Identity Provider: - OIDC_RP_CLIENT_ID = os.environ.get('OIDC_RP_CLIENT_ID', '1') # Secret, do not put real credentials on Git - OIDC_RP_CLIENT_SECRET = os.environ.get('OIDC_RP_CLIENT_SECRET', 'secret') # Secret, do not put real credentials on Git - OIDC_OP_AUTHORIZATION_ENDPOINT="http://localhost:8088/openid/authorize" - OIDC_OP_TOKEN_ENDPOINT="http://localhost:8088/openid/token" - OIDC_OP_USER_ENDPOINT="http://localhost:8088/openid/userinfo" - - AUTHENTICATION_BACKENDS = ('mozilla_django_oidc.auth.OIDCAuthenticationBackend',) + OIDC_RP_SCOPES = "openid email profile" # todo: groups are not a standard scope, how to handle those? + OIDC_RP_CLIENT_ID = os.environ.get('OIDC_RP_CLIENT_ID', '2') # Secret, do not put real credentials on Git + OIDC_RP_CLIENT_SECRET = os.environ.get('OIDC_RP_CLIENT_SECRET', + 'secret') # Secret, do not put real credentials on Git + OIDC_OP_AUTHORIZATION_ENDPOINT = os.environ.get('OIDC_OP_AUTHORIZATION_ENDPOINT', "http://localhost:8088/openid/authorize") + OIDC_OP_TOKEN_ENDPOINT = os.environ.get('OIDC_OP_TOKEN_ENDPOINT', "http://localhost:8088/openid/token") + OIDC_OP_USER_ENDPOINT = os.environ.get('OIDC_OP_USER_ENDPOINT', "http://localhost:8088/openid/userinfo") + + AUTHENTICATION_BACKENDS = ('mozilla_django_oidc.auth.OIDCAuthenticationBackend', + 'django.contrib.auth.backends.ModelBackend') elif "TMSS_LDAPCREDENTIALS" in os.environ.keys(): # plain LDAP import ldap @@ -235,7 +240,8 @@ "email": "mail" } - AUTHENTICATION_BACKENDS = ('django_auth_ldap.backend.LDAPBackend',) + AUTHENTICATION_BACKENDS = ('django_auth_ldap.backend.LDAPBackend', + 'django.contrib.auth.backends.ModelBackend') else: REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'].append('rest_framework.permissions.AllowAny') logger.warning("No authentication configured! 
Please set either the OIDC_RP_CLIENT_ID or the TMSS_LDAPCREDENTIALS environment variable.") @@ -287,4 +293,4 @@ STATIC_URL = '/static/' # Setup support for proxy headers USE_X_FORWARDED_HOST = True -SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') \ No newline at end of file +SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') diff --git a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt index 2d42ec08412cd4930baedd8108e25b5788506101..aee876a483d8ce2b972e2e008fe883c6269d0a7d 100644 --- a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt @@ -5,6 +5,7 @@ set(_py_files __init__.py admin.py apps.py + views.py populate.py ) @@ -16,3 +17,4 @@ add_subdirectory(migrations) add_subdirectory(models) add_subdirectory(serializers) add_subdirectory(viewsets) +add_subdirectory(adapters) diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/adapters/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..509baa35b2cf9733c312448e087a0578bd72de7c --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/CMakeLists.txt @@ -0,0 +1,9 @@ + +include(PythonInstall) + +set(_py_files + parset.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/tmssapp/adapters) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py new file mode 100644 index 0000000000000000000000000000000000000000..e906cb22c94b2199922af61516f30603d55e69fd --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py @@ -0,0 +1,110 @@ +#!/usr/bin/python3 + +# Copyright (C) 2020 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.parameterset import parameterset +from lofar.common.datetimeutils import formatDatetime +from lofar.common.json_utils import add_defaults_to_json_object_for_schema +from lofar.sas.tmss.tmss.exceptions import * + +def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parameterset: + # make sure the spec is complete (fill in the schema defaults for all properties that were not specified) + spec = add_defaults_to_json_object_for_schema(subtask.specifications_doc, subtask.specifications_template.schema) + + parset = dict() # parameterset has no proper assignment operators, so take a detour via a dict... + parset["Observation.ObsID"] = subtask.pk + parset["Observation.momID"] = -1 # Needed by MACScheduler + parset["Observation.otdbID"] = -1 # Needed by MACScheduler; should/can this be the same as subtask.pk? 
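+ # Everything below maps the (now defaults-complete) JSON spec onto the flat 'Observation.*' and 'Cobalt.*' parset keys that MAC and COBALT consume.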
+ parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize() + parset["Observation.processSubtype"] = subtask.specifications_template.type.value.capitalize() # TODO: where to derive the processSubtype from? + parset["Observation.startTime"] = formatDatetime(subtask.start_time) + parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) + parset["Observation.VirtualInstrument.stationList"] = spec["stations"]["station_list"] + parset["Observation.antennaSet"] = spec["stations"]["antenna_set"] + parset["Observation.bandFilter"] = spec["stations"]["filter"] + parset["Observation.sampleClock"] = 200 # why is this not part of the schema? for example as a required setting with a single allowed value. + parset["Observation.nrBitsPerSample"] = 8 # why is this not part of the schema? for example as a required setting with a single allowed value. + + digi_beams = spec['stations']['digital_pointings'] + parset["Observation.nrBeams"] = len(digi_beams) + for beam_nr, digi_beam in enumerate(digi_beams): + beam_prefix = "Observation.Beam[%d]." % beam_nr + parset[beam_prefix+"directionType"] = digi_beam['pointing']['direction_type'] + parset[beam_prefix+"angle1"] = digi_beam['pointing']['angle1'] + parset[beam_prefix+"angle2"] = digi_beam['pointing']['angle2'] + parset[beam_prefix+"target"] = digi_beam['name'] + parset[beam_prefix+"subbandList"] = digi_beam['subbands'] + + phase_centers = spec['COBALT']['correlator']['phase_centers'] + if phase_centers: + # for now, cobalt can handle only one phase_center + # assume the first is the one + phase_center = phase_centers[0] + parset[beam_prefix+"Correlator.phaseCenterOverride"] = phase_center['index'] == beam_nr + parset[beam_prefix+"Correlator.directionType"] = phase_center['pointing']['direction_type'] + parset[beam_prefix+"Correlator.angle1"] = phase_center['pointing']['angle1'] + parset[beam_prefix+"Correlator.angle2"] = phase_center['pointing']['angle2'] + + analog_beam = spec['stations']['analog_pointing'] + parset["Observation.nrAnaBeams"] = 1 + beam_prefix = "Observation.AnaBeam[0]." 
+ parset[beam_prefix+"directionType"] = analog_beam['direction_type'] + parset[beam_prefix+"angle1"] = analog_beam['angle1'] + parset[beam_prefix+"angle2"] = analog_beam['angle2'] + + parset["Cobalt.realTime"] = True + parset["Cobalt.blockSize"] = spec['COBALT']['blocksize'] + parset["Cobalt.correctBandPass"] = spec['COBALT']['bandpass_correction'] + parset["Cobalt.delayCompensation"] = spec['COBALT']['delay_compensation'] + + parset["Cobalt.Correlator.nrChannelsPerSubband"] = spec['COBALT']['correlator']['channels_per_subband'] + parset["Cobalt.Correlator.nrBlocksPerIntegration"] = spec['COBALT']['correlator']['blocks_per_integration'] + parset["Cobalt.Correlator.nrIntegrationsPerBlock"] = spec['COBALT']['correlator']['integrations_per_block'] + + parset["Observation.DataProducts.Output_Correlated.enabled"] = True + parset["Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster or "CEP4" + + parset["Observation.DataProducts.Output_Correlated.filenames"] = [] + parset["Observation.DataProducts.Output_Correlated.locations"] = [] + # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work + subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id)) + for subtask_output in subtask_outputs: + dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) + for dataproduct in dataproducts: + parset["Observation.DataProducts.Output_Correlated.filenames"].append(dataproduct.filename) + parset["Observation.DataProducts.Output_Correlated.locations"].append(dataproduct.directory) + + # convert dict to real parameterset, and return it + parset = parameterset(parset) + return parset + +# dict to store conversion methods based on subtask.specifications_template.name +_convertors = {'observationcontrol schema': _convert_to_parset_for_observationcontrol_schema } + +def convert_to_parset(subtask: models.Subtask) -> parameterset: + ''' + Convert the specifications in the subtask to a LOFAR parset for MAC/COBALT + :raises ConversionException if no proper conversion is available. + ''' + try: + convertor = _convertors[subtask.specifications_template.name] + return convertor(subtask) + except KeyError: + raise ConversionException("Cannot convert subtask id=%d to parset. 
No conversion routine available for specifications_template='%s'" % ( + subtask.id, subtask.specifications_template.name)) diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 9983921379f68923aa8d52839d28b5afa17f9f80..6d51f05c4fa1fd59f037d1b8e7b025cceea5b219 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,14 +1,863 @@ -# Generated by Django 2.0.6 on 2018-06-11 08:56 +# Generated by Django 2.2.5 on 2020-02-17 13:55 -from django.db import migrations +import django.contrib.postgres.fields +import django.contrib.postgres.fields.jsonb +import django.contrib.postgres.indexes +from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): - dependencies = [ - ] + initial = True + + dependencies = [] operations = [ - # Add things like triggers to the database here: - # migrations.RunSQL('RAW SQL CODE') + migrations.CreateModel( + name='Algorithm', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Cluster', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('location', models.CharField(help_text='Human-readable location of the cluster.', max_length=128)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='CopyReason', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Cycle', + fields=[ + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), + ('start', models.DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.')), + ('stop', models.DateTimeField(help_text='Moment at which the cycle officially ends.')), + ('number', models.IntegerField(help_text='Cycle number.')), + ('standard_hours', models.IntegerField(help_text='Number of offered hours for standard observations.')), + ('expert_hours', models.IntegerField(help_text='Number of offered hours for expert observations.')), + ('filler_hours', models.IntegerField(help_text='Number of offered hours for filler 
observations.')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Dataformat',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Dataproduct',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('filename', models.CharField(help_text='Name of the file (or top-level directory) of the dataproduct. Adheres to a naming convention, but is not meant for parsing.', max_length=128)),
+                ('directory', models.CharField(help_text='Directory where this dataproduct is (to be) stored.', max_length=1024)),
+                ('deleted_since', models.DateTimeField(help_text='When this dataproduct was removed from disk, or NULL if not deleted (NULLable).', null=True)),
+                ('pinned_since', models.DateTimeField(help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).', null=True)),
+                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties (f.e. beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.')),
+                ('do_cancel', models.DateTimeField(help_text='When this dataproduct was cancelled (NULLable). Cancelling a dataproduct triggers cleanup if necessary.', null=True)),
+                ('expected_size', models.BigIntegerField(help_text='Expected size of the dataproduct, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).', null=True)),
+                ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes.
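Note: the Dataproduct model above stores filename and directory in separate columns; the parset builder earlier in this diff only has to collect them into the Output_Correlated.filenames and .locations lists. A sketch with a dataclass standing in for the ORM model (names illustrative):

    from dataclasses import dataclass

    @dataclass
    class FakeDataproduct:  # stand-in for models.Dataproduct
        filename: str
        directory: str

    dataproducts = [FakeDataproduct("L123456_SB%03d.MS" % sb, "/data/projects/demo") for sb in range(3)]
    parset = {
        "Observation.DataProducts.Output_Correlated.filenames": [dp.filename for dp in dataproducts],
        "Observation.DataProducts.Output_Correlated.locations": [dp.directory for dp in dataproducts],
    }
    print(parset)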
NULL if size is (yet) unknown (NULLable).', null=True)), + ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')), + ('dataformat', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductFeedbackTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductSpecificationsTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Datatype', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='GeneratorTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template 
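Note: Algorithm, Dataformat and Datatype above are "choice" tables: a single value column that is also the primary key, so the rows themselves act as an enumeration and foreign keys stay human-readable. A sketch of how such a table could be filled from an enum (values illustrative; the populate step is not part of this diff):

    from enum import Enum

    class DataformatChoice(Enum):  # illustrative values, cf. the "One of (MS, HDF5)" help texts
        MEASUREMENTSET = "MeasurementSet"
        HDF5 = "HDF5"

    # with the ORM this would be Dataformat.objects.get_or_create(value=choice.value)
    rows = [choice.value for choice in DataformatChoice]
    print(rows)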
(with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('create_function', models.CharField(help_text='Python function to call to execute the generator.', max_length=128)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Project', + fields=[ + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), + ('priority', models.IntegerField(default=0, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')), + ('can_trigger', models.BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).')), + ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')), + ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')), + ('filler', models.BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')), + ('cycle', models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Role', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ScheduleMethod', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SchedulingSet', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('generator_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameters for the generator (NULLable).', null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SchedulingUnitBlueprint', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
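Note: each *Template model above couples a version with a JSON 'schema' column, and the matching *_doc JSONFields elsewhere in this schema are meant to satisfy that schema. The validation step itself is not shown in this diff; a hedged sketch using the third-party jsonschema package:

    import jsonschema  # pip install jsonschema

    schema = {"type": "object",
              "properties": {"duration": {"type": "number", "minimum": 0}},
              "required": ["duration"]}
    requirements_doc = {"duration": 600}

    # raises jsonschema.ValidationError if the document does not match the template schema
    jsonschema.validate(instance=requirements_doc, schema=schema)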
verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).')), + ('do_cancel', models.BooleanField()), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SchedulingUnitDraft', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this run.')), + ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)), + ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.SchedulingUnitDraft')), + ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SchedulingUnitTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='StationType', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, 
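Note: SchedulingUnitDraft and SchedulingUnitBlueprint above show the draft/blueprint split used throughout this migration: a blueprint freezes the draft's requirements_doc (the help text marks it IMMUTABLE) and keeps a foreign key back to its draft. A sketch of the instantiation step under those assumptions (dicts stand in for ORM rows; the real creation code is not in this diff):

    import copy

    def create_blueprint_from_draft(draft: dict) -> dict:
        return {
            "name": draft["name"],
            "description": draft["description"],
            # deep copy, so later edits of the draft cannot leak into the frozen blueprint
            "requirements_doc": copy.deepcopy(draft["requirements_doc"]),
            "do_cancel": False,
            "draft": draft["name"],  # stands in for the ForeignKey back to the draft
        }

    draft = {"name": "su-draft-1", "description": "demo", "requirements_doc": {"duration": 600}}
    print(create_blueprint_from_draft(draft))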
serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Subtask', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('start_time', models.DateTimeField(help_text='Start this subtask at the specified time (NULLable).', null=True)), + ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).', null=True)), + ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')), + ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)), + ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')), + ('scheduler_input_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Partial specifications, as input for the scheduler.')), + ('cluster', models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')), + ('schedule_method', models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SubtaskConnector', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')), + ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SubtaskInputSelectionTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', 
django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='SubtaskState',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='SubtaskType',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='Tags',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('title', models.CharField(max_length=128)),
+                ('description', models.CharField(max_length=255)),
+            ],
+        ),
+        migrations.CreateModel(
+            name='TaskBlueprint',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
+                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task (IMMUTABLE).')),
+                ('do_cancel', models.BooleanField(help_text='Cancel this task.')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='TaskConnectors',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')),
+                ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='TaskDraft',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
+                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')),
+                ('copies', models.ForeignKey(help_text='Source reference, if we are a copy
(NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.TaskDraft')), + ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')), + ('scheduling_unit_draft', models.ForeignKey(help_text='Scheduling Unit draft to which this task draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='task_drafts', to='tmssapp.SchedulingUnitDraft')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('validation_code_js', models.CharField(help_text='JavaScript code for additional (complex) validation.', max_length=128)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='WorkRelationSelectionTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TaskRelationDraft', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), + 
('consumer', models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft')),
+                ('dataformat', models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')),
+                ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnectors')),
+                ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnectors')),
+                ('producer', models.ForeignKey(help_text='Task Draft that has the output connector. NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft')),
+                ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='TaskRelationBlueprint',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')),
+                ('consumer', models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint')),
+                ('dataformat', models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')),
+                ('draft', models.ForeignKey(help_text='Task Relation Draft which this task relation blueprint instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_relation_blueprint', to='tmssapp.TaskRelationDraft')),
+                ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnectors')),
+                ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnectors')),
+                ('producer', models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint')),
+                ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddField(
+            model_name='taskdraft',
+            name='specifications_template',
+            field=models.ForeignKey(help_text='Schema used for specifications_doc.',
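Note: TaskRelationDraft and TaskRelationBlueprint above are the dataflow edges of the task graph: producer and consumer are tasks, input and output are the connectors they expose, and selection_doc filters what flows along the edge. A sketch of walking such edges (plain tuples instead of ORM rows):

    # edges: (producer task, output connector, input connector, consumer task)
    relations = [
        ("observation", "correlated-out", "correlated-in", "pipeline"),
        ("pipeline", "images-out", "images-in", "ingest"),
    ]

    def consumers_of(task: str) -> list:
        return [consumer for producer, _out, _in, consumer in relations if producer == task]

    print(consumers_of("observation"))  # ['pipeline']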
on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnectors', + name='input_of', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnectors', + name='output_of', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnectors', + name='role', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'), + ), + migrations.AddField( + model_name='taskblueprint', + name='draft', + field=models.ForeignKey(help_text='Task Draft which this task instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_blueprint', to='tmssapp.TaskDraft'), + ), + migrations.AddField( + model_name='taskblueprint', + name='scheduling_unit_blueprint', + field=models.ForeignKey(help_text='Scheduling Unit Blueprint to which this task belongs.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitBlueprint'), + ), + migrations.AddField( + model_name='taskblueprint', + name='specifications_template', + field=models.ForeignKey(help_text='Schema used for specifications_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), + ), + migrations.CreateModel( + name='SubtaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('queue', models.BooleanField(default=False)), + ('realtime', models.BooleanField(default=False)), + ('type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskType')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SubtaskOutput', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('connector', models.ForeignKey(help_text='Which connector this Subtask Output implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')), + ('subtask', models.ForeignKey(help_text='Subtask to which this output specification refers.', 
on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='SubtaskInput',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')),
+                ('connector', models.ForeignKey(help_text='Which connector this Subtask Input implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')),
+                ('dataproducts', models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling. Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. We thus store the result of this filtering directly to retain which input was specified for the task.', to='tmssapp.Dataproduct')),
+                ('producer', models.ForeignKey(help_text='The Subtask Output providing the input dataproducts.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput')),
+                ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskInputSelectionTemplate')),
+                ('subtask', models.ForeignKey(help_text='Subtask to which this input specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')),
+                ('task_relation_blueprint', models.ForeignKey(help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.TaskRelationBlueprint')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddField(
+            model_name='subtaskconnector',
+            name='input_of',
+            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.SubtaskTemplate'),
+        ),
+        migrations.AddField(
+            model_name='subtaskconnector',
+            name='output_of',
+            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.SubtaskTemplate'),
+        ),
+        migrations.AddField(
+            model_name='subtaskconnector',
+            name='role',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'),
+        ),
+        migrations.AddField(
+            model_name='subtask',
+            name='specifications_template',
+            field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate'),
+        ),
+        migrations.AddField(
+            model_name='subtask',
+            name='state',
+            field=models.ForeignKey(help_text='Subtask state (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='task_states', to='tmssapp.SubtaskState'),
+        ),
+        migrations.AddField(
+            model_name='subtask',
+            name='task_blueprint',
+            field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks',
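Note: Subtask.state above is a foreign key into the SubtaskState choice table, and its help text refers to a "Subtask State Machine". The allowed transitions are not part of this diff; a sketch of the kind of guard such a state table enables (transition table illustrative only):

    ALLOWED_TRANSITIONS = {  # illustrative, not the real TMSS state machine
        "defined": {"scheduling"},
        "scheduling": {"scheduled", "error"},
        "scheduled": {"queueing", "cancelling"},
    }

    def next_state(current: str, new: str) -> str:
        if new not in ALLOWED_TRANSITIONS.get(current, set()):
            raise ValueError("illegal subtask state transition %s -> %s" % (current, new))
        return new

    print(next_state("defined", "scheduling"))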
to='tmssapp.TaskBlueprint'), + ), + migrations.AddField( + model_name='schedulingunitdraft', + name='requirements_template', + field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitTemplate'), + ), + migrations.AddField( + model_name='schedulingunitdraft', + name='scheduling_set', + field=models.ForeignKey(help_text='Set to which this scheduling unit draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_unit_drafts', to='tmssapp.SchedulingSet'), + ), + migrations.AddField( + model_name='schedulingunitblueprint', + name='draft', + field=models.ForeignKey(help_text='Scheduling Unit Draft which this run instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_scheduling_unit_blueprint', to='tmssapp.SchedulingUnitDraft'), + ), + migrations.AddField( + model_name='schedulingunitblueprint', + name='requirements_template', + field=models.ForeignKey(help_text='Schema used for requirements_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitTemplate'), + ), + migrations.AddField( + model_name='schedulingset', + name='generator_source', + field=models.ForeignKey(help_text='Reference for the generator to an existing collection of specifications (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SchedulingUnitDraft'), + ), + migrations.AddField( + model_name='schedulingset', + name='generator_template', + field=models.ForeignKey(help_text='Generator for the scheduling units in this set (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.GeneratorTemplate'), + ), + migrations.AddField( + model_name='schedulingset', + name='project', + field=models.ForeignKey(help_text='Project to which this scheduling set belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_sets', to='tmssapp.Project'), + ), + migrations.CreateModel( + name='Filesystem', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('capacity', models.BigIntegerField(help_text='Capacity in bytes')), + ('cluster', models.ForeignKey(help_text='Cluster hosting this filesystem.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultWorkRelationSelectionTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object 
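Note: Filesystem above records a capacity in bytes per cluster, which together with Dataproduct.size and expected_size supports the accounting mentioned in the help texts. A sketch of the implied free-space check (plain Python; real code would aggregate with the ORM):

    capacity = 2 * 1024**4                         # 2 TiB filesystem
    stored_sizes = [300 * 1024**3, 512 * 1024**3]  # Dataproduct.size values already on disk
    expected = 800 * 1024**3                       # expected_size of the new dataproducts

    free = capacity - sum(stored_sizes)
    print("fits" if expected <= free else "does not fit")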
update.')), + ('name', models.CharField(max_length=128, unique=True)), + ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.WorkRelationSelectionTemplate')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultTaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.TaskTemplate')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultSubtaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultSchedulingUnitTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitTemplate')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultGeneratorTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.GeneratorTemplate')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultDataproductSpecificationsTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', 
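Note: the Default*Template models above are plain name-to-template indirections with a unique name, letting client code ask for "the default template called X" instead of hard-coding template ids. A sketch of that lookup (a dict stands in for the table):

    default_templates = {"observation": {"version": "1", "schema": {}}}  # name -> template row

    def get_default_template(name: str) -> dict:
        try:
            return default_templates[name]
        except KeyError:
            raise LookupError("no default template named '%s'" % name)

    print(get_default_template("observation")["version"])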
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductSpecificationsTemplate')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductTransform', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('identity', models.BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. Allows for efficient reasoning about data duplication.')), + ('input', models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.Dataproduct')), + ('output', models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.Dataproduct')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductHash', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('hash', models.CharField(help_text='Hash value.', max_length=128)), + ('algorithm', models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm')), + ('dataproduct', models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductArchiveInfo', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)), + ('public_since', models.DateTimeField(help_text='Dataproduct is available for public 
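Note: DataproductHash above stores one hash per dataproduct together with the Algorithm used; the help text names MD5 (AES256, also listed, is an encryption algorithm rather than a hash). A sketch of producing the MD5 value with hashlib:

    import hashlib

    def md5_of(path: str) -> str:
        h = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b""):  # stream in 1 MiB chunks
                h.update(chunk)
        return h.hexdigest()

    # e.g. row = {"dataproduct": dp.id, "algorithm": "md5", "hash": md5_of("/data/L123456_SB000.MS.tar")}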
download since this moment, or NULL if dataproduct is not (NULLable).', null=True)), + ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).', null=True)), + ('dataproduct', models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddField( + model_name='dataproduct', + name='feedback_template', + field=models.ForeignKey(help_text='Schema used for feedback_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductFeedbackTemplate'), + ), + migrations.AddField( + model_name='dataproduct', + name='producer', + field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput'), + ), + migrations.AddField( + model_name='dataproduct', + name='specifications_template', + field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.DataproductSpecificationsTemplate'), + ), + migrations.CreateModel( + name='AntennaSet', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)), + ('inputs', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)), + ('station_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.StationType')), + ], + options={ + 'abstract': False, + }, + ), + migrations.AddIndex( + model_name='taskrelationdraft', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_aeef84_gin'), + ), + migrations.AddIndex( + model_name='taskrelationblueprint', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_256437_gin'), + ), + migrations.AddIndex( + model_name='taskconnectors', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_0ebd6d_gin'), + ), + migrations.AddIndex( + model_name='subtaskoutput', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_e25b4c_gin'), + ), + migrations.AddIndex( + model_name='subtaskinput', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_fb9960_gin'), + ), + migrations.AddIndex( + model_name='subtaskconnector', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_60e299_gin'), + ), + migrations.AddIndex( + model_name='subtask', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_d2fc43_gin'), + ), + migrations.AddIndex( + 
model_name='defaultworkrelationselectiontemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_b652d9_gin'), + ), + migrations.AddIndex( + model_name='defaulttasktemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_c88200_gin'), + ), + migrations.AddIndex( + model_name='defaultsubtasktemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_e9c73d_gin'), + ), + migrations.AddIndex( + model_name='defaultschedulingunittemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_3ab2d6_gin'), + ), + migrations.AddIndex( + model_name='defaultgeneratortemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_89c89d_gin'), + ), + migrations.AddIndex( + model_name='defaultdataproductspecificationstemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_269b1f_gin'), + ), + migrations.AddIndex( + model_name='dataproducttransform', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_380c1f_gin'), + ), + migrations.AddIndex( + model_name='dataproducthash', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_dae145_gin'), + ), + migrations.AddIndex( + model_name='dataproductarchiveinfo', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_ebf2ef_gin'), + ), + migrations.AddIndex( + model_name='dataproduct', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_5932a3_gin'), + ), ] diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_auto_20200123_1057.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_auto_20200123_1057.py deleted file mode 100644 index 1bccdfe6ac7fbf01c71031f992938c4e25aca38d..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_auto_20200123_1057.py +++ /dev/null @@ -1,933 +0,0 @@ -# Generated by Django 2.0.6 on 2020-01-23 10:57 - -import django.contrib.postgres.fields -import django.contrib.postgres.fields.jsonb -import django.contrib.postgres.indexes -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('tmssapp', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='Algorithm', - fields=[ - ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), - ], - options={ - 'abstract': False, - }, - ), - migrations.CreateModel( - name='AntennaSet', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)), - ('inputs', 
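Note: the AddIndex operations above put a PostgreSQL GIN index on every tags ArrayField, which is what makes array-containment queries on tags cheap. A sketch of the lookup those indexes serve (model name illustrative):

    # With the Django ORM: Subtask.objects.filter(tags__contains=["calibration"])
    # which Postgres answers via the GIN index as: WHERE tags @> ARRAY['calibration']
    rows = [{"id": 1, "tags": ["calibration", "test"]}, {"id": 2, "tags": []}]
    matches = [r["id"] for r in rows if {"calibration"} <= set(r["tags"])]
    print(matches)  # [1]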
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Cluster',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('location', models.CharField(help_text='Human-readable location of the cluster.', max_length=128)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='CopyReason',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Cycle',
-            fields=[
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)),
-                ('start', models.DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.')),
-                ('stop', models.DateTimeField(help_text='Moment at which the cycle officially ends.')),
-                ('number', models.IntegerField(help_text='Cycle number.')),
-                ('standard_hours', models.IntegerField(help_text='Number of offered hours for standard observations.')),
-                ('expert_hours', models.IntegerField(help_text='Number of offered hours for expert observations.')),
-                ('filler_hours', models.IntegerField(help_text='Number of offered hours for filler observations.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Dataformat',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Dataproduct',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('filename', models.CharField(help_text='Name of the file (or top-level directory) of the dataproduct. Adheres to a naming convention, but is not meant for parsing.', max_length=128)),
-                ('directory', models.CharField(help_text='Directory where this dataproduct is (to be) stored.', max_length=1024)),
-                ('deleted_since', models.DateTimeField(help_text='When this dataproduct was removed from disk, or NULL if not deleted (NULLable).', null=True)),
-                ('pinned_since', models.DateTimeField(help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).', null=True)),
-                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties (f.e. beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.')),
-                ('do_cancel', models.DateTimeField(help_text='When this dataproduct was cancelled (NULLable). Cancelling a dataproduct triggers cleanup if necessary.', null=True)),
-                ('expected_size', models.BigIntegerField(help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).', null=True)),
-                ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).', null=True)),
-                ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')),
-                ('dataformat', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DataproductArchiveInfo',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)),
-                ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).', null=True)),
-                ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).', null=True)),
-                ('dataproduct', models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DataproductFeedbackTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DataproductHash',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('hash', models.CharField(help_text='Hash value.', max_length=128)),
-                ('algorithm', models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm')),
-                ('dataproduct', models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DataproductSpecificationsTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DataproductTransform',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('identity', models.BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. Allows for efficient reasoning about data duplication.')),
-                ('input', models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.Dataproduct')),
-                ('output', models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.Dataproduct')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Datatype',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DefaultDataproductSpecificationsTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(max_length=128, unique=True)),
-                ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductSpecificationsTemplate')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DefaultGeneratorTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(max_length=128, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DefaultSchedulingUnitTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(max_length=128, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DefaultSubtaskTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(max_length=128, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DefaultTaskTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(max_length=128, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='DefaultWorkRelationSelectionTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(max_length=128, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Filesystem',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('capacity', models.BigIntegerField(help_text='Capacity in bytes')),
-                ('cluster', models.ForeignKey(help_text='Cluster hosting this filesystem.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='GeneratorTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-                ('create_function', models.CharField(help_text='Python function to call to execute the generator.', max_length=128)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Project',
-            fields=[
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)),
-                ('priority', models.IntegerField(default=0, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')),
-                ('can_trigger', models.BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).')),
-                ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')),
-                ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')),
-                ('filler', models.BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')),
-                ('cycle', models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Role',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='ScheduleMethod',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SchedulingSet',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('generator_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameters for the generator (NULLable).', null=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SchedulingUnitBlueprint',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).')),
-                ('do_cancel', models.BooleanField()),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SchedulingUnitDraft',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this run.')),
-                ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)),
-                ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.SchedulingUnitDraft')),
-                ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SchedulingUnitTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='StationType',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Subtask',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('start_time', models.DateTimeField(help_text='Start this subtask at the specified time (NULLable).', null=True)),
-                ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).', null=True)),
-                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')),
-                ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)),
-                ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')),
-                ('scheduler_input_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Partial specifications, as input for the scheduler.')),
-                ('cluster', models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')),
-                ('schedule_method', models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskConnector',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')),
-                ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskInput',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')),
-                ('connector', models.ForeignKey(help_text='Which connector this Task Input implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')),
-                ('dataproducts', models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. We thus store the result of this filtering directly to retain which input was specified for the task..', to='tmssapp.Dataproduct')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskInputSelectionTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskOutput',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('connector', models.ForeignKey(help_text='Which connector this Subtask Output implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')),
-                ('subtask', models.ForeignKey(help_text='Subtask to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskState',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-                ('queue', models.BooleanField(default=False)),
-                ('realtime', models.BooleanField(default=False)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='SubtaskType',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='Tags',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('title', models.CharField(max_length=128)),
-                ('description', models.CharField(max_length=255)),
-            ],
-        ),
-        migrations.CreateModel(
-            name='TaskBlueprint',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schedulings for this task (IMMUTABLE).')),
-                ('do_cancel', models.BooleanField(help_text='Cancel this task.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='TaskConnectors',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')),
-                ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='TaskDraft',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')),
-                ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.TaskDraft')),
-                ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')),
-                ('scheduling_unit_draft', models.ForeignKey(help_text='Scheduling Unit draft to which this task draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='task_drafts', to='tmssapp.SchedulingUnitDraft')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='TaskRelationBlueprint',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')),
-                ('consumer', models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint')),
-                ('dataformat', models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='TaskRelationDraft',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')),
-                ('consumer', models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft')),
-                ('dataformat', models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')),
-                ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnectors')),
-                ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnectors')),
-                ('producer', models.ForeignKey(help_text='Task Draft that has the output connector. NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='TaskTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-                ('validation_code_js', models.CharField(help_text='JavaScript code for additional (complex) validation.', max_length=128)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.CreateModel(
-            name='WorkRelationSelectionTemplate',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, help_text='User-defined search keywords for object.', size=8)),
-                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
-                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
-                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
-                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.AddField(
-            model_name='taskrelationdraft',
-            name='selection_template',
-            field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate'),
-        ),
-        migrations.AddField(
-            model_name='taskrelationblueprint',
-            name='draft',
-            field=models.ForeignKey(help_text='Task Relation Draft which this work request instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_relation_blueprint', to='tmssapp.TaskRelationDraft'),
-        ),
-        migrations.AddField(
-            model_name='taskrelationblueprint',
-            name='input',
-            field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnectors'),
-        ),
-        migrations.AddField(
-            model_name='taskrelationblueprint',
-            name='output',
-            field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnectors'),
-        ),
-        migrations.AddField(
-            model_name='taskrelationblueprint',
-            name='producer',
-            field=models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint'),
-        ),
-        migrations.AddField(
-            model_name='taskrelationblueprint',
-            name='selection_template',
-            field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate'),
-        ),
-        migrations.AddField(
-            model_name='taskdraft',
-            name='specifications_template',
-            field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='taskconnectors',
-            name='input_of',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='taskconnectors',
-            name='output_of',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='taskconnectors',
-            name='role',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'),
-        ),
-        migrations.AddField(
-            model_name='taskblueprint',
-            name='draft',
-            field=models.ForeignKey(help_text='Task Draft which this task instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_blueprint', to='tmssapp.TaskDraft'),
-        ),
-        migrations.AddField(
-            model_name='taskblueprint',
-            name='scheduling_unit_blueprint',
-            field=models.ForeignKey(help_text='Scheduling Unit Blueprint to which this task belongs.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitBlueprint'),
-        ),
-        migrations.AddField(
-            model_name='taskblueprint',
-            name='specifications_template',
-            field=models.ForeignKey(help_text='Schema used for specifications_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='subtasktemplate',
-            name='type',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskType'),
-        ),
-        migrations.AddField(
-            model_name='subtaskinput',
-            name='producer',
-            field=models.ForeignKey(help_text='The Subtask Output providing the input dataproducts.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput'),
-        ),
-        migrations.AddField(
-            model_name='subtaskinput',
-            name='selection_template',
-            field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskInputSelectionTemplate'),
-        ),
-        migrations.AddField(
-            model_name='subtaskinput',
-            name='subtask',
-            field=models.ForeignKey(help_text='Subtask to which this input specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask'),
-        ),
-        migrations.AddField(
-            model_name='subtaskinput',
-            name='task_relation_blueprint',
-            field=models.ForeignKey(help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.TaskRelationBlueprint'),
-        ),
-        migrations.AddField(
-            model_name='subtaskconnector',
-            name='input_of',
-            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.SubtaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='subtaskconnector',
-            name='output_of',
-            field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.SubtaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='subtaskconnector',
-            name='role',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'),
-        ),
-        migrations.AddField(
-            model_name='subtask',
-            name='specifications_template',
-            field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='subtask',
-            name='state',
-            field=models.ForeignKey(help_text='Subtask state (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='task_states', to='tmssapp.SubtaskState'),
-        ),
-        migrations.AddField(
-            model_name='subtask',
-            name='task_blueprint',
-            field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'),
-        ),
-        migrations.AddField(
-            model_name='schedulingunitdraft',
-            name='requirements_template',
-            field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitTemplate'),
-        ),
-        migrations.AddField(
-            model_name='schedulingunitdraft',
-            name='scheduling_set',
-            field=models.ForeignKey(help_text='Set to which this scheduling unit draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_unit_drafts', to='tmssapp.SchedulingSet'),
-        ),
-        migrations.AddField(
-            model_name='schedulingunitblueprint',
-            name='draft',
-            field=models.ForeignKey(help_text='Scheduling Unit Draft which this run instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_scheduling_unit_blueprint', to='tmssapp.SchedulingUnitDraft'),
-        ),
-        migrations.AddField(
-            model_name='schedulingunitblueprint',
-            name='requirements_template',
-            field=models.ForeignKey(help_text='Schema used for requirements_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitTemplate'),
-        ),
-        migrations.AddField(
-            model_name='schedulingset',
-            name='generator_source',
-            field=models.ForeignKey(help_text='Reference for the generator to an existing collection of specifications (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SchedulingUnitDraft'),
-        ),
-        migrations.AddField(
-            model_name='schedulingset',
-            name='generator_template',
-            field=models.ForeignKey(help_text='Generator for the scheduling units in this set (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.GeneratorTemplate'),
-        ),
-        migrations.AddField(
-            model_name='schedulingset',
-            name='project',
-            field=models.ForeignKey(help_text='Project to which this scheduling set belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_sets', to='tmssapp.Project'),
-        ),
-        migrations.AddField(
-            model_name='defaultworkrelationselectiontemplate',
-            name='template',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.WorkRelationSelectionTemplate'),
-        ),
-        migrations.AddField(
-            model_name='defaulttasktemplate',
-            name='template',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.TaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='defaultsubtasktemplate',
-            name='template',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate'),
-        ),
-        migrations.AddField(
-            model_name='defaultschedulingunittemplate',
-            name='template',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitTemplate'),
-        ),
-        migrations.AddField(
-            model_name='defaultgeneratortemplate',
-            name='template',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.GeneratorTemplate'),
-        ),
-        migrations.AddField(
-            model_name='dataproduct',
-            name='feedback_template',
-            field=models.ForeignKey(help_text='Schema used for feedback_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductFeedbackTemplate'),
-        ),
-        migrations.AddField(
-            model_name='dataproduct',
-            name='producer',
-            field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput'),
-        ),
-        migrations.AddField(
-            model_name='dataproduct',
-            name='specifications_template',
-            field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.DataproductSpecificationsTemplate'),
-        ),
-        migrations.AddField(
-            model_name='antennaset',
-            name='station_type',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.StationType'),
-        ),
-        migrations.AddIndex(
-            model_name='taskrelationdraft',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_aeef84_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='taskrelationblueprint',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_256437_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='taskconnectors',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_0ebd6d_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='subtaskoutput',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_e25b4c_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='subtaskinput',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_fb9960_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='subtaskconnector',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_60e299_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='subtask',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_d2fc43_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='defaultworkrelationselectiontemplate',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_b652d9_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='defaulttasktemplate',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_c88200_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='defaultsubtasktemplate',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_e9c73d_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='defaultschedulingunittemplate',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_3ab2d6_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='defaultgeneratortemplate',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_89c89d_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='defaultdataproductspecificationstemplate',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_269b1f_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='dataproducttransform',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_380c1f_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='dataproducthash',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_dae145_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='dataproductarchiveinfo',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_ebf2ef_gin'),
-        ),
-        migrations.AddIndex(
-            model_name='dataproduct',
-            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_dat_tags_5932a3_gin'),
-        ),
-    ]
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py
new file mode 100644
index 0000000000000000000000000000000000000000..91ceea132673e18cc1747f686b7c536c91d7557a
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py
@@ -0,0 +1,16 @@
+
+from django.db import migrations
+
+from lofar.sas.tmss.tmss.tmssapp.populate import *
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('tmssapp', '0001_initial'),
+    ]
+
+    # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB
+    operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'),
+                   migrations.RunPython(populate_choices),
+                   migrations.RunPython(populate_misc),
+                   migrations.RunPython(populate_lofar_json_schemas) ]
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0003_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0003_populate.py
deleted file mode 100644
index 8fcea39a130ebe08b53939653c6465d3343d71d5..0000000000000000000000000000000000000000
--- a/SAS/TMSS/src/tmss/tmssapp/migrations/0003_populate.py
+++ /dev/null
@@ -1,12 +0,0 @@
-
-from django.db import migrations
-
-from ..populate import *
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('tmssapp', '0002_auto_20200123_1057'),
-    ]
-
-    operations = [ migrations.RunPython(populate_choices) ]
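Both migration files above use Django's data-migration machinery: RunSQL for the sequence restart and RunPython for the populate steps. Each RunPython callable receives the historical `apps` registry plus a `schema_editor`; the populate functions in this commit ignore `apps` and import the model classes directly, which works but ties the migration to the current model code. A minimal sketch of the general pattern, with a hypothetical `Color` model that is not part of TMSS:

    from django.db import migrations

    def forwards(apps, schema_editor):
        # Resolve the model through the historical app registry, so the
        # migration keeps working even after the model class changes.
        Color = apps.get_model('myapp', 'Color')
        Color.objects.bulk_create([Color(value=v) for v in ('red', 'green')])

    class Migration(migrations.Migration):
        dependencies = [('myapp', '0001_initial')]
        operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]
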
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
index 4a0c2d191069203078c237d445d486bdb390ba71..f52967a410e9053ee8704bdaf260039a591f2560 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
@@ -8,6 +8,12 @@ from django.contrib.postgres.fields import ArrayField, JSONField
 from .specification import AbstractChoice, BasicCommon, Template, NamedCommon # , <TaskBlueprint
 from enum import Enum
 from rest_framework.serializers import HyperlinkedRelatedField
+
+from lofar.sas.tmss.tmss.exceptions import *
+
+import json
+import jsonschema
+
 #
 # I/O
 #
@@ -127,7 +133,6 @@ class DataproductFeedbackTemplate(Template):
 #
 # Instance Objects
 #
-
 class Subtask(BasicCommon):
     """
     Represents a low-level task, which is an atomic unit of execution, such as running an observation, running
@@ -147,6 +152,28 @@ class Subtask(BasicCommon):
     scheduler_input_doc = JSONField(help_text='Partial specifications, as input for the scheduler.')
     # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work?
 
+    def validate_specification_against_schema(self):
+        if self.specifications_doc is None or self.specifications_template_id is None:
+            return
+
+        try:
+            # ensure the specification and schema are both valid json in the first place
+            spec = json.loads(self.specifications_doc) if type(self.specifications_doc) == str else self.specifications_doc
+            schema = json.loads(self.specifications_template.schema) if type(self.specifications_template.schema) == str else self.specifications_template.schema
+        except json.decoder.JSONDecodeError as e:
+            raise SpecificationException("Invalid JSON: %s" % str(e))
+
+        try:
+            jsonschema.validate(spec, schema)
+        except jsonschema.ValidationError as e:
+            raise SpecificationException(str(e))
+
+    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+        '''override of normal save method, doing a validation of the specification against the schema first
+        :raises SpecificationException in case the specification does not validate against the schema'''
+        self.validate_specification_against_schema()
+        super().save(force_insert, force_update, using, update_fields)
+
 class SubtaskInput(BasicCommon):
     subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, help_text='Subtask to which this input specification refers.')
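The save() override above routes every Subtask write through jsonschema before it hits the database. A standalone sketch of the same check, using a toy schema rather than one of the TMSS templates:

    import jsonschema

    schema = {"type": "object",
              "properties": {"duration": {"type": "number", "minimum": 1}},
              "additionalProperties": False}

    jsonschema.validate({"duration": 60}, schema)     # valid: returns None
    try:
        jsonschema.validate({"duration": 0}, schema)  # violates "minimum": 1
    except jsonschema.ValidationError as e:
        print(e.message)                              # "0 is less than the minimum of 1"

In the model code the ValidationError is translated into a SpecificationException, so callers only need to catch one TMSS-specific error type.
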
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
index 60ae93b3054ff5b45ab7727214b734cc6ae98d03..86f621bfa1e2cf6e3130c2a83f8c270ac7ab0330 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
@@ -6,7 +6,6 @@ from django.db.models import Model, CharField, DateTimeField, BooleanField, Fore
 from django.contrib.postgres.fields import ArrayField, JSONField
 from django.contrib.postgres.indexes import GinIndex
 from enum import Enum
-from django_json_widget.widgets import JSONEditorWidget
 
 #
 # Common
@@ -17,7 +16,7 @@ from django_json_widget.widgets import JSONEditorWidget
 class BasicCommon(Model):
     # todo: we cannot use foreign keys in the array here, so we have to keep the Tags table up to date by trigger or so.
     # todo: we could switch to a manytomany field instead?
-    tags = ArrayField(CharField(max_length=128), size=8, blank=True, help_text='User-defined search keywords for object.')
+    tags = ArrayField(CharField(max_length=128), size=8, blank=True, help_text='User-defined search keywords for object.', default=list)
     created_at = DateTimeField(auto_now_add=True, help_text='Moment of object creation.')
     updated_at = DateTimeField(auto_now=True, help_text='Moment of last object update.')
@@ -144,16 +143,12 @@ class TaskConnectors(BasicCommon):
 # abstract models
 
 class Template(NamedCommon):
-    formfield_overrides = {
-        JSONField: {'widget': JSONEditorWidget},
-    }
     version = CharField(max_length=128, help_text='Version of this template (with respect to other templates of the same name).')
     schema = JSONField(help_text='Schema for the configurable parameters needed to use this template.')
 
     class Meta:
         abstract = True
 
-
 # concrete models
 
 class GeneratorTemplate(Template):
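The `default=list` added to the tags field is deliberate: a callable default is invoked once per new row, giving each object its own fresh list, whereas `default=[]` would share one mutable list object across all instances (Django's system checks flag exactly this for ArrayField). The difference in plain Python:

    shared = []                  # what default=[] would hand to every row
    a_tags = shared
    b_tags = shared
    a_tags.append('vlbi')
    assert b_tags == ['vlbi']    # unwanted cross-talk between rows

    a_tags, b_tags = list(), list()   # default=list: the callable runs per row
    a_tags.append('vlbi')
    assert b_tags == []               # each row starts clean
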
RA)", + "default": 0 + }, + "angle2": { + "type": "number", + "title": "Angle 2", + "description": "Second angle (f.e. DEC)", + "default": 0 + } + } + } + }, + "additionalProperties": false, + "properties": { + "duration": { + "type": "number", + "title": "Duration (seconds)", + "description": "Duration of this observation", + "default": 60, + "minimum": 1 + }, + "calibrator": { + "type": "object", + "additionalProperties": false, + "default": {}, + "properties": { + "enabled": { + "type": "boolean", + "title": "Calibrator", + "description": "Replace targets by calibrators", + "default": false + }, + "autoselect": { + "type": "boolean", + "title": "Auto-select", + "description": "Auto-select calibrator based on elevation", + "default": false + }, + "pointing": { + "title": "Digital pointing", + "$ref": "#/definitions/pointing", + "default": {} + } + } + }, + "channels_per_subband": { + "type": "integer", + "title": "Channels/subband", + "description": "Number of frequency bands per subband", + "default": 64, + "minimum": 8, + "enum": [ + 8, + 16, + 32, + 64, + 128, + 256, + 512, + 1024 + ] + }, + "integration_time": { + "type": "number", + "title": "Integration time (seconds)", + "description": "Desired integration period", + "default": 1, + "minimum": 0.1 + }, + "storage_cluster": { + "type": "string", + "title": "Storage cluster", + "description": "Cluster to write output to", + "default": "CEP4", + "enum": [ + "CEP4", + "DragNet" + ] + } + } +}'''), "tags": []} + + TaskTemplate.objects.create(**task_template_data) + + +def _populate_example_data(): + try: + from datetime import datetime + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data, TaskBlueprint_test_data, SubtaskOutput_test_data, Dataproduct_test_data, Subtask_test_data + + task_template = models.TaskTemplate.objects.get(name='correlator schema') + task_draft_data = TaskDraft_test_data(name="my test obs", specifications_template=task_template) + task_draft = models.TaskDraft.objects.create(**task_draft_data) + + task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft) + task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data) + + subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema') + specifications_doc = { + "stations": {"station_list": ["CS001", "CS002"], + "antenna_set": "HBA_DUAL", + "filter": "HBA_110_190", + "analog_pointing": {"direction_type": "J2000", + "angle1": 45, + "angle2": 20}, + "digital_pointings": [{"name": "beam01", + "pointing": {"direction_type": "J2000", + "angle1": 45, + "angle2": 20}, + "subbands": list(range(0, 16)) + }] + } + } + + specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema) + subtask_data = Subtask_test_data(task_blueprint=task_blueprint, subtask_template=subtask_template, specifications_doc=specifications_doc) + subtask = models.Subtask.objects.create(**subtask_data) + + subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask)) + for sb_nr in specifications_doc['stations']['digital_pointings'][0]['subbands']: + dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output, filename="dataproduct_SB_%03d.h5"%sb_nr)) + except ImportError: + pass + +def _populate_obscontrol_schema(): + subtask_template_data = {"type": SubtaskType.objects.get(value='observation'), + "name": "observationcontrol schema", + 
"description": 'observationcontrol schema for observation subtask', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": { + "pointing": { + "type": "object", + "additionalProperties": false, + "properties": { + "direction_type": { + "type": "string", + "title": "Reference frame", + "description": "", + "default": "J2000", + "enum": [ + "J2000", + "SUN", + "MOON", + "MERCURY", + "VENUS", + "MARS", + "JUPITER", + "SATURN", + "URANUS", + "NEPTUNE", + "PLUTO" + ] + }, + "angle1": { + "type": "number", + "title": "Angle 1", + "description": "First angle (f.e. RA)", + "default": 0 + }, + "angle2": { + "type": "number", + "title": "Angle 2", + "description": "Second angle (f.e. DEC)", + "default": 0 + } + } + } + }, + "additionalProperties": false, + "properties": { + "stations": { + "type": "object", + "default": {}, + "properties": { + "station_list": { + "title": "Station list", + "type": "array", + "additionalItems": false, + "additionalProperties": false, + "default": ["CS001"], + "items": { + "type": "string", + "enum": [ + "CS001", + "CS002", + "CS003", + "CS004", + "CS005", + "CS006", + "CS007", + "CS011", + "CS013", + "CS017", + "CS021", + "CS024", + "CS026", + "CS028", + "CS030", + "CS031", + "CS032", + "CS101", + "CS103", + "CS201", + "CS301", + "CS302", + "CS401", + "CS501", + "RS106", + "RS205", + "RS208", + "RS210", + "RS305", + "RS306", + "RS307", + "RS310", + "RS406", + "RS407", + "RS409", + "RS503", + "RS508", + "RS509", + "DE601", + "DE602", + "DE603", + "DE604", + "DE605", + "FR606", + "SE607", + "UK608", + "DE609", + "PL610", + "PL611", + "PL612", + "IE613", + "LV614" + ], + "title": "Station", + "description": "" + }, + "minItems": 1, + "uniqueItems": true + }, + "antenna_set": { + "type": "string", + "title": "Antenna set", + "description": "Which antennas & fields to use on each station", + "default": "HBA_DUAL", + "enum": [ + "HBA_DUAL", + "HBA_DUAL_INNER", + "HBA_ONE", + "HBA_ONE_INNER", + "HBA_ZERO", + "HBA_ZERO_INNER", + "LBA_INNER", + "LBA_OUTER", + "LBA_SPARSE_EVEN", + "LBA_SPARSE_ODD", + "LBA_ALL" + ] + }, + "filter": { + "type": "string", + "title": "Band-pass filter", + "description": "Must match antenna type", + "default": "HBA_110_190", + "enum": [ + "LBA_10_90", + "LBA_30_90", + "HBA_110_190", + "HBA_210_250" + ] + }, + "analog_pointing": { + "title": "Analog pointing", + "description": "HBA only", + "$ref": "#/definitions/pointing", + "default": {} + }, + "digital_pointings": { + "type": "array", + "title": "Beams", + "additionalItems": false, + "default": [{}], + "items": { + "title": "Beam", + "headerTemplate": "{{ i0 }} - {{ self.name }}", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "title": "Name/target", + "description": "Custom identifier for this beam. 
Same name is same beam.", + "default": "" + }, + "pointing": { + "title": "Digital pointing", + "$ref": "#/definitions/pointing", + "default": {} + }, + "subbands": { + "type": "array", + "title": "Subband list", + "additionalItems": false, + "default": [], + "items": { + "type": "integer", + "title": "Subband", + "minimum": 0, + "maximum": 511 + } + } + } + } + } + } + }, + "COBALT": { + "type": "object", + "title": "COBALT correlator/beamformer", + "additionalProperties": false, + "default": {}, + "properties": { + "blocksize": { + "type": "integer", + "title": "Block size (samples)", + "description": "Size of blocks COBALT works on, must be a multiple of all processing requirements", + "default": 196608, + "minimum": 97656, + "maximum": 292968 + }, + "delay_compensation": { + "type": "boolean", + "title": "Apply delay compensation", + "description": "Compensate for geometric and clock differences", + "default": true + }, + "bandpass_correction": { + "type": "boolean", + "title": "Apply band-pass correction", + "description": "Compensate for differences in station sensitivity within a subband", + "default": true + }, + "correlator": { + "title": "Correlator", + "type": "object", + "default": {}, + "oneOf": [ + { + "type": "object", + "title": "Enabled", + "additionalProperties": false, + "default": {}, + "properties": { + "enabled": { + "type": "boolean", + "title": "Enabled", + "description": "", + "default": true, + "options": { + "hidden": true + }, + "enum": [ + true + ] + }, + "channels_per_subband": { + "type": "integer", + "title": "Channels/subband", + "description": "Number of frequency bands per subband", + "default": 64, + "minimum": 1, + "enum": [ + 1, + 8, + 16, + 32, + 64, + 128, + 256, + 512, + 1024 + ] + }, + "blocks_per_integration": { + "type": "integer", + "title": "Blocks per integration", + "description": "Number of blocks to integrate", + "default": 1, + "minimum": 1 + }, + "integrations_per_block": { + "type": "integer", + "title": "Integrations per block", + "description": "Number of integrations to fit within each block", + "default": 1, + "minimum": 1 + }, + "phase_centers": { + "type": "array", + "title": "Custom phase centers", + "additionalItems": false, + "default": [{}], + "items": { + "title": "Beam", + "headerTemplate": "Beam {{ self.index }}", + "type": "object", + "additionalProperties": false, + "default": {}, + "properties": { + "index": { + "type": "integer", + "title": "Station beam index", + "description": "Apply to this station beam", + "minimum": 0, + "default": 0 + }, + "pointing": { + "title": "Correlator pointing", + "$ref": "#/definitions/pointing", + "default": {} + } + } + } + } + } + }, + { + "type": "object", + "title": "Disabled", + "additionalProperties": false, + "default": {}, + "properties": {} + } + ] + } + } + } + } +}'''), + "realtime": True, + "queue": False, + "tags": []} + + SubtaskTemplate.objects.create(**subtask_template_data) + + +def _populate_stations_schema(): + task_template_data = { "name": "stations schema", + "description": 'Generic station settings and selection', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": { + "pointing": { + "type": "object", + "additionalProperties": false, + "properties": { + "direction_type": { + "type": "string", + "title": "Reference frame", + "description": "", + "default": "J2000", + "enum": [ + "J2000", + "SUN", + "MOON", + "MERCURY", + "VENUS", + 
"MARS", + "JUPITER", + "SATURN", + "URANUS", + "NEPTUNE", + "PLUTO" + ] + }, + "angle1": { + "type": "number", + "title": "Angle 1", + "description": "First angle (f.e. RA)", + "default": 0 + }, + "angle2": { + "type": "number", + "title": "Angle 2", + "description": "Second angle (f.e. DEC)", + "default": 0 + } + } + } + }, + "additionalProperties": false, + "properties": { + "stations": { + "title": "Station list", + "type": "array", + "default": [{}], + "oneOf": [{ + "type": "array", + "title": "Fixed list", + "additionalItems": false, + "additionalProperties": false, + "default": ["CS001"], + "items": { + "type": "string", + "enum": [ + "CS001", + "CS002", + "CS003", + "CS004", + "CS005", + "CS006", + "CS007", + "CS011", + "CS013", + "CS017", + "CS021", + "CS024", + "CS026", + "CS028", + "CS030", + "CS031", + "CS032", + "CS101", + "CS103", + "CS201", + "CS301", + "CS302", + "CS401", + "CS501", + "RS104", + "RS106", + "RS205", + "RS208", + "RS210", + "RS305", + "RS306", + "RS307", + "RS310", + "RS406", + "RS407", + "RS409", + "RS410", + "RS503", + "RS508", + "RS509", + "DE601", + "DE602", + "DE603", + "DE604", + "DE605", + "FR606", + "SE607", + "UK608", + "DE609", + "PL610", + "PL611", + "PL612", + "IE613", + "LV614" + ], + "title": "Station", + "description": "" + }, + "minItems": 1, + "uniqueItems": true + }, + { + "title": "Dynamic list", + "type": "array", + "additionalItems": false, + "default": [{}], + "items": { + "type": "object", + "title": "Station set", + "headerTemplate": "{{ self.group }}", + "additionalProperties": false, + "default": {}, + "properties": { + "group": { + "type": "string", + "title": "Group/station", + "description": "Which (group of) station(s) to select from", + "default": "ALL", + "enum": [ + "ALL", + "SUPERTERP", + "CORE", + "REMOTE", + "DUTCH", + "INTERNATIONAL" + ] + }, + "min_stations": { + "type": "integer", + "title": "Minimum nr of stations", + "description": "Number of stations to use within group/station", + "default": 1, + "minimum": 0 + } + } + } + } + ] + }, + "antenna_set": { + "type": "string", + "title": "Antenna set", + "description": "Fields & antennas to use", + "default": "HBA_DUAL", + "enum": [ + "HBA_DUAL", + "HBA_DUAL_INNER", + "HBA_JOINED", + "HBA_JOINED_INNER", + "HBA_ONE", + "HBA_ONE_INNER", + "HBA_ZERO", + "HBA_ZERO_INNER", + "LBA_INNER", + "LBA_OUTER", + "LBA_SPARSE_EVEN", + "LBA_SPARSE_ODD", + "LBA_ALL" + ] + }, + "filter": { + "type": "string", + "title": "Band-pass filter", + "description": "Must match antenna type", + "default": "HBA_110_190", + "enum": [ + "LBA_10_70", + "LBA_30_70", + "LBA_10_90", + "LBA_30_90", + "HBA_110_190", + "HBA_210_250" + ] + }, + "analog_pointing": { + "title": "Analog pointing", + "description": "HBA only", + "default": {}, + "$ref": "#/definitions/pointing" + }, + "beams": { + "type": "array", + "title": "Beams", + "additionalItems": false, + "default": [{}], + "items": { + "title": "Beam", + "headerTemplate": "{{ i0 }} - {{ self.name }}", + "type": "object", + "additionalProperties": false, + "default": {}, + "properties": { + "name": { + "type": "string", + "title": "Name/target", + "description": "Identifier for this beam", + "default": "" + }, + "digital_pointing": { + "title": "Digital pointing", + "default": {}, + "$ref": "#/definitions/pointing" + }, + "subbands": { + "type": "array", + "title": "Subband list", + "additionalItems": false, + "default": [], + "items": { + "type": "integer", + "title": "Subband", + "minimum": 0, + "maximum": 511 + } + } + } + } + } + } +}'''), + "tags": []} 
+
+    TaskTemplate.objects.create(**task_template_data)
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index c108d4fbebefe8fa507cd505c518c213dcad55c5..7e06297f42bb8e1ff4b2dc75b272a558ec3ac8a9 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -2,6 +2,9 @@
 This file contains the serializers (for the elsewhere defined data models)
 """
+import logging
+logger = logging.getLogger(__name__)
+
 from rest_framework import serializers
 from .. import models
 from .specification import RelationalHyperlinkedModelSerializer
@@ -137,3 +140,31 @@ class DataproductHashSerializer(serializers.HyperlinkedModelSerializer):
     class Meta:
         model = models.DataproductHash
         fields = '__all__'
+
+
+class SubtaskSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer):
+
+    # Create a JSON editor form to replace the simple text field based on the schema in the template that this
+    # subtask refers to. If that fails, the JSONField remains a standard text input.
+    #
+    # Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution than
+    # ...intercepting the init process to determine the schema (or template uri or so) for the style attribute.
+    # ...However, I did not manage to simply pass the value(!) of e.g. the template field as a style attribute
+    # ...of the JSONField via a SerializerMethodField or similar, although I feel that should be possible.
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        import json
+
+        try:
+            schema = self.instance.specifications_template.schema
+            self.fields['specifications_doc'] = serializers.JSONField(
+                style={'template': 'josdejong_jsoneditor_widget.html',
+                       'schema': json.dumps(schema)})
+        except Exception as e:
+            # todo: Shall we use one of the default templates for the init?
+            logger.exception('Could not determine schema, hence no fancy JSON form. Expected for list view.')
+
+    class Meta:
+        model = models.Subtask
+        fields = '__all__'
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
index 586472fce556ef0d72ad4c3c93f2183ac7ede1a0..e3804396d7e8601705d9adac99cee64fc8dd9f4a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
@@ -163,8 +163,7 @@ class TaskRelationBlueprintSerializer(serializers.HyperlinkedModelSerializer):
 
 # ----- JSON
-
-class TaskBlueprintSerializerReactJSONform(RelationalHyperlinkedModelSerializer):
+class TaskBlueprintSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer):
 
     # Create a JSON editor form to replace the simple text field based on the schema in the template that this
     # blueprint refers to. If that fails, the JSONField remains a standard text input.
@@ -179,57 +178,25 @@ class TaskBlueprintSerializerReactJSONform(RelationalHyperlinkedModelSerializer)
         import json
 
         try:
-            schema = self.instance.template.schema
-
-            self.fields['requirements_doc'] = serializers.JSONField(
-                style={'template': 'react_jsonschema_form_widget.html',
+            schema = self.instance.specifications_template.schema
+            self.fields['specifications_doc'] = serializers.JSONField(
+                style={'template': 'josdejong_jsoneditor_widget.html',
                        'schema': json.dumps(schema)})
 
         except:
             # todo: Shall we use one of the default templates for the init?
             print('Could not determine schema, hence no fancy JSON form. 
Expected for list view.') - class Meta: model = models.TaskBlueprint fields = '__all__' extra_fields = ['subtasks', 'produced_by', 'consumed_by'] -class TaskBlueprintSerializerJSONeditor(RelationalHyperlinkedModelSerializer): - - # Create a JSON editor form to replace the simple text field based on the schema in the template that this - # blueprint refers to. If that fails, the JSONField remains a standard text input. - # - # Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution tham - # ...intercepting the init process to determine the schema (or template uri or so) for the style attribute. - # ...Hoewever, I did not manage to simply pass the value(!) of e.g. the template field as a style attribute - # ...of the JSONField via a SerializerMethodField or similar, although I feel that should be possible. - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - import json - - try: - schema = self.instance.template.schema - self.fields['requirements_doc'] = serializers.JSONField( - style={'template': 'json_editor_widget.html', - 'schema': json.dumps(schema)}) - - except: - # todo: Shall we use one of the default templates for the init? - print('Could not determine schema, hence no fancy JSON form. Expected for list view.') - - - #requirements_doc = serializers.JSONField(style={'template': 'json_editor_form_2.html', 'template_schema_uri': "http://localhost:8000/task_template/2/?format=json"}) - class Meta: - model = models.TaskBlueprint - fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by'] -class TaskBlueprintSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): +class TaskDraftSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this - # blueprint refers to. If that fails, the JSONField remains a standard text input. + # draft refers to. If that fails, the JSONField remains a standard text input. # # Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution tham # ...intercepting the init process to determine the schema (or template uri or so) for the style attribute. @@ -241,8 +208,8 @@ class TaskBlueprintSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializ import json try: - schema = self.instance.template.schema - self.fields['requirements_doc'] = serializers.JSONField( + schema = self.instance.specifications_template.schema + self.fields['specifications_doc'] = serializers.JSONField( style={'template': 'josdejong_jsoneditor_widget.html', 'schema': json.dumps(schema)}) @@ -250,17 +217,9 @@ class TaskBlueprintSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializ # todo: Shall we use one of the default templates for the init? print('Could not determine schema, hence no fancy JSON form. 
Expected for list view.') - - #requirements_doc = serializers.JSONField(style={'template': 'json_editor_form_2.html', 'template_schema_uri': "http://localhost:8000/task_template/2/?format=json"}) class Meta: - model = models.TaskBlueprint + model = models.TaskDraft fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by'] - + extra_fields = ['related_task_blueprint', 'produced_by', 'consumed_by'] -class TaskBlueprintJSONSerializer(RelationalHyperlinkedModelSerializer): - class Meta: - model = models.TaskBlueprint - fields = ('requirements_doc',) - extra_fields = ['subtasks', 'produced_by', 'consumed_by'] \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py new file mode 100644 index 0000000000000000000000000000000000000000..92b02f26838dea48e5152d0c43dd68ebeba4d9f5 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -0,0 +1,22 @@ + +from django.http import HttpResponse, JsonResponse +from django.shortcuts import get_object_or_404 +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.common.json_utils import get_default_json_object_for_schema +from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset + +def subtask_template_default_specification(request, subtask_template_pk:int): + subtask_template = get_object_or_404(models.SubtaskTemplate, pk=subtask_template_pk) + spec = get_default_json_object_for_schema(subtask_template.schema) + return JsonResponse(spec) + +def task_template_default_specification(request, task_template_pk:int): + task_template = get_object_or_404(models.TaskTemplate, pk=task_template_pk) + spec = get_default_json_object_for_schema(task_template.schema) + return JsonResponse(spec) + +def subtask_parset(request, subtask_pk:int): + subtask = get_object_or_404(models.Subtask, pk=subtask_pk) + parset = convert_to_parset(subtask) + return HttpResponse(str(parset), content_type='text/plain') + diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 063413aab4f459fd81d02a99acdeb981b048e0ac..bf52ee5614c0b2db59cdce9a56b79021485587c3 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -7,6 +7,30 @@ from rest_framework import viewsets from .lofar_viewset import LOFARViewSet from .. import models from .. 
import serializers
+from django_filters import rest_framework as filters
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask
+
+
+class subTaskFilter(filters.FilterSet):
+    class Meta:
+        model = Subtask
+        fields = {
+            'start_time': ['lt', 'gt'],
+            'stop_time': ['lt', 'gt'],
+            'cluster__name': ['exact', 'icontains'],
+        }
+
+from rest_framework.decorators import action
+from django.http import HttpResponse, JsonResponse
+from drf_yasg.utils import swagger_auto_schema
+
+from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.tmssapp import serializers
+
+from lofar.common.json_utils import get_default_json_object_for_schema
+from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset
+
 class SubtaskConnectorViewSet(LOFARViewSet):
     queryset = models.SubtaskConnector.objects.all()
@@ -42,6 +66,25 @@ class SubtaskTemplateViewSet(LOFARViewSet):
     queryset = models.SubtaskTemplate.objects.all()
     serializer_class = serializers.SubtaskTemplateSerializer
 
+    def get_queryset(self):
+        queryset = models.SubtaskTemplate.objects.all()
+
+        # query by name
+        name = self.request.query_params.get('name', None)
+        if name is not None:
+            return queryset.filter(name=name)
+
+        return queryset
+
+    @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in',
+                                    403: 'forbidden'},
+                         operation_description="Get a JSON object with all the defaults from the schema filled in.")
+    @action(methods=['get'], detail=True)
+    def default_specification(self, request, pk=None):
+        subtask_template = get_object_or_404(models.SubtaskTemplate, pk=pk)
+        spec = get_default_json_object_for_schema(subtask_template.schema)
+        return JsonResponse(spec)
+
 class DefaultSubtaskTemplateViewSet(LOFARViewSet):
     queryset = models.DefaultSubtaskTemplate.objects.all()
@@ -71,6 +114,8 @@ class DataproductFeedbackTemplateViewSet(LOFARViewSet):
 class SubtaskViewSet(LOFARViewSet):
     queryset = models.Subtask.objects.all()
     serializer_class = serializers.SubtaskSerializer
+    filter_backends = (filters.DjangoFilterBackend,)
+    filter_class = subTaskFilter
 
     def get_queryset(self):
         if 'task_blueprint_pk' in self.kwargs:
@@ -79,6 +124,14 @@ class SubtaskViewSet(LOFARViewSet):
         else:
             return models.Subtask.objects.all()
 
+    @swagger_auto_schema(responses={200: 'A LOFAR parset for this subtask',
+                                    403: 'forbidden'},
+                         operation_description="Get a LOFAR parset for the specifications of this subtask")
+    @action(methods=['get'], detail=True)
+    def parset(self, request, pk=None):
+        subtask = get_object_or_404(models.Subtask, pk=pk)
+        parset = convert_to_parset(subtask)
+        return HttpResponse(str(parset), content_type='text/plain')
 
 class SubtaskInputViewSet(LOFARViewSet):
     queryset = models.SubtaskInput.objects.all()
@@ -123,3 +176,36 @@ class DataproductArchiveInfoViewSet(LOFARViewSet):
 class DataproductHashViewSet(LOFARViewSet):
     queryset = models.DataproductHash.objects.all()
     serializer_class = serializers.DataproductHashSerializer
+
+
+# --- JSON
+
+class SubtaskViewSetJSONeditorOnline(LOFARViewSet):
+    queryset = models.Subtask.objects.all()
+    serializer_class = serializers.SubtaskSerializerJSONeditorOnline
+    filter_backends = (filters.DjangoFilterBackend,)
+    filter_class = subTaskFilter
+
+    def get_view_name(self):  # override name because DRF auto-naming does not produce something usable here
+        name = "Subtask"
+        if self.suffix:
+            name += ' ' + self.suffix
+        return name
+
+    def 
get_queryset(self): + if 'task_blueprint_pk' in self.kwargs: + task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_pk']) + return task_blueprint.subtasks.all() + else: + return models.Subtask.objects.all() + + @swagger_auto_schema(responses={200: 'A LOFAR parset for this subtask (as plain text, not json)', + 403: 'forbidden', + 404: 'Not found'}, + operation_description="Get a LOFAR parset for the specifications of this subtask") + @action(methods=['get'], detail=True) + def parset(self, request, pk=None): + subtask = get_object_or_404(models.Subtask, pk=pk) + parset = convert_to_parset(subtask) + return HttpResponse(str(parset), content_type='text/plain') + diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 4c31debc36ccbf9840521a52d310624f58fa7025..b9451879335d806cc5cee91068af0e0543e6d0d6 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -3,10 +3,19 @@ This file contains the viewsets (based on the elsewhere defined data models and """ from django.shortcuts import get_object_or_404 -from rest_framework import viewsets -from .lofar_viewset import LOFARViewSet -from .. import models, serializers -from rest_framework.renderers import BrowsableAPIRenderer, TemplateHTMLRenderer +from django.http import JsonResponse + +from rest_framework.decorators import permission_classes +from rest_framework.permissions import IsAuthenticatedOrReadOnly, DjangoModelPermissions +from rest_framework.decorators import action + +from drf_yasg.utils import swagger_auto_schema + +from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp import serializers + +from lofar.common.json_utils import get_default_json_object_for_schema class TagsViewSet(LOFARViewSet): queryset = models.Tags.objects.all() @@ -37,6 +46,15 @@ class TaskTemplateViewSet(LOFARViewSet): queryset = models.TaskTemplate.objects.all() serializer_class = serializers.TaskTemplateSerializer + @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in', + 403: 'forbidden'}, + operation_description="Get a JSON object with all the defaults from the schema filled in.") + @action(methods=['get'], detail=True) + def default_specification(self, request, pk=None): + template = get_object_or_404(models.TaskTemplate, pk=pk) + spec = get_default_json_object_for_schema(template.schema) + return JsonResponse(spec) + class DefaultTaskTemplateViewSet(LOFARViewSet): queryset = models.DefaultTaskTemplate.objects.all() @@ -78,6 +96,7 @@ class TaskConnectorsViewSet(LOFARViewSet): serializer_class = serializers.TaskConnectorsSerializer +@permission_classes((DjangoModelPermissions,)) # example override of default permissions per viewset | todo: review for production class CycleViewSet(LOFARViewSet): queryset = models.Cycle.objects.all() serializer_class = serializers.CycleSerializer @@ -125,15 +144,15 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): class TaskDraftViewSet(LOFARViewSet): - queryset = models.TaskDraft.objects.all() - serializer_class = serializers.TaskDraftSerializer + queryset = models.TaskDraft.objects.all() + serializer_class = serializers.TaskDraftSerializer - def get_queryset(self): - if 'scheduling_unit_draft_pk' in self.kwargs: - scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, 
pk=self.kwargs['scheduling_unit_draft_pk']) - return scheduling_unit_draft.task_drafts.all() - else: - return models.TaskDraft.objects.all() + def get_queryset(self): + if 'scheduling_unit_draft_pk' in self.kwargs: + scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_pk']) + return scheduling_unit_draft.task_drafts.all() + else: + return models.TaskDraft.objects.all() class TaskBlueprintViewSet(LOFARViewSet): @@ -178,27 +197,29 @@ class TaskRelationBlueprintViewSet(LOFARViewSet): # --- JSON - -class TaskBlueprintViewSetReactJSONform(LOFARViewSet): - queryset = models.TaskBlueprint.objects.all() - serializer_class = serializers.TaskBlueprintSerializerReactJSONform - - -class TaskBlueprintViewSetJSONeditor(LOFARViewSet): - queryset = models.TaskBlueprint.objects.all() - serializer_class = serializers.TaskBlueprintSerializerJSONeditor - - class TaskBlueprintViewSetJSONeditorOnline(LOFARViewSet): queryset = models.TaskBlueprint.objects.all() serializer_class = serializers.TaskBlueprintSerializerJSONeditorOnline + def get_view_name(self): # override name because DRF auto-naming does not produce something usable here + name = "Task Blueprint" + if self.suffix: + name += ' ' + self.suffix + return name -# # todo: this is experimental / for demo purposes. Remove or make functional. -# class JSONEditorViewSet(LOFARViewSet): -# renderer_classes = [TemplateHTMLRenderer] -# template_name = 'react_jsonschema_form.html' -# queryset = models.TaskBlueprint.objects.all() -# serializer_class = serializers.TaskBlueprintJSONSerializer +class TaskDraftViewSetJSONeditorOnline(LOFARViewSet): + queryset = models.TaskDraft.objects.all() + serializer_class = serializers.TaskDraftSerializerJSONeditorOnline + def get_view_name(self): # override name because DRF auto-naming does not produce something usable here + name = "Task Draft" + if self.suffix: + name += ' ' + self.suffix + return name + def get_queryset(self): + if 'scheduling_unit_draft_pk' in self.kwargs: + scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_pk']) + return scheduling_unit_draft.task_drafts.all() + else: + return models.TaskDraft.objects.all() \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index fd80e0ea5a09c91653c7fb3148b134a048ae3f9c..c60cd303450774b7c912cd3afc03e0c728b5a218 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -21,7 +21,7 @@ from django.conf.urls import url, include from django.views.generic.base import TemplateView from rest_framework import routers, permissions -from .tmssapp import viewsets, models, serializers +from .tmssapp import viewsets, models, serializers, views from rest_framework.documentation import include_docs_urls from drf_yasg.views import get_schema_view from drf_yasg import openapi @@ -88,7 +88,7 @@ router.register(r'project', viewsets.ProjectViewSet) router.register(r'scheduling_set', viewsets.SchedulingSetViewSet) router.register(r'scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) router.register(r'scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintViewSet) -router.register(r'task_draft', viewsets.TaskDraftViewSet) +#router.register(r'task_draft', viewsets.TaskDraftViewSet) # todo: default view, re-activate or remove the JSON editor one in bottom router.register(r'task_blueprint', viewsets.TaskBlueprintViewSet) router.register(r'task_relation_draft', viewsets.TaskRelationDraftViewSet) 
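# Note on the default_specification endpoints wired up in the viewsets above: they rely on
# lofar.common.json_utils.get_default_json_object_for_schema, whose implementation is not
# part of this patch. The sketch below (hypothetical name, illustration only) shows the
# idea: walk the JSON schema and materialise each property's "default" value.
def _default_object_for_schema(schema: dict):
    if "default" in schema:
        return schema["default"]
    if schema.get("type") == "object":
        # recurse into object properties
        return {name: _default_object_for_schema(prop)
                for name, prop in schema.get("properties", {}).items()}
    # fall back to a type-appropriate empty value
    return {"string": "", "integer": 0, "number": 0, "boolean": False, "array": []}.get(schema.get("type"))

# e.g. for the correlator schema populated earlier this yields {"duration": 60, ...}
assert _default_object_for_schema(
    {"type": "object", "properties": {"duration": {"type": "number", "default": 60}}}) == {"duration": 60}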
router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) @@ -97,7 +97,7 @@ router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSe router.register(r'cycle/(?P<cycle_pk>[\w\-]+)/project', viewsets.ProjectViewSet) router.register(r'scheduling_set/(?P<scheduling_set_pk>\d+)/scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintViewSet) -router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSet) +#router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSet) # todo: default view, re-activate or remove the JSON editor one in bottom router.register(r'task_draft/(?P<task_draft_pk>\d+)/task_blueprint', viewsets.TaskBlueprintViewSet) router.register(r'task_draft/(?P<task_draft_pk>\d+)/task_relation_draft', viewsets.TaskRelationDraftViewSet) router.register(r'task_relation_draft/(?P<task_relation_draft_pk>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintViewSet) @@ -123,9 +123,8 @@ router.register(r'subtask_input_selection_template', viewsets.SubtaskInputSelect router.register(r'dataproduct_feedback_template', viewsets.DataproductFeedbackTemplateViewSet) # instances -router.register(r'subtask', viewsets.SubtaskViewSet) +#router.register(r'subtask', viewsets.SubtaskViewSet) # todo: default view, re-activate or remove the JSON editor one in bottom router.register(r'dataproduct', viewsets.DataproductViewSet) -#router.register(r'dataproduct_relation', viewsets.DataproductRelationViewSet) router.register(r'subtask_input', viewsets.SubtaskInputViewSet) router.register(r'subtask_output', viewsets.SubtaskOutputViewSet) router.register(r'antenna_set', viewsets.AntennaSetViewSet) @@ -140,12 +139,13 @@ router.register(r'task_relation_blueprint', viewsets.TaskRelationBlueprintViewSe # --- # JSON -router.register(r'task_blueprint_A', viewsets.TaskBlueprintViewSetJSONeditor) -router.register(r'task_blueprint_B', viewsets.TaskBlueprintViewSetReactJSONform) -router.register(r'task_blueprint_C', viewsets.TaskBlueprintViewSetJSONeditorOnline) -#router.register(r'json_editor', viewsets.JSONEditorViewSet) +router.register(r'task_draft', viewsets.TaskDraftViewSetJSONeditorOnline) +router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_pk>\d+)/task_draft', viewsets.TaskDraftViewSetJSONeditorOnline) +router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline) urlpatterns.extend(router.urls) # prefix everything for proxy -urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),] \ No newline at end of file +urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),] + + diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index 296452d581ff427d0e7c57919f01a28328943640..8ef07b8e1060f819cd8cc1af85f4c7040a83f823 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -15,6 +15,8 @@ if(BUILD_TESTING) ldap_test_service.py tmss_database_unittest_setup.py tmss_test_environment_unittest_setup.py + tmss_test_data_django_models.py + tmss_test_data_rest.py DESTINATION lofar/sas/tmss/test) lofar_add_test(t_tmss_test_database) @@ -22,9 +24,8 @@ if(BUILD_TESTING) lofar_add_test(t_tmssapp_specification_functional) lofar_add_test(t_tmssapp_scheduling_django) 
lofar_add_test(t_tmssapp_scheduling_functional)
+    lofar_add_test(t_subtask_validation)
+    lofar_add_test(t_tmssapp_specification_permissions)
 
 #    set_tests_properties(t_tmssapp_scheduling_functional PROPERTIES TIMEOUT 300)
 endif()
-
-
-
diff --git a/SAS/TMSS/test/ldap_test_service.py b/SAS/TMSS/test/ldap_test_service.py
index 11dd386437c3840a794e35cea90c4945e858c846..59eb0b60c041495333804830b90075ed0d472baa 100644
--- a/SAS/TMSS/test/ldap_test_service.py
+++ b/SAS/TMSS/test/ldap_test_service.py
@@ -49,7 +49,6 @@ class TestLDAPServer():
 
     @property
     def dbcreds(self):
-        # return dbcredentials.DBCredentials().get('tmss_ldap_test')
         return self._tmp_creds.dbcreds
 
     def start(self):
@@ -72,13 +71,40 @@ class TestLDAPServer():
                               {'objectclass': 'organizationUnit',
                                'dn': 'ou=Users,o=lofar,c=eu',
                                'attributes': {'ou': 'Users'}},
+                              {'objectclass': 'lofarPerson',
+                               'dn': 'cn=paulus,ou=users,o=lofar,c=eu',
+                               'attributes': {'cn': 'paulus',
+                                              'userPassword': 'pauluspass',
+                                              'mail': 'paulus@boskabouter.nl',
+                                              'givenName': 'Paulus',
+                                              'sn': 'Boskabouter',
+                                              'lofarPersonSystemrole': 'cn=support,ou=Roles,o=lofar,c=eu'}},
+                              {'objectclass': 'lofarPerson',
+                               'dn': 'cn=paula,ou=users,o=lofar,c=eu',
+                               'attributes': {'cn': 'paula',
+                                              'userPassword': 'paulapass',
+                                              'mail': 'paula@boskabouter.nl',
+                                              'givenName': 'Paula',
+                                              'sn': 'Boskabouter',
+                                              'lofarPersonSystemrole': 'cn=user,ou=Roles,o=lofar,c=eu'}},
                               {'objectclass': 'lofarPerson',
                                'dn': 'cn=%s,ou=users,o=lofar,c=eu' % self.dbcreds.user,
                                'attributes': {'cn': self.dbcreds.user,
                                               'userPassword': self.dbcreds.password,
-                                              'mail': 'gen@eric.nl',
-                                              'givenName': 'Gen',
-                                              'sn': 'Eric'}}]})
+                                              'mail': '%s@lofar.test' % self.dbcreds.user,
+                                              'givenName': self.dbcreds.user,
+                                              'sn': 'lofar_test'}},
+                              {'objectclass': 'organizationUnit',
+                               'dn': 'ou=Roles,o=lofar,c=eu',
+                               'attributes': {'ou': 'Roles'}},
+                              {'objectclass': 'lofarSystemrole',
+                               'dn': 'cn=user,ou=roles,o=lofar,c=eu',
+                               'attributes': {'cn': 'user'}},
+                              {'objectclass': 'lofarSystemrole',
+                               'dn': 'cn=support,ou=roles,o=lofar,c=eu',
+                               'attributes': {'cn': 'support'}},
+                              ]
+                             })
 
         self._server.start()
         os.environ["TMSS_LDAPCREDENTIALS"] = self.dbcreds_id
@@ -106,7 +132,6 @@ class TestLDAPServer():
             connection.search(search_base='o=lofar,c=eu', search_filter='(objectclass=*)')
             logger.info(connection.response)
 
-
 def main():
     """ Start an isolated LDAP server and keep it alive until interrupted by Ctrl-C. 
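The fixture above gives the test LDAP tree two users ('paulus' with the 'support' system role, 'paula' with the 'user' role) plus the role entries they point at. For reference, a minimal ldap3 sketch of how a client could resolve a user's lofarPersonSystemrole against such a tree; host, port, and bind credentials are placeholders here, since in practice TestLDAPServer hands them out via dbcreds:

from ldap3 import Server, Connection, ALL

# placeholder connection details: the test server picks its own port and credentials
server = Server('localhost', port=10389, get_info=ALL)
with Connection(server, user='cn=paulus,ou=users,o=lofar,c=eu',
                password='pauluspass', auto_bind=True) as conn:
    # look the user up and read back the attributes populated by the fixture
    conn.search(search_base='ou=Users,o=lofar,c=eu',
                search_filter='(cn=paulus)',
                attributes=['mail', 'lofarPersonSystemrole'])
    for entry in conn.entries:
        print(entry.mail, entry.lofarPersonSystemrole)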
diff --git a/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/dockerfiles/oidc_testprovider b/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/dockerfiles/oidc_testprovider index 464d90658afcef9c571ddcd4e6fb1f64bad37fe8..e93084d98a19041a1e16b974caee552d6e8ba5d3 100644 --- a/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/dockerfiles/oidc_testprovider +++ b/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/dockerfiles/oidc_testprovider @@ -1,7 +1,10 @@ FROM python:3.6 EXPOSE 8088 -COPY testprovider /code/ WORKDIR /code +COPY testprovider/requirements.txt /code/ RUN pip install -r requirements.txt + +COPY testprovider /code/ + CMD ./bin/run.sh diff --git a/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/bin/run.sh b/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/bin/run.sh index 7008ec74df50c3a95634872328fa89ff59d98f42..feb6f884154e0df78810046aad8659ba0cf531a4 100755 --- a/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/bin/run.sh +++ b/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/bin/run.sh @@ -1,7 +1,8 @@ #!/bin/sh +if [ -n "$OIDC_OP_CALLBACK_ENDPOINT" ]; then sed -i "s|http://localhost:8008/oidc/callback/|$OIDC_OP_CALLBACK_ENDPOINT|" fixtures.json; fi + python manage.py migrate --noinput python manage.py loaddata fixtures.json python ./manage.py createuser paulus pauluspass paulus@localhost python manage.py runserver 0.0.0.0:8088 - diff --git a/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/oidcprovider/management/commands/createuser.py b/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/oidcprovider/management/commands/createuser.py index 11fc496c521cf65641dbe5bc4e10509e379f231c..76eb780c1f5247a94400bdb7421761416ad20548 100644 --- a/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/oidcprovider/management/commands/createuser.py +++ b/SAS/TMSS/test/oidc/docker-test-mozilla-django-oidc/testprovider/oidcprovider/management/commands/createuser.py @@ -17,11 +17,13 @@ class Command(BaseCommand): parser.add_argument("username", help="account username") parser.add_argument("password", help="account password") parser.add_argument("email", help="account email address") + parser.add_argument("groups", help="account user groups", nargs='*') def handle(self, **options): username = options["username"] password = options["password"] email = options["email"] + groups = options["groups"] if User.objects.filter(username=username).exists(): self.stdout.write("User {} already exists.".format(username)) @@ -29,5 +31,6 @@ class Command(BaseCommand): user = User.objects.create(username=username, email=email) user.set_password(password) + user.groups.set(groups) user.save() self.stdout.write("User {} created.".format(username)) diff --git a/SAS/TMSS/test/t_parset_adapter.py b/SAS/TMSS/test/t_parset_adapter.py new file mode 100755 index 0000000000000000000000000000000000000000..d68d9668b2760c835854e6ae6da981ad979a7657 --- /dev/null +++ b/SAS/TMSS/test/t_parset_adapter.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id: $
+
+import os
+import unittest
+import requests
+
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+# Do mandatory setup step:
+# use setup/teardown magic for tmss test database, ldap server and django server
+# (ignore the PyCharm unused import warning; the unittest runner does use the tmss_test_environment_unittest_setup module at runtime)
+from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
+
+from lofar.sas.tmss.test.tmss_test_data_django_models import *
+
+# import and setup rest test data creator
+from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
+
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset
+from lofar.common.json_utils import get_default_json_object_for_schema
+
+class ParsetAdapterTest(unittest.TestCase):
+    def test_01(self):
+        subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema')
+        specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
+        for dp in specifications_doc['stations']['digital_pointings']:
+            dp['subbands'] = list(range(8))
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+        subtask: models.Subtask = models.Subtask.objects.create(**subtask_data)
+        subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
+        dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
+
+        parset = convert_to_parset(subtask)
+
+if __name__ == "__main__":
+    os.environ['TZ'] = 'UTC'
+    unittest.main()
diff --git a/SAS/TMSS/test/t_parset_adapter.run b/SAS/TMSS/test/t_parset_adapter.run
new file mode 100755
index 0000000000000000000000000000000000000000..0ec442af1b13271e84299c07fc7966bc7560ac82
--- /dev/null
+++ b/SAS/TMSS/test/t_parset_adapter.run
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "*tmss*" t_parset_adapter.py
+
diff --git a/SAS/TMSS/test/t_parset_adapter.sh b/SAS/TMSS/test/t_parset_adapter.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d75b13ea9fd228e65ff103625be8bda753a84b6a
--- /dev/null
+++ b/SAS/TMSS/test/t_parset_adapter.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_parset_adapter
\ No newline at end of file
diff --git a/SAS/TMSS/test/t_subtask_validation.py b/SAS/TMSS/test/t_subtask_validation.py
new file mode 100755
index 0000000000000000000000000000000000000000..53bcd501117f3ca079e2016d43a01de7dbeb0e9a
--- /dev/null
+++ b/SAS/TMSS/test/t_subtask_validation.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id: $
+
+import os
+import unittest
+import requests
+
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+# Do mandatory setup step:
+# use setup/teardown magic for tmss test database, ldap server and django server
+# (ignore the PyCharm unused import warning; the unittest runner does use the tmss_test_environment_unittest_setup module at runtime)
+from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
+
+from lofar.sas.tmss.test.tmss_test_data_django_models import *
+
+# import and setup rest test data creator
+from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
+
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.exceptions import SpecificationException
+
+class SubtaskValidationTest(unittest.TestCase):
+    @staticmethod
+    def create_subtask_template(schema):
+        subtask_template_data = SubtaskTemplate_test_data(schema=schema)
+        return models.SubtaskTemplate.objects.create(**subtask_template_data)
+
+    def test_validate_simple_string_schema_with_valid_specification(self):
+        subtask_template = self.create_subtask_template('{"type": "string"}')
+        specifications_doc = '"a random string"'
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+
+        subtask = models.Subtask.objects.create(**subtask_data)
+        self.assertIsNotNone(subtask)
+
+    def test_validate_simple_string_schema_with_invalid_specification(self):
+        subtask_template = self.create_subtask_template('{"type": "string"}')
+        specifications_doc = '42'
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+
+        with self.assertRaises(SpecificationException):
+            models.Subtask.objects.create(**subtask_data)
+
+    def test_validate_simple_string_schema_when_updating_valid_to_invalid_specification(self):
+        subtask_template = self.create_subtask_template('{"type": "string"}')
+        valid_spec = '"a random string"'
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=valid_spec)
+
+        subtask = models.Subtask.objects.create(**subtask_data)
+        self.assertIsNotNone(subtask)
+
+        # updating the specification with an invalid one should fail
+        invalid_spec = '42'
+        with self.assertRaises(SpecificationException):
+            subtask.specifications_doc = invalid_spec
+            subtask.save()
+            self.assertEqual(invalid_spec, subtask.specifications_doc)
+
+        # revert invalid update, and check
+        subtask.refresh_from_db()
+        self.assertEqual(valid_spec, subtask.specifications_doc)
+
+    def test_validate_flawed_json_schema(self):
+        subtask_template = self.create_subtask_template('{ this is not a json object }')
+        specifications_doc = '"a random string"'
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+
+        with self.assertRaises(SpecificationException) as context:
+            models.Subtask.objects.create(**subtask_data)
+        self.assertTrue('invalid json' in str(context.exception).lower())
+
+    def test_validate_flawed_json_specification(self):
+        subtask_template = self.create_subtask_template('{"type": "string"}')
+        specifications_doc = '{ this is not a json object }'
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+
+        with self.assertRaises(SpecificationException) as context:
+            models.Subtask.objects.create(**subtask_data)
+        self.assertTrue('invalid json' in str(context.exception).lower())
+
+    def test_validate_correlator_schema_with_valid_specification(self):
+        # fetch correlator_schema for Dupplo UC1 which should be in the initially populated database
+        subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema')
+        self.assertIsNotNone(subtask_template)
+
+        specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
+        subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+
+        subtask = models.Subtask.objects.create(**subtask_data)
+        self.assertIsNotNone(subtask)
+
+    def test_validate_correlator_schema_with_invalid_specification(self):
+        # fetch correlator_schema for Dupplo UC1 which should be in the initially populated database
+        subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema')
+        self.assertIsNotNone(subtask_template)
+
+        # test with invalid json
+        with self.assertRaises(SpecificationException) as context:
+            subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc="bogus spec")
+            models.Subtask.objects.create(**subtask_data)
+        self.assertTrue('invalid json' in str(context.exception).lower())
+
+        # test with valid json, but not according to schema
+        with self.assertRaises(SpecificationException) as context:
+            specifications_doc = '''{ "duration": -10 }'''
+            subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
+            models.Subtask.objects.create(**subtask_data)
+        self.assertTrue('-10' in str(context.exception).lower())
+
+    def test_validate_simple_string_schema_with_valid_specification_via_rest(self):
+        template = rest_data_creator.SubtaskTemplate(schema='{"type": "string"}')
+        schema_url = rest_data_creator.post_data_and_get_url(template, '/subtask_template/')
+
+        specifications_doc = '"a random string"'
+        subtask_test_data = rest_data_creator.Subtask(specifications_template_url=schema_url, specifications_doc=specifications_doc)
+
+        # POST and GET a new item and assert correctness
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 201, subtask_test_data)
+        url = r_dict['url']
+        GET_and_assert_expected_response(self, url, 200, subtask_test_data)
+
+    def test_validate_simple_string_schema_with_invalid_specification_via_rest(self):
+        template = 
rest_data_creator.SubtaskTemplate(schema='{"type": "string"}') + schema_url = rest_data_creator.post_data_and_get_url(template, '/subtask_template/') + + specifications_doc = 42 # not a string, so not compliant with schema + subtask_test_data = rest_data_creator.Subtask(specifications_template_url=schema_url, specifications_doc=specifications_doc) + + # POST and GET a new item and assert correctness + response_content = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 500, {}) + + self.assertTrue("SpecificationException at /api/subtask/" in response_content) + self.assertTrue("42 is not of type 'string'" in response_content) + + def test_validate_correlator_schema_with_valid_specification_via_rest(self): + # fetch correlator_schema for Dupplo UC1 which should be in the initially populated database + response = requests.get(BASE_URL+"/subtask_template/", {"format": "json", "name": "observationcontrol schema"}, auth=AUTH) + self.assertEqual(200, response.status_code) + json_response = response.json() + self.assertEqual(1, json_response.get('count')) + + template = json_response['results'][0] + schema_url = template['url'] + schema = template['schema'] + + specifications_doc = get_default_json_object_for_schema(schema) + subtask_test_data = rest_data_creator.Subtask(specifications_template_url=schema_url, specifications_doc=specifications_doc) + + # POST and GET a new item and assert correctness + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 201, subtask_test_data) + url = r_dict['url'] + GET_and_assert_expected_response(self, url, 200, subtask_test_data) + + def test_validate_correlator_schema_with_invalid_specification_via_rest(self): + # fetch correlator_schema for Dupplo UC1 which should be in the initially populated database + response = requests.get(BASE_URL+"/subtask_template/", {"format": "json", "name": "observationcontrol schema"}, auth=AUTH) + self.assertEqual(200, response.status_code) + json_response = response.json() + self.assertEqual(1, json_response.get('count')) + + template = json_response['results'][0] + schema_url = template['url'] + + specifications_doc = "bogus spec" + subtask_test_data = rest_data_creator.Subtask(specifications_template_url=schema_url, specifications_doc=specifications_doc) + + POST_and_assert_expected_response(self, BASE_URL + '/subtask/', subtask_test_data, 500, {}) + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_subtask_validation.run b/SAS/TMSS/test/t_subtask_validation.run new file mode 100755 index 0000000000000000000000000000000000000000..003e3a3835db2f9e6d2df5fccdff8b1e4bc53a40 --- /dev/null +++ b/SAS/TMSS/test/t_subtask_validation.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_subtask_validation.py + diff --git a/SAS/TMSS/test/t_subtask_validation.sh b/SAS/TMSS/test/t_subtask_validation.sh new file mode 100755 index 0000000000000000000000000000000000000000..794215da61eb5518f882f78504cd28ec840e26d9 --- /dev/null +++ b/SAS/TMSS/test/t_subtask_validation.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_subtask_validation \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmss_test_database.py b/SAS/TMSS/test/t_tmss_test_database.py index 418ad85448359e3b7791e7059cd2e0c83a73dfe3..3e99b742d72f12c0f4456f6191aff3f05b7153df 100755 --- a/SAS/TMSS/test/t_tmss_test_database.py +++ b/SAS/TMSS/test/t_tmss_test_database.py @@ -46,14 +46,15 @@ class 
TMSSPostgresTestMixinTestCase(TMSSPostgresTestMixin, unittest.TestCase): def test_db_basics(self): '''Can we do some simple plain sql queries?''' with PostgresDatabaseConnection(self.dbcreds) as db: - self.assertEqual(0, db.executeQuery("SELECT COUNT(*) FROM tmssapp_cycle;", fetch=FETCH_ONE)['count']) + cycle_count = db.executeQuery("SELECT COUNT(*) FROM tmssapp_cycle;", fetch=FETCH_ONE)['count'] + self.assertGreaterEqual(cycle_count, 0) now = datetime.utcnow() db.executeQuery('''INSERT INTO tmssapp_cycle VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);''', qargs=([], now, now, "my_description", "my_name", now, now, 0, 1, 2, 3)) - self.assertEqual(1, db.executeQuery("SELECT COUNT(*) FROM tmssapp_cycle;", fetch=FETCH_ONE)['count']) + self.assertEqual(cycle_count+1, db.executeQuery("SELECT COUNT(*) FROM tmssapp_cycle;", fetch=FETCH_ONE)['count']) if __name__ == "__main__": diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django.py b/SAS/TMSS/test/t_tmssapp_scheduling_django.py index 5218d25ba70b5f37816b71b5efc39d8d0e995da0..28bdfefadbe5b2c11567f73c26fc8aa8bd167306 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django.py @@ -37,6 +37,8 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_database_unittest_setup module) from lofar.sas.tmss.test.tmss_database_unittest_setup import * +from lofar.sas.tmss.test.tmss_test_data_django_models import * + from django.db.utils import IntegrityError # TODO: rest API testing should be moved out of this test module. diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py b/SAS/TMSS/test/t_tmssapp_scheduling_functional.py index 492a70decf961c9e8a558bb7761e833734b59309..ba1681a9c1983c8af828c608eb1b7b24fb25f2db 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_functional.py @@ -27,6 +27,7 @@ # todo: behavior in a controlled way. # todo: We should probably also fully test behavior wrt mandatory and nullable fields. 
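# Aside: the POST/GET/PUT/PATCH/DELETE_and_assert_expected_response helpers used
# throughout these functional tests come from tmss_test_environment_unittest_setup,
# which this patch does not touch. A hypothetical sketch of the shape such a helper
# could have (plain requests plus unittest assertions; AUTH and the _sketch name are
# assumptions, not taken from this patch):
import requests

def POST_and_assert_expected_response_sketch(test, url, payload, expected_code, expected_content):
    response = requests.post(url, json=payload, auth=AUTH)  # AUTH: assumed to be set up by the test environment
    test.assertEqual(expected_code, response.status_code)
    r_dict = response.json() if response.content else {}
    for key, value in expected_content.items():  # every expected field must be echoed back
        test.assertEqual(value, r_dict.get(key))
    return r_dict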
+from datetime import datetime, timedelta import unittest import logging logger = logging.getLogger(__name__) @@ -36,6 +37,14 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin # use setup/teardown magic for tmss test database, ldap server and django server # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * +from lofar.sas.tmss.test.tmss_test_data_django_models import * +from lofar.sas.tmss.tmss.tmssapp import models + +# import and setup test data creator +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator +test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) + +DJANGO_TIMEFORMAT = "%Y-%m-%dT%H:%M:%S" class SubtaskTemplateTestCase(unittest.TestCase): @@ -48,7 +57,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/subtask_template/1234321/', 404, {}) def test_subtask_template_POST_and_GET(self): - st_test_data = SubtaskTemplate_test_data() + st_test_data = test_data_creator.SubtaskTemplate() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) @@ -56,12 +65,12 @@ class SubtaskTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, st_test_data) def test_subtask_template_PUT_invalid_raises_error(self): - st_test_data = SubtaskTemplate_test_data() + st_test_data = test_data_creator.SubtaskTemplate() PUT_and_assert_expected_response(self, BASE_URL + '/subtask_template/9876789876/', st_test_data, 404, {}) def test_subtask_template_PUT(self): - st_test_data = SubtaskTemplate_test_data(name="the one") - st_test_data2 = SubtaskTemplate_test_data(name="the other") + st_test_data = test_data_creator.SubtaskTemplate(name="the one") + st_test_data2 = test_data_creator.SubtaskTemplate(name="the other") # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) @@ -73,7 +82,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, st_test_data2) def test_subtask_template_PATCH(self): - st_test_data = SubtaskTemplate_test_data() + st_test_data = test_data_creator.SubtaskTemplate() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) @@ -92,7 +101,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_template_DELETE(self): - st_test_data = SubtaskTemplate_test_data() + st_test_data = test_data_creator.SubtaskTemplate() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_template/', st_test_data, 201, st_test_data) @@ -103,7 +112,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_subtask_template_PROTECT_behavior_on_type_choice_deleted(self): - st_test_data = SubtaskTemplate_test_data() + st_test_data = test_data_creator.SubtaskTemplate() # create dependency that is safe to delete (enums are not populated / re-established between tests) type_data = {'value': 'kickme'} @@ -134,7 +143,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + 
'/dataproduct_specifications_template/1234321/', 404, {}) def test_dataproduct_specifications_template_POST_and_GET(self): - dst_test_data = DataproductSpecificationsTemplate_test_data() + dst_test_data = test_data_creator.DataproductSpecificationsTemplate() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data) @@ -142,13 +151,13 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dst_test_data) def test_dataproduct_specifications_template_PUT_invalid_raises_error(self): - dst_test_data = DataproductSpecificationsTemplate_test_data() + dst_test_data = test_data_creator.DataproductSpecificationsTemplate() PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/9876789876/', dst_test_data, 404, {}) def test_dataproduct_specifications_template_PUT(self): - dst_test_data = DataproductSpecificationsTemplate_test_data(name="the one") - dst_test_data2 = DataproductSpecificationsTemplate_test_data(name="the other") + dst_test_data = test_data_creator.DataproductSpecificationsTemplate(name="the one") + dst_test_data2 = test_data_creator.DataproductSpecificationsTemplate(name="the other") # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data) @@ -160,7 +169,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dst_test_data2) def test_dataproduct_specifications_template_PATCH(self): - dst_test_data = DataproductSpecificationsTemplate_test_data() + dst_test_data = test_data_creator.DataproductSpecificationsTemplate() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data) @@ -178,7 +187,7 @@ class DataproductSpecificationsTemplateTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_dataproduct_specifications_template_DELETE(self): - dst_test_data = DataproductSpecificationsTemplate_test_data() + dst_test_data = test_data_creator.DataproductSpecificationsTemplate() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_specifications_template/', dst_test_data, 201, dst_test_data) @@ -201,19 +210,19 @@ class SubtaskInputSelectionTemplateTestCase(unittest.TestCase): class DefaultSubtaskTemplatesTestCase(unittest.TestCase): def test_default_subtask_template_POST(self): - template_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/subtask_template/') - dst_test_data = DefaultSubtaskTemplates_test_data(template_url=template_url) + template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/subtask_template/') + dst_test_data = test_data_creator.DefaultSubtaskTemplates(template_url=template_url) POST_and_assert_expected_response(self, BASE_URL + '/default_subtask_template/', dst_test_data, 201, dst_test_data) def test_default_dataproduct_specifications_template_POST(self): - template_url = post_data_and_get_url(DataproductSpecificationsTemplate_test_data(), '/dataproduct_specifications_template/') - dst_test_data = DefaultSubtaskTemplates_test_data(template_url=template_url) + template_url = 
test_data_creator.post_data_and_get_url(test_data_creator.DataproductSpecificationsTemplate(), '/dataproduct_specifications_template/')
+        dst_test_data = test_data_creator.DefaultSubtaskTemplates(template_url=template_url)
         POST_and_assert_expected_response(self, BASE_URL + '/default_dataproduct_specifications_template/', dst_test_data, 201, dst_test_data)
 
     def test_default_subtask_template_PROTECT_behavior_on_template_deleted(self):
-        st_test_data = SubtaskTemplate_test_data()
-        template_url = post_data_and_get_url(st_test_data, '/subtask_template/')
-        dst_test_data = DefaultSubtaskTemplates_test_data(template_url=template_url)
+        st_test_data = test_data_creator.SubtaskTemplate()
+        template_url = test_data_creator.post_data_and_get_url(st_test_data, '/subtask_template/')
+        dst_test_data = test_data_creator.DefaultSubtaskTemplates(template_url=template_url)
 
         # POST with dependency
         POST_and_assert_expected_response(self, BASE_URL + '/default_subtask_template/', dst_test_data, 201, dst_test_data)
@@ -226,9 +235,9 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, template_url, 200, st_test_data)
 
     def test_default_dataproduct_specifications_template_PROTECT_behavior_on_template_deleted(self):
-        dpst_test_data = DataproductSpecificationsTemplate_test_data()
-        template_url = post_data_and_get_url(dpst_test_data, '/dataproduct_specifications_template/')
-        dst_test_data = DefaultSubtaskTemplates_test_data(template_url=template_url)
+        dpst_test_data = test_data_creator.DataproductSpecificationsTemplate()
+        template_url = test_data_creator.post_data_and_get_url(dpst_test_data, '/dataproduct_specifications_template/')
+        dst_test_data = test_data_creator.DefaultSubtaskTemplates(template_url=template_url)
 
         # POST with dependency
         POST_and_assert_expected_response(self, BASE_URL + '/default_dataproduct_specifications_template/', dst_test_data, 201, dst_test_data)
@@ -251,21 +260,24 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/subtask/1234321/', 404, {})
 
     def test_subtask_POST_and_GET(self):
-        st_test_data = Subtask_test_data()
+        st_test_data = test_data_creator.Subtask()
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, st_test_data)
+        minimum_subtask_id = 2000000
+        subtask_id = url.split("subtask/")[1].replace("/","")
+        self.assertGreaterEqual(int(subtask_id), minimum_subtask_id)
 
     def test_subtask_PUT_invalid_raises_error(self):
-        st_test_data = Subtask_test_data()
+        st_test_data = test_data_creator.Subtask()
 
         PUT_and_assert_expected_response(self, BASE_URL + '/subtask/9876789876/', st_test_data, 404, {})
 
     def test_subtask_PUT(self):
-        st_test_data = Subtask_test_data()
-        st_test_data2 = Subtask_test_data()
+        st_test_data = test_data_creator.Subtask()
+        st_test_data2 = test_data_creator.Subtask()
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -277,7 +289,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, st_test_data2)
 
     def test_subtask_PATCH(self):
-        st_test_data = Subtask_test_data()
+        st_test_data = test_data_creator.Subtask()
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -293,7 +305,7 @@ class 
SubtaskTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_DELETE(self): - st_test_data = Subtask_test_data() + st_test_data = test_data_creator.Subtask() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data) @@ -304,7 +316,7 @@ class SubtaskTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_subtask_PROTECT_behavior_on_state_choice_deleted(self): - st_test_data = Subtask_test_data() + st_test_data = test_data_creator.Subtask() # create dependency that is safe to delete (enums are not populated / re-established between tests) state_data = {'value': 'kickme'} @@ -325,9 +337,9 @@ class SubtaskTestCase(unittest.TestCase): GET_and_assert_expected_response(self, state_url, 200, state_data) def test_subtask_SET_NULL_behavior_on_task_blueprint_deleted(self): - tbp_test_data = TaskBlueprint_test_data() - task_blueprint_url = post_data_and_get_url(tbp_test_data, '/task_blueprint/') - st_test_data = Subtask_test_data(task_blueprint_url=task_blueprint_url) + tbp_test_data = test_data_creator.TaskBlueprint() + task_blueprint_url = test_data_creator.post_data_and_get_url(tbp_test_data, '/task_blueprint/') + st_test_data = test_data_creator.Subtask(task_blueprint_url=task_blueprint_url) # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url'] @@ -342,9 +354,9 @@ class SubtaskTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_PROTECT_behavior_on_template_deleted(self): - stt_test_data = SubtaskTemplate_test_data() - specifications_template_url = post_data_and_get_url(stt_test_data, '/subtask_template/') - st_test_data = Subtask_test_data(specifications_template_url=specifications_template_url) + stt_test_data = test_data_creator.SubtaskTemplate() + specifications_template_url = test_data_creator.post_data_and_get_url(stt_test_data, '/subtask_template/') + st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url) # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url'] @@ -368,7 +380,7 @@ class DataproductTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/dataproduct/1234321/', 404, {}) def test_dataproduct_POST_and_GET(self): - dp_test_data = Dataproduct_test_data() + dp_test_data = test_data_creator.Dataproduct() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) @@ -376,13 +388,13 @@ class DataproductTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dp_test_data) def test_dataproduct_PUT_invalid_raises_error(self): - dp_test_data = Dataproduct_test_data() + dp_test_data = test_data_creator.Dataproduct() PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct/9876789876/', dp_test_data, 404, {}) def test_dataproduct_PUT(self): - dp_test_data = Dataproduct_test_data() - dp_test_data2 = Dataproduct_test_data() + dp_test_data = test_data_creator.Dataproduct() + dp_test_data2 = test_data_creator.Dataproduct() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) @@ -394,7 +406,7 @@ class DataproductTestCase(unittest.TestCase): 
GET_and_assert_expected_response(self, url, 200, dp_test_data2) def test_dataproduct_PATCH(self): - dp_test_data = Dataproduct_test_data() + dp_test_data = test_data_creator.Dataproduct() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) @@ -411,7 +423,7 @@ class DataproductTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_dataproduct_DELETE(self): - dp_test_data = Dataproduct_test_data() + dp_test_data = test_data_creator.Dataproduct() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data) @@ -422,7 +434,7 @@ class DataproductTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_dataproduct_PROTECT_behavior_on_dataformat_deleted(self): - dp_test_data = Dataproduct_test_data() + dp_test_data = test_data_creator.Dataproduct() # create dependency that is safe to delete (enums are not populated / re-established between tests) dataformat_data = {'value': 'kickme'} @@ -443,8 +455,8 @@ class DataproductTestCase(unittest.TestCase): GET_and_assert_expected_response(self, dataformat_url, 200, dataformat_data) def test_dataproduct_CASCADE_behavior_on_specifications_template_deleted(self): - specifications_template_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/dataproduct_specifications_template/') - dp_test_data = Dataproduct_test_data(specifications_template_url=specifications_template_url) + specifications_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/dataproduct_specifications_template/') + dp_test_data = test_data_creator.Dataproduct(specifications_template_url=specifications_template_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)['url'] @@ -467,7 +479,7 @@ class SubtaskConnectorTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/subtask_connector/1234321/', 404, {}) def test_subtask_connector_POST_and_GET(self): - stc_test_data = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) @@ -475,13 +487,13 @@ class SubtaskConnectorTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, stc_test_data) def test_subtask_connector_PUT_invalid_raises_error(self): - stc_test_data = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() PUT_and_assert_expected_response(self, BASE_URL + '/subtask_connector/9876789876/', stc_test_data, 404, {}) def test_subtask_connector_PUT(self): - stc_test_data = SubtaskConnector_test_data() - stc_test_data2 = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() + stc_test_data2 = test_data_creator.SubtaskConnector() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) @@ -493,7 +505,7 @@ class SubtaskConnectorTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, stc_test_data2) def test_subtask_connector_PATCH(self): - stc_test_data = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() # POST new item, verify r_dict = 
POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) @@ -510,7 +522,7 @@ class SubtaskConnectorTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_connector_DELETE(self): - stc_test_data = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) @@ -521,7 +533,7 @@ class SubtaskConnectorTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_subtask_connector_PROTECT_behavior_on_role_deleted(self): - stc_test_data = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() # create dependency that is safe to delete (enums are not populated / re-established between tests) role_data = {'value': 'kickme'} @@ -544,7 +556,7 @@ class SubtaskConnectorTestCase(unittest.TestCase): def test_subtask_connector_PROTECT_behavior_on_datatype_deleted(self): - stc_test_data = SubtaskConnector_test_data() + stc_test_data = test_data_creator.SubtaskConnector() # create new dependency that is safe to delete (enums are not populated / re-established between tests) datatype_data = {'value': 'kickme'} @@ -575,7 +587,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/subtask_input/1234321/', 404, {}) def test_subtask_input_POST_and_GET(self): - sti_test_data = SubtaskInput_test_data() + sti_test_data = test_data_creator.SubtaskInput() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -583,13 +595,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, sti_test_data) def test_subtask_input_PUT_invalid_raises_error(self): - sti_test_data = SubtaskInput_test_data() + sti_test_data = test_data_creator.SubtaskInput() PUT_and_assert_expected_response(self, BASE_URL + '/subtask_input/9876789876/', sti_test_data, 404, {}) def test_subtask_input_PUT(self): - sti_test_data = SubtaskInput_test_data() - sti_test_data2 = SubtaskInput_test_data() + sti_test_data = test_data_creator.SubtaskInput() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -597,18 +608,19 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, sti_test_data) # PUT new values, verify + sti_test_data2 = test_data_creator.SubtaskInput() PUT_and_assert_expected_response(self, url, sti_test_data2, 200, sti_test_data2) GET_and_assert_expected_response(self, url, 200, sti_test_data2) def test_subtask_input_PATCH(self): - sti_test_data = SubtaskInput_test_data() + sti_test_data = test_data_creator.SubtaskInput() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) url = r_dict['url'] GET_and_assert_expected_response(self, url, 200, sti_test_data) - subtask_url = post_data_and_get_url(Subtask_test_data(), '/subtask/') + subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(), '/subtask/') test_patch = {"subtask": subtask_url, "tags": ['FANCYTAG'], } @@ -620,7 +632,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def 
test_subtask_input_DELETE(self): - sti_test_data = SubtaskInput_test_data() + sti_test_data = test_data_creator.SubtaskInput() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -631,8 +643,8 @@ class SubtaskInputTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_subtask_input_CASCADE_behavior_on_subtask_deleted(self): - subtask_url = post_data_and_get_url(Subtask_test_data(), '/subtask/') - sti_test_data = SubtaskInput_test_data(subtask_url=subtask_url) + subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(), '/subtask/') + sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -645,8 +657,8 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 404, {}) def test_subtask_input_SET_NULL_behavior_on_connector_deleted(self): - subtask_connector_url = post_data_and_get_url(SubtaskConnector_test_data(), '/subtask_connector/') - sti_test_data = SubtaskInput_test_data(subtask_connector_url=subtask_connector_url) + subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/') + sti_test_data = test_data_creator.SubtaskInput(subtask_connector_url=subtask_connector_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -661,8 +673,8 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self): - task_relation_blueprint_url = post_data_and_get_url(TaskRelationBlueprint_test_data(), '/task_relation_blueprint/') - sti_test_data = SubtaskInput_test_data(task_relation_blueprint_url=task_relation_blueprint_url) + task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(), '/task_relation_blueprint/') + sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -677,8 +689,8 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_input_PROTECT_behavior_on_producer_deleted(self): - subtask_output_url = post_data_and_get_url(SubtaskOutput_test_data(), '/subtask_output/') - sti_test_data = SubtaskInput_test_data(subtask_output_url=subtask_output_url) + subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(), '/subtask_output/') + sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url) # POST with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -692,8 +704,8 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, subtask_output_url, 200, {}) def test_subtask_input_PROTECT_behavior_on_selection_template_deleted(self): - subtask_input_selection_template_url = post_data_and_get_url(SubtaskInputSelectionTemplate_test_data(), '/subtask_input_selection_template/') - sti_test_data = 
SubtaskInput_test_data(subtask_input_selection_template_url=subtask_input_selection_template_url) + subtask_input_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/') + sti_test_data = test_data_creator.SubtaskInput(subtask_input_selection_template_url=subtask_input_selection_template_url) # POST with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -717,7 +729,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/subtask_output/1234321/', 404, {}) def test_subtask_output_POST_and_GET(self): - sto_test_data = SubtaskOutput_test_data() + sto_test_data = test_data_creator.SubtaskOutput() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -726,12 +738,12 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, sto_test_data) def test_subtask_output_PUT_invalid_raises_error(self): - sto_test_data = SubtaskOutput_test_data() + sto_test_data = test_data_creator.SubtaskOutput() PUT_and_assert_expected_response(self, BASE_URL + '/subtask_output/9876789876/', sto_test_data, 404, {}) def test_subtask_output_PUT(self): - sto_test_data = SubtaskOutput_test_data() - sto_test_data2 = SubtaskOutput_test_data() + sto_test_data = test_data_creator.SubtaskOutput() + sto_test_data2 = test_data_creator.SubtaskOutput() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,sto_test_data) @@ -743,8 +755,8 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, sto_test_data2) def test_subtask_output_PATCH(self): - sto_test_data = SubtaskOutput_test_data() - sto_test_data2 = SubtaskOutput_test_data() + sto_test_data = test_data_creator.SubtaskOutput() + sto_test_data2 = test_data_creator.SubtaskOutput() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -762,7 +774,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_subtask_output_DELETE(self): - sto_test_data = SubtaskOutput_test_data() + sto_test_data = test_data_creator.SubtaskOutput() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -774,9 +786,9 @@ class SubtaskOutputTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_subtask_output_CASCADE_behavior_on_subtask_deleted(self): - st_test_data = Subtask_test_data() - subtask_url = post_data_and_get_url(st_test_data, '/subtask/') - sto_test_data = SubtaskOutput_test_data(subtask_url=subtask_url) + st_test_data = test_data_creator.Subtask() + subtask_url = test_data_creator.post_data_and_get_url(st_test_data, '/subtask/') + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)['url'] @@ -789,7 +801,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 404, {}) def test_subtask_output_SET_NULL_behavior_on_connector_deleted(self): - sto_test_data = SubtaskOutput_test_data() + sto_test_data = 
test_data_creator.SubtaskOutput() # POST new item, verify url = \ @@ -817,7 +829,7 @@ class AntennaSetTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/antenna_set/1234321/', 404, {}) def test_antenna_set_POST_and_GET(self): - antennaset_test_data = AntennaSet_test_data() + antennaset_test_data = test_data_creator.AntennaSet() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data) @@ -825,13 +837,13 @@ class AntennaSetTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, antennaset_test_data) def test_antenna_set_PUT_invalid_raises_error(self): - antennaset_test_data = AntennaSet_test_data() + antennaset_test_data = test_data_creator.AntennaSet() PUT_and_assert_expected_response(self, BASE_URL + '/antenna_set/9876789876/', antennaset_test_data, 404, {}) def test_antenna_set_PUT(self): - antennaset_test_data = AntennaSet_test_data(name="the one") - antennaset_test_data2 = AntennaSet_test_data(name="the other") + antennaset_test_data = test_data_creator.AntennaSet(name="the one") + antennaset_test_data2 = test_data_creator.AntennaSet(name="the other") # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data) @@ -843,7 +855,7 @@ class AntennaSetTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, antennaset_test_data2) def test_antenna_set_PATCH(self): - antennaset_test_data = AntennaSet_test_data() + antennaset_test_data = test_data_creator.AntennaSet() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data) @@ -860,7 +872,7 @@ class AntennaSetTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_antenna_set_DELETE(self): - antennaset_test_data = AntennaSet_test_data() + antennaset_test_data = test_data_creator.AntennaSet() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/antenna_set/', antennaset_test_data, 201, antennaset_test_data) @@ -871,7 +883,7 @@ class AntennaSetTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_antenna_set_PROTECT_behavior_on_station_type_deleted(self): - antennaset_test_data = AntennaSet_test_data() + antennaset_test_data = test_data_creator.AntennaSet() # create dependency that is safe to delete (enums are not populated / re-established between tests) dataformat_data = {'value': 'kickme'} @@ -902,7 +914,7 @@ class DataproductTransformTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/1234321/', 404, {}) def test_dataproduct_transform_POST_and_GET(self): - dpt_test_data = DataproductTransform_test_data() + dpt_test_data = test_data_creator.DataproductTransform() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data) @@ -910,13 +922,13 @@ class DataproductTransformTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dpt_test_data) def test_dataproduct_transform_PUT_invalid_raises_error(self): - dpt_test_data = DataproductTransform_test_data() + dpt_test_data = test_data_creator.DataproductTransform() PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/9876789876/', 
dpt_test_data, 404, {}) def test_dataproduct_transform_PUT(self): - dpt_test_data = DataproductTransform_test_data() - dpt_test_data2 = DataproductTransform_test_data() + dpt_test_data = test_data_creator.DataproductTransform() + dpt_test_data2 = test_data_creator.DataproductTransform() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data) @@ -928,14 +940,14 @@ class DataproductTransformTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dpt_test_data2) def test_dataproduct_transform_PATCH(self): - dpt_test_data = DataproductTransform_test_data() + dpt_test_data = test_data_creator.DataproductTransform() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data) url = r_dict['url'] GET_and_assert_expected_response(self, url, 200, dpt_test_data) - output_dataproduct_url = post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/') + output_dataproduct_url = test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/') test_patch = {"output": output_dataproduct_url, "identity": False } @@ -947,7 +959,7 @@ class DataproductTransformTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_dataproduct_transform_DELETE(self): - dpt_test_data = DataproductTransform_test_data() + dpt_test_data = test_data_creator.DataproductTransform() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data) @@ -958,9 +970,9 @@ class DataproductTransformTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_dataproduct_transform_PROTECT_behavior_on_input_deleted(self): - input_dp_test_data = Dataproduct_test_data() - input_dataproduct_url = post_data_and_get_url(input_dp_test_data, '/dataproduct/') - dpt_test_data = DataproductTransform_test_data(input_dataproduct_url=input_dataproduct_url) + input_dp_test_data = test_data_creator.Dataproduct() + input_dataproduct_url = test_data_creator.post_data_and_get_url(input_dp_test_data, '/dataproduct/') + dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=input_dataproduct_url) # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)['url'] @@ -974,9 +986,9 @@ class DataproductTransformTestCase(unittest.TestCase): GET_and_assert_expected_response(self, input_dataproduct_url, 200, input_dp_test_data) def test_dataproduct_transform_PROTECT_behavior_on_output_deleted(self): - output_dp_test_data = Dataproduct_test_data() - output_dataproduct_url = post_data_and_get_url(output_dp_test_data, '/dataproduct/') - dpt_test_data = DataproductTransform_test_data(output_dataproduct_url=output_dataproduct_url) + output_dp_test_data = test_data_creator.Dataproduct() + output_dataproduct_url = test_data_creator.post_data_and_get_url(output_dp_test_data, '/dataproduct/') + dpt_test_data = test_data_creator.DataproductTransform(output_dataproduct_url=output_dataproduct_url) # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)['url'] @@ -1000,7 +1012,7 @@ class FilesystemTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/filesystem/1234321/', 404, {}) def 
test_filesystem_POST_and_GET(self): - fs_test_data = Filesystem_test_data() + fs_test_data = test_data_creator.Filesystem() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, fs_test_data) @@ -1008,13 +1020,13 @@ class FilesystemTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, fs_test_data) def test_filesystem_PUT_invalid_raises_error(self): - fs_test_data = Filesystem_test_data() + fs_test_data = test_data_creator.Filesystem() PUT_and_assert_expected_response(self, BASE_URL + '/filesystem/9876789876/', fs_test_data, 404, {}) def test_filesystem_PUT(self): - fs_test_data = Filesystem_test_data() + fs_test_data = test_data_creator.Filesystem() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, @@ -1022,15 +1034,15 @@ class FilesystemTestCase(unittest.TestCase): url = r_dict['url'] GET_and_assert_expected_response(self, url, 200, fs_test_data) - fs_test_data2 = Filesystem_test_data() + fs_test_data2 = test_data_creator.Filesystem() # PUT new values, verify PUT_and_assert_expected_response(self, url, fs_test_data2, 200, fs_test_data2) GET_and_assert_expected_response(self, url, 200, fs_test_data2) def test_filesystem_PATCH(self): - cluster_url = post_data_and_get_url(Cluster_test_data(), '/cluster/') - fs_test_data = Filesystem_test_data(cluster_url=cluster_url) + cluster_url = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/') + fs_test_data = test_data_creator.Filesystem(cluster_url=cluster_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, @@ -1038,7 +1050,7 @@ class FilesystemTestCase(unittest.TestCase): url = r_dict['url'] GET_and_assert_expected_response(self, url, 200, fs_test_data) - cluster_url2 = post_data_and_get_url(Cluster_test_data(), '/cluster/') + cluster_url2 = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/') test_patch = {"cluster": cluster_url2, "capacity": 3333333333} @@ -1049,7 +1061,7 @@ class FilesystemTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_filesystem_DELETE(self): - fs_test_data = Filesystem_test_data() + fs_test_data = test_data_creator.Filesystem() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, @@ -1061,7 +1073,7 @@ class FilesystemTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_filesystem_PROTECT_behavior_on_cluster_deleted(self): - fs_test_data = Filesystem_test_data() + fs_test_data = test_data_creator.Filesystem() # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/filesystem/', fs_test_data, 201, @@ -1086,7 +1098,7 @@ class ClusterTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/cluster/1234321/', 404, {}) def test_cluster_POST_and_GET(self): - c_test_data = Cluster_test_data() + c_test_data = test_data_creator.Cluster() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) @@ -1094,25 +1106,25 @@ class ClusterTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, c_test_data) def test_cluster_PUT_invalid_raises_error(self): - c_test_data = Cluster_test_data() + c_test_data = test_data_creator.Cluster() 
PUT_and_assert_expected_response(self, BASE_URL + '/cluster/9876789876/', c_test_data, 404, {}) def test_cluster_PUT(self): - c_test_data = Cluster_test_data() + c_test_data = test_data_creator.Cluster() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) url = r_dict['url'] GET_and_assert_expected_response(self, url, 200, c_test_data) - c_test_data2 = Cluster_test_data() + c_test_data2 = test_data_creator.Cluster() # PUT new values, verify PUT_and_assert_expected_response(self, url, c_test_data2, 200, c_test_data2) GET_and_assert_expected_response(self, url, 200, c_test_data2) def test_cluster_PATCH(self): - c_test_data = Cluster_test_data() + c_test_data = test_data_creator.Cluster() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) @@ -1128,7 +1140,7 @@ class ClusterTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_cluster_DELETE(self): - c_test_data = Cluster_test_data() + c_test_data = test_data_creator.Cluster() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cluster/', c_test_data, 201, c_test_data) @@ -1149,7 +1161,7 @@ class DataproductHashTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/1234321/', 404, {}) def test_dataproduct_hash_POST_and_GET(self): - dph_test_data = DataproductHash_test_data() + dph_test_data = test_data_creator.DataproductHash() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, @@ -1158,14 +1170,14 @@ class DataproductHashTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dph_test_data) def test_dataproduct_hash_PUT_invalid_raises_error(self): - dph_test_data = DataproductHash_test_data() + dph_test_data = test_data_creator.DataproductHash() PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/9876789876/', dph_test_data, 404, {}) def test_dataproduct_hash_PUT(self): - dph_test_data = DataproductHash_test_data(hash="the one") - dph_test_data2 = DataproductHash_test_data(hash="the other") + dph_test_data = test_data_creator.DataproductHash(hash="the one") + dph_test_data2 = test_data_creator.DataproductHash(hash="the other") # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, @@ -1178,7 +1190,7 @@ class DataproductHashTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dph_test_data2) def test_dataproduct_hash_PATCH(self): - dph_test_data = DataproductHash_test_data() + dph_test_data = test_data_creator.DataproductHash() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, @@ -1196,7 +1208,7 @@ class DataproductHashTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_dataproduct_hash_DELETE(self): - dph_test_data = DataproductHash_test_data() + dph_test_data = test_data_creator.DataproductHash() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, @@ -1208,7 +1220,7 @@ class DataproductHashTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_dataproduct_hash_PROTECT_behavior_on_dataproduct_deleted(self): - dph_test_data = 
DataproductHash_test_data() + dph_test_data = test_data_creator.DataproductHash() # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, @@ -1223,7 +1235,7 @@ class DataproductHashTestCase(unittest.TestCase): GET_and_assert_expected_response(self, dph_test_data['dataproduct'], 200, {}) def test_dataproduct_hash_PROTECT_behavior_on_algorithm_deleted(self): - dph_test_data = DataproductHash_test_data() + dph_test_data = test_data_creator.DataproductHash() # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201, @@ -1248,7 +1260,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/1234321/', 404, {}) def test_dataproduct_archive_info_POST_and_GET(self): - dpai_test_data = DataproductArchiveInfo_test_data() + dpai_test_data = test_data_creator.DataproductArchiveInfo() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, @@ -1257,14 +1269,14 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dpai_test_data) def test_dataproduct_archive_info_PUT_invalid_raises_error(self): - dpai_test_data = DataproductArchiveInfo_test_data() + dpai_test_data = test_data_creator.DataproductArchiveInfo() PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/9876789876/', dpai_test_data, 404, {}) def test_dataproduct_archive_info_PUT(self): - dpai_test_data = DataproductArchiveInfo_test_data() - dpai_test_data2 = DataproductArchiveInfo_test_data() + dpai_test_data = test_data_creator.DataproductArchiveInfo() + dpai_test_data2 = test_data_creator.DataproductArchiveInfo() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, @@ -1277,7 +1289,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, dpai_test_data2) def test_dataproduct_archive_info_PATCH(self): - dpai_test_data = DataproductArchiveInfo_test_data() + dpai_test_data = test_data_creator.DataproductArchiveInfo() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, @@ -1294,7 +1306,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_dataproduct_archive_info_DELETE(self): - dpai_test_data = DataproductArchiveInfo_test_data() + dpai_test_data = test_data_creator.DataproductArchiveInfo() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, @@ -1306,7 +1318,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_dataproduct_archive_info_PROTECT_behavior_on_dataproduct_deleted(self): - dpai_test_data = DataproductArchiveInfo_test_data() + dpai_test_data = test_data_creator.DataproductArchiveInfo() # POST new item and verify url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201, @@ -1321,6 +1333,219 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_and_assert_expected_response(self, dpai_test_data['dataproduct'], 200, {}) +class SubtaskQuery(unittest.TestCase): + """ + Test queries on 
the subtask REST api:
+    - query cluster only
+    - query start and stop time and cluster
+    - query start and stop time
+    - query start time and cluster
+    - query stop time and cluster
+    - query with incorrect input
+    """
+
+    def check_response_OK_and_result_count(self, response, expected_count):
+        """
+        Check that the http response has status code OK and that the result list has the expected count
+        """
+        self.assertEqual(200, response.status_code)
+        json_response = response.json()
+        self.assertEqual(expected_count, json_response.get('count'))
+
+    @staticmethod
+    def get_total_number_of_subtasks():
+        """
+        Retrieve the total number of current subtask objects
+        """
+        response = requests.get(BASE_URL + '/subtask/', auth=AUTH)
+        json_response = response.json()
+        return json_response.get('count')
+
+    @staticmethod
+    def create_cluster_object(cluster_name):
+        cluster_data = Cluster_test_data(name=cluster_name)
+        return models.Cluster.objects.create(**cluster_data)
+
+    @staticmethod
+    def create_multiple_subtask_object(total_number, cluster_name):
+        """
+        Create one subtask per day for the given number of days, with start_time 2 hours and
+        stop_time 4 hours from now, each shifted by its day index
+        """
+        cluster_object = SubtaskQuery.create_cluster_object(cluster_name)
+        for day_idx in range(0, total_number):
+            start_time = datetime.now() + timedelta(hours=2, days=day_idx)
+            stop_time = datetime.now() + timedelta(hours=4, days=day_idx)
+            subtask_data = Subtask_test_data(start_time=start_time.strftime(DJANGO_TIMEFORMAT),
+                                             stop_time=stop_time.strftime(DJANGO_TIMEFORMAT),
+                                             cluster_object=cluster_object)
+            models.Subtask.objects.create(**subtask_data)
+
+    subtasks_test_data_with_start_stop_time = {'clusterB': 50, 'clusterC': 30}
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        """
+        Set up once before the tests run. Create multiple subtask objects:
+        clusterA 1 subtask with start and stop time now
+        clusterB 50 subtasks with start 2hr and stop time 4hr from now, recurring 'every day'
+        clusterC 30 subtasks with start 2hr and stop time 4hr from now, recurring 'every day'
+        """
+        cluster_object = SubtaskQuery.create_cluster_object("clusterA")
+        subtask_data = Subtask_test_data(cluster_object=cluster_object)
+        models.Subtask.objects.create(**subtask_data)
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            SubtaskQuery.create_multiple_subtask_object(period_length_in_days, cluster_name)
+
+    def test_query_cluster_only(self):
+        """
+        Check the query on cluster name.
+        Check status code and result count
+        """
+        logger.info("Check query on clusterA")
+        response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterA', auth=AUTH)
+        self.check_response_OK_and_result_count(response, 1)
+
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            logger.info("Check query on %s" % cluster_name)
+            response = requests.get(BASE_URL + '/subtask/?cluster__name=%s' % cluster_name, auth=AUTH)
+            self.check_response_OK_and_result_count(response, period_length_in_days)
+
+    def test_query_start_and_stop_time_and_cluster(self):
+        """
+        Check that we can query on start and stop time and cluster name (B and C) over a period.
+        Check status code and result count
+        """
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            start_time = datetime.now()
+            stop_time = start_time + timedelta(days=period_length_in_days)
+            expected_count = period_length_in_days
+            logger.info("Check query in a period (%s until %s) for %s" %
+                        (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
+                                    (start_time, stop_time, cluster_name), auth=AUTH)
+            self.check_response_OK_and_result_count(response, expected_count)
+
+            logger.info("Check number of subtasks every day for %s" % cluster_name)
+            for day_idx in range(0, period_length_in_days):
+                start_time = datetime.now() + timedelta(days=day_idx)
+                stop_time = start_time + timedelta(days=1)
+                response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
+                                        (start_time, stop_time, cluster_name), auth=AUTH)
+                self.check_response_OK_and_result_count(response, 1)
+
+        logger.info("Check query in a period (%s until %s) for clusterNotExist" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
+                                (start_time, stop_time, "clusterNotExist"), auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
+    def test_query_start_and_stop_time(self):
+        """
+        Check that we can query on start and stop time over a period.
+        Check status code and result count
+        """
+        period_length_in_days = 50  # max(B+C)
+        expected_count = 80  # B+C
+        start_time = datetime.now()
+        stop_time = start_time + timedelta(days=period_length_in_days)
+        logger.info("Check query in a period (%s until %s)" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
+                                (start_time, stop_time), auth=AUTH)
+        self.check_response_OK_and_result_count(response, expected_count)
+
+        logger.info("Check number of subtasks every day")
+        for day_idx in range(0, period_length_in_days):
+            start_time = datetime.now() + timedelta(days=day_idx)
+            stop_time = start_time + timedelta(days=1)
+            response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
+                                    (start_time, stop_time), auth=AUTH)
+            if day_idx >= 30:
+                expected_count = 1  # B
+            else:
+                expected_count = 2  # B+C
+            self.check_response_OK_and_result_count(response, expected_count)
+
+    def test_query_start_and_cluster(self):
+        """
+        Check that we can query on start time and cluster name (B and C) over a period.
+        Check status code and result count
+        """
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            start_time = datetime.now()
+            expected_count = period_length_in_days
+            logger.info("Check query greater than start_time (%s) for %s" %
+                        (start_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&cluster__name=%s' %
+                                    (start_time, cluster_name), auth=AUTH)
+            self.check_response_OK_and_result_count(response, expected_count)
+
+            logger.info("Check number of subtasks every day for %s" % cluster_name)
+            expected_count = period_length_in_days
+            for day_idx in range(0, period_length_in_days):
+                start_time = datetime.now() + timedelta(days=day_idx)
+                response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&cluster__name=%s' %
+                                        (start_time, cluster_name), auth=AUTH)
+                self.check_response_OK_and_result_count(response, expected_count)
+                expected_count -= 1  # one fewer for each subsequent day
+
+    def test_query_stop_and_cluster(self):
+        """
+        Check that we can query on stop time and cluster name (B and C) over a period.
+        Check status code and result count
+        """
+        for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
+            stop_time = datetime.now() + timedelta(days=period_length_in_days)
+            logger.info("Check query less than stop_time (%s) for %s" %
+                        (stop_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            response = requests.get(BASE_URL + '/subtask/?stop_time__lt=%s&cluster__name=%s' %
+                                    (stop_time, cluster_name), auth=AUTH)
+            self.check_response_OK_and_result_count(response, period_length_in_days)
+
+            logger.info("Check number of subtasks every day for %s" % cluster_name)
+            expected_count = 1
+            for day_idx in range(0, period_length_in_days):
+                stop_time = datetime.now() + timedelta(days=day_idx+1)
+                response = requests.get(BASE_URL + '/subtask/?stop_time__lt=%s&cluster__name=%s' %
+                                        (stop_time, cluster_name), auth=AUTH)
+                self.check_response_OK_and_result_count(response, expected_count)
+                expected_count += 1  # one more for each subsequent day
+
+    def test_query_wrong_input(self):
+        """
+        Check the query when wrong input is given:
+        - query on a non-existing cluster name
+        - query with start time later than stop time
+        - query with start_time__lt and stop_time__gt (inverted bounds)
+        - wrong query parameter name
+        Note: when the query parameter name is invalid, REST ignores the filter and returns ALL subtasks (82 objects in this case)
+        """
+        response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterNotExist', auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
+        # Determine how many objects 'ALL' amounts to
+        total_subtasks = SubtaskQuery.get_total_number_of_subtasks()
+        response = requests.get(BASE_URL + '/subtask/?cluster__error_in_query=clusterA', auth=AUTH)
+        self.check_response_OK_and_result_count(response, total_subtasks)
+
+        period_length_in_days = 50  # max(B+C)
+        stop_time = datetime.now()
+        start_time = stop_time + timedelta(days=period_length_in_days)
+        logger.info("Check 'wrong' query in a period (%s until %s)" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
+                                (start_time, stop_time), auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
+        start_time = datetime.now()
+        stop_time = start_time + timedelta(days=period_length_in_days)
+        logger.info("Check 'wrong' query in a period (%s until %s)" %
+                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+        response = requests.get(BASE_URL + '/subtask/?start_time__lt=%s&stop_time__gt=%s' %
+                                (start_time, stop_time), auth=AUTH)
+        self.check_response_OK_and_result_count(response, 0)
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/SAS/TMSS/test/t_tmssapp_specification_django.py b/SAS/TMSS/test/t_tmssapp_specification_django.py
index 0159d195d9ed03968dd37bdf6ff615501137c8c2..2682fd8abb405f0540149e5a568de48f14becaab 100755
--- a/SAS/TMSS/test/t_tmssapp_specification_django.py
+++ b/SAS/TMSS/test/t_tmssapp_specification_django.py
@@ -36,9 +36,9 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin
 # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_database_unittest_setup module)
 from lofar.sas.tmss.test.tmss_database_unittest_setup import *
+from lofar.sas.tmss.test.tmss_test_data_django_models import *
 
 from django.db.utils import IntegrityError
-from django.contrib.auth.models import User
 
 # TODO: rest API testing should be moved out of this test module.
 # import rest_framework.test
diff --git a/SAS/TMSS/test/t_tmssapp_specification_functional.py b/SAS/TMSS/test/t_tmssapp_specification_functional.py
index e995cbbea84b746cefa496906e8171f73bd3ed91..2aba43f467afcbf8f133184e618112a0a7258a5e 100755
--- a/SAS/TMSS/test/t_tmssapp_specification_functional.py
+++ b/SAS/TMSS/test/t_tmssapp_specification_functional.py
@@ -27,6 +27,7 @@
 # todo: behavior in a controlled way.
 # todo: We should probably also fully test behavior wrt mandatory and nullable fields.
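Aside: the SubtaskQuery tests above exercise Django-style filter parameters (cluster__name, start_time__gt, stop_time__lt) on the /subtask/ endpoint. A minimal client-side sketch of how these filters compose, assuming only the BASE_URL and AUTH conventions from the test setup; the count_subtasks helper is illustrative, not part of this patch:

    from datetime import datetime, timedelta
    import requests

    DJANGO_TIMEFORMAT = "%Y-%m-%dT%H:%M:%S"

    def count_subtasks(base_url, auth, **filters):
        """Return the number of subtasks matching the given REST filter parameters,
        e.g. cluster__name='clusterB', start_time__gt=..., stop_time__lt=..."""
        # format datetime values the way the tests above do
        params = {key: value.strftime(DJANGO_TIMEFORMAT) if isinstance(value, datetime) else value
                  for key, value in filters.items()}
        response = requests.get(base_url + '/subtask/', params=params, auth=auth)
        response.raise_for_status()
        return response.json()['count']

    # usage (with BASE_URL and AUTH from the test environment), e.g. subtasks
    # on clusterB that start within the next week:
    #   count_subtasks(BASE_URL, AUTH,
    #                  cluster__name='clusterB',
    #                  start_time__gt=datetime.now(),
    #                  stop_time__lt=datetime.now() + timedelta(days=7))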
+from datetime import datetime import unittest import logging logger = logging.getLogger(__name__) @@ -37,6 +38,9 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * +# import and setup test data creator +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator +test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) class BasicFunctionTestCase(unittest.TestCase): # todo: test_welcome_page (once we have one :)) @@ -55,46 +59,46 @@ class GeneratorTemplateTestCase(unittest.TestCase): def test_generator_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', GeneratorTemplate_test_data(), 201, GeneratorTemplate_test_data()) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, GeneratorTemplate_test_data()) + GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) def test_generator_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', GeneratorTemplate_test_data(), 404, {}) + PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', test_data_creator.GeneratorTemplate(), 404, {}) def test_generator_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', GeneratorTemplate_test_data(), 201, GeneratorTemplate_test_data()) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, GeneratorTemplate_test_data()) + GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) # PUT new values, verify - PUT_and_assert_expected_response(self, url, GeneratorTemplate_test_data("generatortemplate2"), 200, GeneratorTemplate_test_data("generatortemplate2")) - GET_and_assert_expected_response(self, url, 200, GeneratorTemplate_test_data("generatortemplate2")) + PUT_and_assert_expected_response(self, url, test_data_creator.GeneratorTemplate("generatortemplate2"), 200, test_data_creator.GeneratorTemplate("generatortemplate2")) + GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate("generatortemplate2")) def test_generator_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', GeneratorTemplate_test_data(), 201, GeneratorTemplate_test_data()) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) url = r_dict['url'] - GET_and_assert_expected_response(self, url, 200, GeneratorTemplate_test_data()) + GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate()) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 
                                            200, test_patch)
-        expected_data = dict(GeneratorTemplate_test_data())
+        expected_data = dict(test_data_creator.GeneratorTemplate())
         expected_data.update(test_patch)
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_generator_template_DELETE(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', GeneratorTemplate_test_data(), 201, GeneratorTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, GeneratorTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.GeneratorTemplate())

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -112,46 +116,46 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase):

     def test_scheduling_unit_template_POST_and_GET(self):
         # POST and GET a new item and assert correctness
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', SchedulingUnitTemplate_test_data(), 201, SchedulingUnitTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url+'?format=json', 200, SchedulingUnitTemplate_test_data())
+        GET_and_assert_expected_response(self, url+'?format=json', 200, test_data_creator.SchedulingUnitTemplate())

     def test_scheduling_unit_template_PUT_invalid_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', SchedulingUnitTemplate_test_data(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', test_data_creator.SchedulingUnitTemplate(), 404, {})

     def test_scheduling_unit_template_PUT(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', SchedulingUnitTemplate_test_data(), 201, SchedulingUnitTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, SchedulingUnitTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate())

         # PUT new values, verify
-        PUT_and_assert_expected_response(self, url, SchedulingUnitTemplate_test_data("schedulingunittemplate2"), 200, SchedulingUnitTemplate_test_data("schedulingunittemplate2"))
-        GET_and_assert_expected_response(self, url, 200, SchedulingUnitTemplate_test_data("schedulingunittemplate2"))
+        PUT_and_assert_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2"), 200, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2"))
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2"))

     def test_scheduling_unit_template_PATCH(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', SchedulingUnitTemplate_test_data(), 201, SchedulingUnitTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, SchedulingUnitTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate())

         test_patch = {"version": 'v6.28318530718',
                       "schema": {"mykey": "my better value"}}

         # PATCH item and verify
         PATCH_and_assert_expected_response(self, url, test_patch,
                                            200, test_patch)
-        expected_data = dict(SchedulingUnitTemplate_test_data())
+        expected_data = dict(test_data_creator.SchedulingUnitTemplate())
         expected_data.update(test_patch)
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_scheduling_unit_template_DELETE(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', SchedulingUnitTemplate_test_data(), 201, SchedulingUnitTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, SchedulingUnitTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.SchedulingUnitTemplate())

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -168,47 +172,47 @@ class TaskTemplateTestCase(unittest.TestCase):

     def test_task_template_POST_and_GET(self):
         # POST and GET a new item and assert correctness
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', TaskTemplate_test_data(), 201,
-                                                   TaskTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201,
+                                                   test_data_creator.TaskTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url + '?format=json', 200, TaskTemplate_test_data())
+        GET_and_assert_expected_response(self, url + '?format=json', 200, test_data_creator.TaskTemplate())

     def test_task_template_PUT_invalid_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', TaskTemplate_test_data(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', test_data_creator.TaskTemplate(), 404, {})

     def test_task_template_PUT(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', TaskTemplate_test_data(), 201,
-                                                   TaskTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201,
+                                                   test_data_creator.TaskTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, TaskTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate())

         # PUT new values, verify
-        PUT_and_assert_expected_response(self, url, TaskTemplate_test_data("tasktemplate2"), 200, TaskTemplate_test_data("tasktemplate2"))
-        GET_and_assert_expected_response(self, url, 200, TaskTemplate_test_data("tasktemplate2"))
+        PUT_and_assert_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2"), 200, test_data_creator.TaskTemplate("tasktemplate2"))
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate("tasktemplate2"))

     def test_task_template_PATCH(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', TaskTemplate_test_data(), 201,
-                                                   TaskTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201,
+                                                   test_data_creator.TaskTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, TaskTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate())

         test_patch = {"version": 'v6.28318530718',
                       "schema": {"mykey": "my better value"},
                       }

         # PATCH item and verify
         PATCH_and_assert_expected_response(self, url, test_patch,
                                            200, test_patch)
-        expected_data = dict(TaskTemplate_test_data())
+        expected_data = dict(test_data_creator.TaskTemplate())
         expected_data.update(test_patch)
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_task_template_DELETE(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', TaskTemplate_test_data(), 201,
-                                                   TaskTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201,
+                                                   test_data_creator.TaskTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, TaskTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.TaskTemplate())

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -226,30 +230,30 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase):

     def test_work_relation_selection_template_POST_and_GET(self):
         # POST and GET a new item and assert correctness
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', WorkRelationSelectionTemplate_test_data(), 201, WorkRelationSelectionTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url+'?format=json', 200, WorkRelationSelectionTemplate_test_data())
+        GET_and_assert_expected_response(self, url+'?format=json', 200, test_data_creator.WorkRelationSelectionTemplate())

     def test_work_relation_selection_template_PUT_invalid_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', WorkRelationSelectionTemplate_test_data(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', test_data_creator.WorkRelationSelectionTemplate(), 404, {})

     def test_work_relation_selection_template_PUT(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', WorkRelationSelectionTemplate_test_data(), 201, WorkRelationSelectionTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, WorkRelationSelectionTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate())

         # PUT new values, verify
-        PUT_and_assert_expected_response(self, url, WorkRelationSelectionTemplate_test_data("workrelationselectiontemplate2"), 200, WorkRelationSelectionTemplate_test_data("workrelationselectiontemplate2"))
-        GET_and_assert_expected_response(self, url, 200, WorkRelationSelectionTemplate_test_data("workrelationselectiontemplate2"))
+        PUT_and_assert_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2"), 200, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2"))
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2"))

     def test_work_relation_selection_template_PATCH(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', WorkRelationSelectionTemplate_test_data(), 201, WorkRelationSelectionTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, WorkRelationSelectionTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate())

         test_patch = {"version": 'v6.28318530718',
                       "schema": {"mykey": "my better value"},
@@ -257,16 +261,16 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase):

         # PATCH item and verify
         PATCH_and_assert_expected_response(self, url, test_patch,
                                            200, test_patch)
-        expected_data = dict(WorkRelationSelectionTemplate_test_data())
+        expected_data = dict(test_data_creator.WorkRelationSelectionTemplate())
         expected_data.update(test_patch)
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_work_relation_selection_template_DELETE(self):
         # POST new item, verify
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', WorkRelationSelectionTemplate_test_data(), 201, WorkRelationSelectionTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate())
         url = r_dict['url']
-        GET_and_assert_expected_response(self, url, 200, WorkRelationSelectionTemplate_test_data())
+        GET_and_assert_expected_response(self, url, 200, test_data_creator.WorkRelationSelectionTemplate())

         # DELETE and check it's gone
         DELETE_and_assert_gone(self, url)
@@ -283,7 +287,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_connectors/1234321/', 404, {})

     def test_task_connectors_POST_and_GET(self):
-        tc_test_data = TaskConnectors_test_data()
+        tc_test_data = test_data_creator.TaskConnectors()
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)
         url = r_dict['url']
@@ -292,7 +296,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_connectors_POST_invalid_role_raises_error(self):
         # POST a new item with invalid choice
-        test_data_invalid_role = dict(TaskConnectors_test_data())
+        test_data_invalid_role = dict(test_data_creator.TaskConnectors())
         test_data_invalid_role['role'] = BASE_URL + '/role/forbidden/'
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid_role, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['role']))
@@ -300,7 +304,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_connectors_POST_invalid_datatype_raises_error(self):
         # POST a new item with invalid choice
-        test_data_invalid = dict(TaskConnectors_test_data())
+        test_data_invalid = dict(test_data_creator.TaskConnectors())
         test_data_invalid['datatype'] = BASE_URL + '/datatype/forbidden/'
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['datatype']))
@@ -308,7 +312,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_connectors_POST_invalid_dataformats_raises_error(self):
         # POST a new item with invalid choice
-        test_data_invalid = dict(TaskConnectors_test_data())
+        test_data_invalid = dict(test_data_creator.TaskConnectors())
         test_data_invalid['dataformats'] = [BASE_URL + '/dataformat/forbidden/']
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['dataformats']))
@@ -316,7 +320,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_connectors_POST_nonexistant_input_of_raises_error(self):
         # POST a new item with wrong reference
-        test_data_invalid = dict(TaskConnectors_test_data())
+        test_data_invalid = dict(test_data_creator.TaskConnectors())
         test_data_invalid['input_of'] = BASE_URL + "/task_template/6353748/"
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['input_of']))
@@ -324,7 +328,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_connectors_POST_nonexistant_output_of_raises_error(self):
         # POST a new item with wrong reference
-        test_data_invalid = dict(TaskConnectors_test_data())
+        test_data_invalid = dict(test_data_creator.TaskConnectors())
         test_data_invalid['output_of'] = BASE_URL + "/task_template/6353748/"
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['output_of']))
@@ -332,20 +336,20 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_connectors_POST_existing_outputs_works(self):
         # First POST a new item to reference
-        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', TaskTemplate_test_data(), 201, TaskTemplate_test_data())
+        r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate())
         url = r_dict['url']

         # POST a new item with correct reference
-        test_data_valid = dict(TaskConnectors_test_data())
+        test_data_valid = dict(test_data_creator.TaskConnectors())
         test_data_valid['output_of'] = url
         POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_valid, 201, test_data_valid)

     def test_task_connectors_PUT_nonexistant_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/task_connectors/9876789876/', TaskConnectors_test_data(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/task_connectors/9876789876/', test_data_creator.TaskConnectors(), 404, {})

     def test_task_connectors_PUT(self):
-        tc_test_data1 = TaskConnectors_test_data(role="correlator")
-        tc_test_data2 = TaskConnectors_test_data(role="beamformer")
+        tc_test_data1 = test_data_creator.TaskConnectors(role="correlator")
+        tc_test_data2 = test_data_creator.TaskConnectors(role="beamformer")

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data1, 201, tc_test_data1)
@@ -357,7 +361,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tc_test_data2)

     def test_task_connectors_PATCH(self):
-        tc_test_data = TaskConnectors_test_data()
+        tc_test_data = test_data_creator.TaskConnectors()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)
@@ -375,7 +379,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_task_connectors_DELETE(self):
-        tc_test_data = TaskConnectors_test_data()
+        tc_test_data = test_data_creator.TaskConnectors()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)
@@ -387,7 +391,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_relation_blueprint_CASCADE_behavior_on_inputs_template_deleted(self):
-        tc_test_data = TaskConnectors_test_data()
+        tc_test_data = test_data_creator.TaskConnectors()

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url']
@@ -403,7 +407,7 @@ class TaskConnectorsTestCase(unittest.TestCase):

     def test_task_relation_blueprint_CASCADE_behavior_on_outputs_template_deleted(self):
-        tc_test_data = TaskConnectors_test_data()
+        tc_test_data = test_data_creator.TaskConnectors()

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url']
@@ -421,42 +425,42 @@ class TaskConnectorsTestCase(unittest.TestCase):

 class DefaultTemplates(unittest.TestCase):
     def test_default_generator_template_POST(self):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/',
-                                                   GeneratorTemplate_test_data(), 201,
-                                                   GeneratorTemplate_test_data())
+                                                   test_data_creator.GeneratorTemplate(), 201,
+                                                   test_data_creator.GeneratorTemplate())
         url = r_dict['url']
-        test_data_1 = dict(DefaultTemplates_test_data())
+        test_data_1 = dict(test_data_creator.DefaultTemplates())
         test_data_1['template'] = url
         POST_and_assert_expected_response(self, BASE_URL + '/default_generator_template/', test_data_1, 201, test_data_1)

     def test_default_scheduling_unit_template_POST(self):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/',
-                                                   SchedulingUnitTemplate_test_data(), 201,
-                                                   SchedulingUnitTemplate_test_data())
+                                                   test_data_creator.SchedulingUnitTemplate(), 201,
+                                                   test_data_creator.SchedulingUnitTemplate())
         url = r_dict['url']
-        test_data_1 = dict(DefaultTemplates_test_data())
+        test_data_1 = dict(test_data_creator.DefaultTemplates())
         test_data_1['template'] = url
         POST_and_assert_expected_response(self, BASE_URL + '/default_scheduling_unit_template/', test_data_1, 201, test_data_1)

     def test_default_task_template_POST(self):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/',
-                                                   TaskTemplate_test_data(), 201,
-                                                   TaskTemplate_test_data())
+                                                   test_data_creator.TaskTemplate(), 201,
+                                                   test_data_creator.TaskTemplate())
         url = r_dict['url']
-        test_data_1 = dict(DefaultTemplates_test_data())
+        test_data_1 = dict(test_data_creator.DefaultTemplates())
         test_data_1['template'] = url
         POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', test_data_1, 201, test_data_1)

     def test_default_work_relation_selection_template_POST(self):
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/',
-                                                   WorkRelationSelectionTemplate_test_data(), 201,
-                                                   WorkRelationSelectionTemplate_test_data())
+                                                   test_data_creator.WorkRelationSelectionTemplate(), 201,
+                                                   test_data_creator.WorkRelationSelectionTemplate())
         url = r_dict['url']
-        test_data_1 = dict(DefaultTemplates_test_data())
+        test_data_1 = dict(test_data_creator.DefaultTemplates())
         test_data_1['template'] = url
         POST_and_assert_expected_response(self, BASE_URL + '/default_work_relation_selection_template/', test_data_1, 201, test_data_1)
@@ -464,9 +468,9 @@ class DefaultTemplates(unittest.TestCase):

         # POST with dependency
         template_url = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/',
-                                                         GeneratorTemplate_test_data(), 201,
-                                                         GeneratorTemplate_test_data())['url']
-        test_data = dict(DefaultTemplates_test_data("defaulttemplate2"))
+                                                         test_data_creator.GeneratorTemplate(), 201,
+                                                         test_data_creator.GeneratorTemplate())['url']
+        test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2"))
         test_data['template'] = template_url
         POST_and_assert_expected_response(self, BASE_URL + '/default_generator_template/', test_data, 201, test_data)
@@ -476,15 +480,15 @@ class DefaultTemplates(unittest.TestCase):
         response = requests.delete(template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, template_url, 200, GeneratorTemplate_test_data())
+        GET_and_assert_expected_response(self, template_url, 200, test_data_creator.GeneratorTemplate())

     def test_default_scheduling_unit_template_PROTECT_behavior_on_template_deleted(self):
         # POST with dependency
         template_url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/',
-                                                         SchedulingUnitTemplate_test_data(), 201,
-                                                         SchedulingUnitTemplate_test_data())['url']
-        test_data = dict(DefaultTemplates_test_data("defaulttemplate2"))
+                                                         test_data_creator.SchedulingUnitTemplate(), 201,
+                                                         test_data_creator.SchedulingUnitTemplate())['url']
+        test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2"))
         test_data['template'] = template_url
         POST_and_assert_expected_response(self, BASE_URL + '/default_scheduling_unit_template/', test_data, 201, test_data)
@@ -494,16 +498,16 @@ class DefaultTemplates(unittest.TestCase):
         response = requests.delete(template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, template_url, 200, SchedulingUnitTemplate_test_data())
+        GET_and_assert_expected_response(self, template_url, 200, test_data_creator.SchedulingUnitTemplate())

     def test_default_task_template_PROTECT_behavior_on_template_deleted(self):
         # POST with dependency
         template_url = POST_and_assert_expected_response(self, BASE_URL + '/task_template/',
-                                                         TaskTemplate_test_data(), 201,
-                                                         TaskTemplate_test_data())['url']
-        test_data = dict(DefaultTemplates_test_data("defaulttemplate2"))
+                                                         test_data_creator.TaskTemplate(), 201,
+                                                         test_data_creator.TaskTemplate())['url']
+        test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2"))
         test_data['template'] = template_url
         POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', test_data, 201, test_data)
@@ -513,15 +517,15 @@ class DefaultTemplates(unittest.TestCase):
         response = requests.delete(template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, template_url, 200, TaskTemplate_test_data())
+        GET_and_assert_expected_response(self, template_url, 200, test_data_creator.TaskTemplate())

     def test_default_work_relation_selection_template_PROTECT_behavior_on_template_deleted(self):
         # POST with dependency
         template_url = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/',
-                                                         WorkRelationSelectionTemplate_test_data(), 201,
-                                                         WorkRelationSelectionTemplate_test_data())['url']
-        test_data = dict(DefaultTemplates_test_data("defaulttemplate2"))
+                                                         test_data_creator.WorkRelationSelectionTemplate(), 201,
+                                                         test_data_creator.WorkRelationSelectionTemplate())['url']
+        test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2"))
         test_data['template'] = template_url
         POST_and_assert_expected_response(self, BASE_URL + '/default_work_relation_selection_template/', test_data, 201, test_data)
@@ -531,7 +535,7 @@ class DefaultTemplates(unittest.TestCase):
         response = requests.delete(template_url, auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_expected_response(self, template_url, 200, WorkRelationSelectionTemplate_test_data())
+        GET_and_assert_expected_response(self, template_url, 200, test_data_creator.WorkRelationSelectionTemplate())

 class CycleTestCase(unittest.TestCase):
@@ -546,16 +550,16 @@ class CycleTestCase(unittest.TestCase):

     def test_cycle_POST_and_GET(self):
         # POST and GET a new item and assert correctness
-        cycle_test_data = Cycle_test_data()
+        cycle_test_data = test_data_creator.Cycle()
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, cycle_test_data)

     def test_cycle_PUT_invalid_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/cycle/9876789876/', Cycle_test_data(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/cycle/9876789876/', test_data_creator.Cycle(), 404, {})

     def test_cycle_PUT(self):
-        cycle_test_data = Cycle_test_data()
+        cycle_test_data = test_data_creator.Cycle()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data)
@@ -563,13 +567,13 @@ class CycleTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, cycle_test_data)

         # PUT new values, verify
-        test_data = dict(Cycle_test_data("other description"))
+        test_data = dict(test_data_creator.Cycle("other description"))
         test_data['name'] = cycle_test_data['name']  # since name is PK, need to keep that unchanged
         PUT_and_assert_expected_response(self, url, test_data, 200, test_data)
         GET_and_assert_expected_response(self, url, 200, test_data)

     def test_cycle_PATCH(self):
-        cycle_test_data = Cycle_test_data()
+        cycle_test_data = test_data_creator.Cycle()

         # POST new item, verify
@@ -586,7 +590,7 @@ class CycleTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_cycle_DELETE(self):
-        cycle_test_data = Cycle_test_data()
+        cycle_test_data = test_data_creator.Cycle()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data)
@@ -607,7 +611,7 @@ class ProjectTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/project/1234321/', 404, {})

     def test_project_POST_and_GET(self):
-        project_test_data = Project_test_data()
+        project_test_data = test_data_creator.Project()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data)
@@ -615,10 +619,10 @@ class ProjectTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, project_test_data)

     def test_project_PUT_invalid_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/project/9876789876/', Project_test_data(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/project/9876789876/', test_data_creator.Project(), 404, {})

     def test_project_PUT(self):
-        project_test_data = Project_test_data()
+        project_test_data = test_data_creator.Project()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data)
@@ -626,13 +630,13 @@ class ProjectTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, project_test_data)

         # PUT new values, verify
-        test_data = dict(Project_test_data("other description"))
+        test_data = dict(test_data_creator.Project("other description"))
         test_data['name'] = project_test_data['name']  # since name is PK, need to keep that unchanged
         PUT_and_assert_expected_response(self, url, test_data, 200, test_data)
         GET_and_assert_expected_response(self, url, 200, test_data)

     def test_project_PATCH(self):
-        project_test_data = Project_test_data()
+        project_test_data = test_data_creator.Project()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data)
@@ -649,7 +653,7 @@ class ProjectTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_project_DELETE(self):
-        project_test_data = Project_test_data()
+        project_test_data = test_data_creator.Project()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data)
@@ -662,9 +666,9 @@ class ProjectTestCase(unittest.TestCase):

     def test_project_PROTECT_behavior_on_cycle_deleted(self):
         # POST new item with dependencies
-        cycle_test_data = Cycle_test_data()
+        cycle_test_data = test_data_creator.Cycle()
         cycle_url = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data)['url']
-        test_data = dict(Project_test_data())
+        test_data = dict(test_data_creator.Project())
         test_data['cycle'] = cycle_url
         url = POST_and_assert_expected_response(self, BASE_URL + '/project/', test_data, 201, test_data)['url']
@@ -692,7 +696,7 @@ class SchedulingSetTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/scheduling_set/1234321/', 404, {})

     def test_scheduling_set_POST_and_GET(self):
-        schedulingset_test_data = SchedulingSet_test_data()
+        schedulingset_test_data = test_data_creator.SchedulingSet()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)
@@ -700,25 +704,25 @@ class SchedulingSetTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, schedulingset_test_data)

     def test_scheduling_set_PUT_invalid_raises_error(self):
-        schedulingset_test_data = SchedulingSet_test_data()
+        schedulingset_test_data = test_data_creator.SchedulingSet()
         PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_set/9876789876/', schedulingset_test_data, 404, {})

     def test_scheduling_set_PUT(self):
-        project_url = post_data_and_get_url(Project_test_data(), '/project/')
-        schedulingset_test_data = SchedulingSet_test_data(project_url=project_url)
+        project_url = test_data_creator.post_data_and_get_url(test_data_creator.Project(), '/project/')
+        schedulingset_test_data = test_data_creator.SchedulingSet(project_url=project_url)

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, schedulingset_test_data)

-        schedulingset_test_data2 = SchedulingSet_test_data("schedulingset2", project_url=project_url)
+        schedulingset_test_data2 = test_data_creator.SchedulingSet("schedulingset2", project_url=project_url)

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, schedulingset_test_data2, 200, schedulingset_test_data2)
         GET_and_assert_expected_response(self, url, 200, schedulingset_test_data2)

     def test_scheduling_set_PATCH(self):
-        schedulingset_test_data = SchedulingSet_test_data()
+        schedulingset_test_data = test_data_creator.SchedulingSet()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)
@@ -735,7 +739,7 @@ class SchedulingSetTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_scheduling_set_DELETE(self):
-        schedulingset_test_data = SchedulingSet_test_data()
+        schedulingset_test_data = test_data_creator.SchedulingSet()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)
@@ -746,9 +750,9 @@ class SchedulingSetTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)

     def test_scheduling_set_PROTECT_behavior_on_project_deleted(self):
-        project_url = post_data_and_get_url(Project_test_data(), '/project/')
+        project_url = test_data_creator.post_data_and_get_url(test_data_creator.Project(), '/project/')
         project_test_data = GET_and_assert_expected_response(self, project_url, 200, {})
-        schedulingset_test_data = SchedulingSet_test_data(project_url=project_url)
+        schedulingset_test_data = test_data_creator.SchedulingSet(project_url=project_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)['url']
@@ -764,8 +768,8 @@ class SchedulingSetTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, project_url, 200, project_test_data)

     def test_scheduling_set_SET_NULL_behavior_on_generator_template_deleted(self):
-        generator_template_url = post_data_and_get_url(GeneratorTemplate_test_data(), '/generator_template/')
-        schedulingset_test_data = SchedulingSet_test_data(generator_template_url=generator_template_url)
+        generator_template_url = test_data_creator.post_data_and_get_url(test_data_creator.GeneratorTemplate(), '/generator_template/')
+        schedulingset_test_data = test_data_creator.SchedulingSet(generator_template_url=generator_template_url)

         # POST new item
         test_data = dict(schedulingset_test_data)
@@ -792,7 +796,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/1234321/', 404, {})

     def test_scheduling_unit_draft_POST_and_GET(self):
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
@@ -800,25 +804,25 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data)

     def test_scheduling_unit_draft_PUT_invalid_raises_error(self):
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
         PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/9876789876/', schedulingunitdraft_test_data, 404, {})

     def test_scheduling_unit_draft_PUT(self):
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data)

-        schedulingunitdraft_test_data2 = SchedulingUnitDraft_test_data("my_scheduling_unit_draft2")
+        schedulingunitdraft_test_data2 = test_data_creator.SchedulingUnitDraft("my_scheduling_unit_draft2")

         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, schedulingunitdraft_test_data2, 200, schedulingunitdraft_test_data2)
         GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data2)

     def test_scheduling_unit_draft_PATCH(self):
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
@@ -835,7 +839,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_scheduling_unit_draft_DELETE(self):
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
@@ -846,8 +850,8 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)

     def test_scheduling_unit_draft_CASCADE_behavior_on_scheduling_unit_template_deleted(self):
-        template_url = post_data_and_get_url(SchedulingUnitTemplate_test_data(), '/scheduling_unit_template/')
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data(template_url=template_url)
+        template_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitTemplate(), '/scheduling_unit_template/')
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(template_url=template_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url']
@@ -862,8 +866,8 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})

     def test_scheduling_unit_draft_CASCADE_behavior_on_scheduling_set_deleted(self):
-        scheduling_set_url = post_data_and_get_url(SchedulingSet_test_data(), '/scheduling_set/')
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data(scheduling_set_url=scheduling_set_url)
+        scheduling_set_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingSet(), '/scheduling_set/')
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=scheduling_set_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url']
@@ -878,7 +882,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})

     def test_scheduling_unit_draft_SET_NULL_behavior_on_copies_deleted(self):
-        schedulingunitdraft_test_data = SchedulingUnitDraft_test_data()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()

         # POST new item with dependency
         copy_url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url']
@@ -907,7 +911,7 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_draft/1234321/', 404, {})

     def test_task_draft_POST_and_GET(self):
-        taskdraft_test_data = TaskDraft_test_data()
+        taskdraft_test_data = test_data_creator.TaskDraft()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)
@@ -915,12 +919,12 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, taskdraft_test_data)

     def test_task_draft_PUT_invalid_raises_error(self):
-        taskdraft_test_data = TaskDraft_test_data()
+        taskdraft_test_data = test_data_creator.TaskDraft()
         PUT_and_assert_expected_response(self, BASE_URL + '/task_draft/9876789876/', taskdraft_test_data, 404, {})

     def test_task_draft_PUT(self):
-        taskdraft_test_data1 = TaskDraft_test_data(name="the one")
-        taskdraft_test_data2 = TaskDraft_test_data(name="the other")
+        taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one")
+        taskdraft_test_data2 = test_data_creator.TaskDraft(name="the other")

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data1, 201, taskdraft_test_data1)
@@ -932,7 +936,7 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, taskdraft_test_data2)

     def test_task_draft_PATCH(self):
-        taskdraft_test_data = TaskDraft_test_data()
+        taskdraft_test_data = test_data_creator.TaskDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)
@@ -949,7 +953,7 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_task_draft_DELETE(self):
-        taskdraft_test_data = TaskDraft_test_data()
+        taskdraft_test_data = test_data_creator.TaskDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)
@@ -960,8 +964,8 @@ class TaskDraftTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)

     def test_task_draft_CASCADE_behavior_on_task_template_deleted(self):
-        template_url = post_data_and_get_url(TaskTemplate_test_data(), '/task_template/')
-        taskdraft_test_data = TaskDraft_test_data(name="task draft 2", template_url=template_url)
+        template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+        taskdraft_test_data = test_data_creator.TaskDraft(name="task draft 2", template_url=template_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)['url']
@@ -976,8 +980,8 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})

     def test_task_draft_CASCADE_behavior_on_scheduling_unit_draft_deleted(self):
-        scheduling_unit_draft_url = post_data_and_get_url(SchedulingUnitDraft_test_data(), '/scheduling_unit_draft/')
-        taskdraft_test_data = TaskDraft_test_data(name="task draft 2", scheduling_unit_draft_url=scheduling_unit_draft_url)
+        scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/')
+        taskdraft_test_data = test_data_creator.TaskDraft(name="task draft 2", scheduling_unit_draft_url=scheduling_unit_draft_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)['url']
@@ -992,8 +996,8 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})

     def test_task_draft_SET_NULL_behavior_on_copies_deleted(self):
-        taskdraft_test_data1 = TaskDraft_test_data(name="the one")
-        taskdraft_test_data2 = TaskDraft_test_data(name="the other")
+        taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one")
+        taskdraft_test_data2 = test_data_creator.TaskDraft(name="the other")

         # POST new item with dependency
         copy_url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data2, 201, taskdraft_test_data2)['url']
@@ -1022,7 +1026,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/1234321/', 404, {})

     def test_task_relation_draft_POST_and_GET(self):
-        trd_test_data = TaskRelationDraft_test_data()
+        trd_test_data = test_data_creator.TaskRelationDraft()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)
@@ -1030,12 +1034,12 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, trd_test_data)

     def test_task_relation_draft_PUT_invalid_raises_error(self):
-        trd_test_data = TaskRelationDraft_test_data()
+        trd_test_data = test_data_creator.TaskRelationDraft()
         PUT_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/9876789876/', trd_test_data, 404, {})

     def test_task_relation_draft_PUT(self):
-        trd_test_data1 = TaskRelationDraft_test_data()
-        trd_test_data2 = TaskRelationDraft_test_data()
+        trd_test_data1 = test_data_creator.TaskRelationDraft()
+        trd_test_data2 = test_data_creator.TaskRelationDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data1, 201, trd_test_data1)
@@ -1047,7 +1051,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, trd_test_data2)

     def test_task_relation_draft_PATCH(self):
-        trd_test_data = TaskRelationDraft_test_data()
+        trd_test_data = test_data_creator.TaskRelationDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)
@@ -1063,7 +1067,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_task_relation_draft_DELETE(self):
-        trd_test_data = TaskRelationDraft_test_data()
+        trd_test_data = test_data_creator.TaskRelationDraft()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)
@@ -1074,8 +1078,8 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)

     def test_task_relation_draft_CASCADE_behavior_on_work_relation_selection_template_deleted(self):
-        template_url = post_data_and_get_url(WorkRelationSelectionTemplate_test_data(), '/work_relation_selection_template/')
-        trd_test_data = TaskRelationDraft_test_data(template_url=template_url)
+        template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')
+        trd_test_data = test_data_creator.TaskRelationDraft(template_url=template_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)['url']
@@ -1090,8 +1094,8 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})

     def test_task_relation_draft_CASCADE_behavior_on_producer_deleted(self):
-        producer_url = post_data_and_get_url(TaskDraft_test_data(), '/task_draft/')
-        trd_test_data = TaskRelationDraft_test_data(producer_url=producer_url)
+        producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
+        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=producer_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1108,8 +1112,8 @@ class TaskRelationDraftTestCase(unittest.TestCase):

     def test_task_relation_draft_CASCADE_behavior_on_consumer_deleted(self):
-        consumer_url = post_data_and_get_url(TaskDraft_test_data(), '/task_draft/')
-        trd_test_data = TaskRelationDraft_test_data(consumer_url=consumer_url)
+        consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
+        trd_test_data = test_data_creator.TaskRelationDraft(consumer_url=consumer_url)

         # POST new item with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1126,8 +1130,8 @@ class TaskRelationDraftTestCase(unittest.TestCase):

     def test_task_relation_draft_CASCADE_behavior_on_input_deleted(self):
-        input_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/')
-        trd_test_data = TaskRelationDraft_test_data(input_url=input_url)
+        input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
+        trd_test_data = test_data_creator.TaskRelationDraft(input_url=input_url)

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1144,8 +1148,8 @@ class TaskRelationDraftTestCase(unittest.TestCase):

     def test_task_relation_draft_CASCADE_behavior_on_output_deleted(self):
-        output_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/')
-        trd_test_data = TaskRelationDraft_test_data(output_url=output_url)
+        output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
+        trd_test_data = test_data_creator.TaskRelationDraft(output_url=output_url)

         # POST new item with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1171,7 +1175,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/1234321/', 404, {})

     def test_scheduling_unit_blueprint_POST_and_GET(self):
-        sub_test_data = SchedulingUnitBlueprint_test_data()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)
@@ -1179,12 +1183,12 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sub_test_data)

     def test_scheduling_unit_blueprint_PUT_invalid_raises_error(self):
-        sub_test_data = SchedulingUnitBlueprint_test_data()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
         PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/9876789876/', sub_test_data, 404, {})

     def test_scheduling_unit_blueprint_PUT(self):
-        sub_test_data1 = SchedulingUnitBlueprint_test_data(name="the one")
-        sub_test_data2 = SchedulingUnitBlueprint_test_data(name="the other")
+        sub_test_data1 = test_data_creator.SchedulingUnitBlueprint(name="the one")
+        sub_test_data2 = test_data_creator.SchedulingUnitBlueprint(name="the other")

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data1, 201, sub_test_data1)
@@ -1196,7 +1200,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sub_test_data2)

     def test_scheduling_unit_blueprint_PATCH(self):
-        sub_test_data = SchedulingUnitBlueprint_test_data()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)
@@ -1213,7 +1217,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_scheduling_unit_blueprint_DELETE(self):
-        sub_test_data = SchedulingUnitBlueprint_test_data()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)
@@ -1224,7 +1228,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)

     def test_scheduling_unit_blueprint_CASCADE_behavior_on_scheduling_unit_template_deleted(self):
-        sub_test_data = SchedulingUnitBlueprint_test_data()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint()

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)['url']
@@ -1239,7 +1243,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})

     def test_scheduling_unit_blueprint_CASCADE_behavior_on_scheduling_unit_draft_deleted(self):
-        sub_test_data = SchedulingUnitBlueprint_test_data()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint()

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)['url']
@@ -1263,7 +1267,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_blueprint/1234321/', 404, {})

     def test_task_blueprint_POST_and_GET(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)
@@ -1271,12 +1275,12 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data)

     def test_task_blueprint_PUT_invalid_raises_error(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()
         PUT_and_assert_expected_response(self, BASE_URL + '/task_blueprint/9876789876/', tb_test_data, 404, {})

     def test_task_blueprint_PUT(self):
-        tb_test_data1 = TaskBlueprint_test_data(name="the one")
-        tb_test_data2 = TaskBlueprint_test_data(name="the other")
+        tb_test_data1 = test_data_creator.TaskBlueprint(name="the one")
+        tb_test_data2 = test_data_creator.TaskBlueprint(name="the other")

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data1, 201, tb_test_data1)
@@ -1288,7 +1292,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data2)

     def test_task_blueprint_PATCH(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)
@@ -1305,7 +1309,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)

     def test_task_blueprint_DELETE(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)
@@ -1316,7 +1320,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)

     def test_task_blueprint_prevents_missing_specification_template(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # test data
         test_data = dict(tb_test_data)
@@ -1327,7 +1331,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['specifications_template']))

     def test_task_blueprint_prevents_missing_draft(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # test data
         test_data = dict(tb_test_data)
@@ -1338,7 +1342,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['draft']))

     def test_task_blueprint_prevents_missing_scheduling_unit_blueprint(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # test data
         test_data = dict(tb_test_data)
@@ -1349,7 +1353,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['scheduling_unit_blueprint']))

     def test_task_blueprint_CASCADE_behavior_on_task_template_deleted(self):
-        tb_test_data = TaskBlueprint_test_data()
+        tb_test_data = test_data_creator.TaskBlueprint()

         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)['url']
@@ -1364,7 +1368,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})
def test_task_blueprint_CASCADE_behavior_on_task_draft_deleted(self): - tb_test_data = TaskBlueprint_test_data() + tb_test_data = test_data_creator.TaskBlueprint() # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)['url'] @@ -1379,7 +1383,7 @@ class TaskBlueprintTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 404, {}) def test_task_blueprint_CASCADE_behavior_on_scheduling_unit_blueprint_deleted(self): - tb_test_data = TaskBlueprint_test_data() + tb_test_data = test_data_creator.TaskBlueprint() # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)['url'] @@ -1404,7 +1408,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/1234321/', 404, {}) def test_task_relation_blueprint_POST_and_GET(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) @@ -1412,12 +1416,12 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, trb_test_data) def test_task_relation_blueprint_PUT_invalid_raises_error(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() PUT_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/9876789876/', trb_test_data, 404, {}) def test_task_relation_blueprint_PUT(self): - trb_test_data1 = TaskRelationBlueprint_test_data() - trb_test_data2 = TaskRelationBlueprint_test_data() + trb_test_data1 = test_data_creator.TaskRelationBlueprint() + trb_test_data2 = test_data_creator.TaskRelationBlueprint() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data1, 201, trb_test_data1) @@ -1429,7 +1433,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, trb_test_data2) def test_task_relation_blueprint_PATCH(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) @@ -1445,7 +1449,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 200, expected_data) def test_task_relation_blueprint_DELETE(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) @@ -1456,7 +1460,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_task_relation_blueprint_prevents_missing_selection_template(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # test data test_data = dict(trb_test_data) @@ -1467,7 +1471,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['selection_template'])) def 
test_task_relation_blueprint_prevents_missing_draft(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # test data test_data = dict(trb_test_data) @@ -1478,7 +1482,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['draft'])) def test_task_relation_blueprint_prevents_missing_producer(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # test data test_data = dict(trb_test_data) @@ -1489,7 +1493,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['producer'])) def test_task_relation_blueprint_prevents_missing_consumer(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # test data test_data = dict(trb_test_data) @@ -1500,7 +1504,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['consumer'])) def test_task_relation_blueprint_prevents_missing_input(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # test data test_data = dict(trb_test_data) @@ -1511,7 +1515,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['input'])) def test_task_relation_blueprint_prevents_missing_output(self): - trb_test_data = TaskRelationBlueprint_test_data() + trb_test_data = test_data_creator.TaskRelationBlueprint() # test data test_data = dict(trb_test_data) @@ -1522,8 +1526,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['output'])) def test_task_relation_blueprint_CASCADE_behavior_on_work_relation_selection_template_deleted(self): - template_url = post_data_and_get_url(WorkRelationSelectionTemplate_test_data(), '/work_relation_selection_template/') - trb_test_data = TaskRelationBlueprint_test_data(template_url=template_url) + template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/') + trb_test_data = test_data_creator.TaskRelationBlueprint(template_url=template_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data)['url'] @@ -1538,8 +1542,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_expected_response(self, url, 404, {}) def test_task_relation_blueprint_CASCADE_behavior_on_producer_deleted(self): - producer_url = post_data_and_get_url(TaskBlueprint_test_data(), '/task_blueprint/') - trb_test_data = TaskRelationBlueprint_test_data(producer_url=producer_url) + producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') + trb_test_data = test_data_creator.TaskRelationBlueprint(producer_url=producer_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', @@ -1556,8 +1560,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_consumer_deleted(self): - consumer_url = post_data_and_get_url(TaskBlueprint_test_data(), '/task_blueprint/') - trb_test_data = TaskRelationBlueprint_test_data(consumer_url=consumer_url) + consumer_url = 
test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') + trb_test_data = test_data_creator.TaskRelationBlueprint(consumer_url=consumer_url) # POST new item with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', @@ -1574,8 +1578,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_input_deleted(self): - input_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/') - trb_test_data = TaskRelationBlueprint_test_data(input_url=input_url) + input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + trb_test_data = test_data_creator.TaskRelationBlueprint(input_url=input_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', @@ -1592,8 +1596,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_output_deleted(self): - output_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/') - trb_test_data = TaskRelationBlueprint_test_data(output_url=output_url) + output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/') + trb_test_data = test_data_creator.TaskRelationBlueprint(output_url=output_url) # POST new item with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.py b/SAS/TMSS/test/t_tmssapp_specification_permissions.py new file mode 100644 index 0000000000000000000000000000000000000000..cc356399963a8d553c330d54d09135dc00ed8808 --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_specification_permissions.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
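+# The tests below exercise django's group/permission handling end-to-end against the REST API. +# The core pattern, sketched with django's standard auth API (note the re-fetch: django caches +# permissions on the User instance, see the documentation links in the tests below): +#     group = Group.objects.create(name='support') +#     group.permissions.add(Permission.objects.get(codename='add_cycle')) +#     user.groups.set([group]) +#     user = User.objects.get(username=user.username)   # refresh the permission cache +#     user.has_perm('tmssapp.add_cycle')                 # -> True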
+ +# $Id: $ + +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +# Do mandatory setup step: +# use setup/teardown magic for tmss test database, ldap server and django server +# (ignore the pycharm unused import statement; the python unittest framework does use the tmss_test_environment_unittest_setup module at runtime) +from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * + +# import and setup test data creator +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + +from lofar.sas.tmss.tmss.tmssapp import models + +from django.contrib.auth.models import User, Group, Permission +from datetime import datetime +import unittest +import requests + +class CyclePermissionTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.test_data_creator = TMSSRESTTestDataCreator(BASE_URL, requests.auth.HTTPBasicAuth('paulus', 'pauluspass')) + response = requests.get(cls.test_data_creator.django_api_url, auth=cls.test_data_creator.auth) + + cls.support_group = Group.objects.create(name='support') + cls.support_group.permissions.add(Permission.objects.get(codename='add_cycle')) + + cls.admin_group = Group.objects.create(name='admin') + cls.admin_group.permissions.add(Permission.objects.get(codename='delete_cycle')) + + def test_Cycle_cannot_be_added_without_group(self): + user = User.objects.get(username='paulus') + user.groups.set([]) + + # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching + user = User.objects.get(username='paulus') + + self.assertFalse(user.has_perm('tmssapp.add_cycle')) + + test_data = self.test_data_creator.Cycle() + res = self.test_data_creator.post_data_and_get_response(test_data, '/cycle/') + self.assertEqual(res.status_code, 403) + + def test_Cycle_can_be_added_by_support(self): + user = User.objects.get(username='paulus') + user.groups.set([self.support_group]) + + # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching + user = User.objects.get(username='paulus') + + self.assertTrue(user.has_perm('tmssapp.add_cycle')) + + test_data = self.test_data_creator.Cycle() + res = self.test_data_creator.post_data_and_get_response(test_data, '/cycle/') + self.assertEqual(res.status_code, 201) + + def test_Cycle_cannot_be_deleted_without_group(self): + user = User.objects.get(username='paulus') + user.groups.set([self.support_group]) # can add, cannot delete + + # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching + user = User.objects.get(username='paulus') + + # add + count = len(models.Cycle.objects.all()) + test_data = self.test_data_creator.Cycle() + url = self.test_data_creator.post_data_and_get_url(test_data, '/cycle/') + self.assertEqual(count+1, len(models.Cycle.objects.all())) + + # delete + response = requests.delete(url, auth=self.test_data_creator.auth) + self.assertEqual(response.status_code, 403) + self.assertEqual(count + 1, len(models.Cycle.objects.all())) + + def test_Cycle_can_be_deleted_by_admin(self): + user = User.objects.get(username='paulus') + user.groups.set([self.support_group, self.admin_group]) # can add and delete + + # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching + user = User.objects.get(username='paulus') + + # add + count = len(models.Cycle.objects.all()) + test_data = 
self.test_data_creator.Cycle() + url = self.test_data_creator.post_data_and_get_url(test_data, '/cycle/') + self.assertEqual(count+1, len(models.Cycle.objects.all())) + + # delete + response = requests.delete(url, auth=self.test_data_creator.auth) + self.assertEqual(response.status_code, 204) + self.assertEqual(count, len(models.Cycle.objects.all())) + +if __name__ == "__main__": + unittest.main() + diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.run b/SAS/TMSS/test/t_tmssapp_specification_permissions.run new file mode 100755 index 0000000000000000000000000000000000000000..d77ebff5c280ee56963775a4bf9b6a03b73bea6d --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_specification_permissions.run @@ -0,0 +1,5 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_tmssapp_specification_permissions.py diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.sh b/SAS/TMSS/test/t_tmssapp_specification_permissions.sh new file mode 100755 index 0000000000000000000000000000000000000000..8689f8e0e9a5ccc08371584254cc450704cd9d9d --- /dev/null +++ b/SAS/TMSS/test/t_tmssapp_specification_permissions.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_tmssapp_specification_permissions \ No newline at end of file diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index d64d416d2582f599099658dfbbf659ab1820d7f8..37ea08a42e6e856894ed4fedd1159924a075194a 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -22,6 +22,7 @@ import os import time from multiprocessing import Process, Event +import django import logging logger = logging.getLogger(__name__) @@ -143,6 +144,14 @@ class TMSSDjangoServerInstance(): ''':returns the temporary LDAP Credentials''' return DBCredentials().get(self._ldap_dbcreds_id) + def setup_django(self): + # (tmss)django is initialized via many environment variables. + # set these here, run django setup, and start the server + os.environ["TMSS_LDAPCREDENTIALS"] = self.ldap_dbcreds_id + os.environ["TMSS_DBCREDENTIALS"] = self.database_dbcreds_id + os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" + django.setup() + def start(self): ''' Start the Django server with a test-LDAP server in the background. @@ -152,14 +161,7 @@ class TMSSDjangoServerInstance(): logger.info("Starting Django server at port=%d with database: %s and LDAP: %s", self.port, self.database_dbcreds, self.ldap_dbcreds) - # (tmss)django is initialized via many environment variables. 
- # set these here, run django setup, and start the server - os.environ["TMSS_LDAPCREDENTIALS"] = self.ldap_dbcreds_id - os.environ["TMSS_DBCREDENTIALS"] = self.database_dbcreds_id - os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" - - import django - django.setup() + self.setup_django() django.core.management.call_command('runserver', use_reloader=False, addrport=self.address) @@ -228,18 +230,31 @@ class TMSSDjangoServerInstance(): class TMSSTestEnvironment: '''Create and run a test django TMSS server against a newly created test database and a test ldap server (and cleanup automagically)''' - def __init__(self): + def __init__(self, preferred_django_port: int=8000): self.ldap_server = TestLDAPServer(user='test', password='test') self.database = TMSSTestDatabaseInstance() self.django_server = TMSSDjangoServerInstance(db_dbcreds_id=self.database.dbcreds_id, ldap_dbcreds_id=self.ldap_server.dbcreds_id, - port=find_free_port(8000)) + port=find_free_port(preferred_django_port)) def start(self): self.ldap_server.start() self.database.create() self.django_server.start() + # apart from the running django server with a REST API, + # it is also convenient to provide a working django setup for the 'normal' django API (via models.objects) + # so: do setup_django + self.django_server.setup_django() + + # now that the ldap and django server are running, and the django setup has been done, + # we can announce our test user as superuser, so the test user can do anything via the API. + # (there are also other tests, using other (on the fly created) users with restricted permissions, which is fine but not part of this generic setup.) + from django.contrib.auth.models import User + user, _ = User.objects.get_or_create(username=self.ldap_server.dbcreds.user) + user.is_superuser = True + user.save() + def stop(self): self.django_server.stop() self.ldap_server.stop() @@ -257,6 +272,7 @@ class TMSSTestEnvironment: def __exit__(self, exc_type, exc_val, exc_tb): self.stop() + def main_test_database(): """instantiate, run and destroy a test postgres django database""" os.environ['TZ'] = 'UTC' @@ -273,17 +289,24 @@ def main_test_database(): print("Test-TMSS database up and running.") print("**********************************") print("DB Credentials ID: %s (for example to run tmss against this test db, call 'tmss -C %s')" % (db.dbcreds_id, db.dbcreds_id)) - print() print("Press Ctrl-C to exit (and remove the test database automatically)") waitForInterrupt() def main_test_environment(): """instantiate, run and destroy a full tmss test environment (postgres database, ldap server, django server)""" + from optparse import OptionParser os.environ['TZ'] = 'UTC' + + parser = OptionParser('%prog [options]', + description='setup/run/teardown a full TMSS test environment including a fresh and isolated database, LDAP server and DJANGO REST server.') + parser.add_option("-p", "--port", dest="port", type="int", default=find_free_port(8000), + help="try to use this port for the DJANGO REST API. If not available, then a random free port is used and logged. [default=%default]") + (options, args) = parser.parse_args() + logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) - with TMSSTestEnvironment() as instance: + with TMSSTestEnvironment(preferred_django_port=options.port) as instance: # print some nice info for the user to use the test servers... # use print instead of log for clean lines. 
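+ # (TMSSTestEnvironment can likewise be used directly in test code; a minimal sketch: + #      with TMSSTestEnvironment(preferred_django_port=8000) as env: + #          env.django_server.url, env.database.dbcreds_id, env.django_server.ldap_dbcreds_id + #  on exit, the django server, test database and ldap server are stopped and removed.)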
for h in logging.root.handlers: @@ -296,7 +319,10 @@ def main_test_environment(): print("DB Credentials ID: %s" % (instance.database.dbcreds_id, )) print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, )) print("Django URL: %s" % (instance.django_server.url)) - + print() + print("Example cmdlines to run tmss or tmss_manage_django:") + print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) + print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) print() print("Press Ctrl-C to exit (and remove the test database and django server automatically)") waitForInterrupt() diff --git a/SAS/TMSS/test/testdata/readme.txt b/SAS/TMSS/test/testdata/readme.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ce6efa3b59c18e72c7476e6a7a4542ede2ee6e9 --- /dev/null +++ b/SAS/TMSS/test/testdata/readme.txt @@ -0,0 +1,11 @@ +Provide model data with fixture files, see https://docs.djangoproject.com/en/2.2/howto/initial-data/ +Note that the fixture file is NOT meant for production or unittest data. It is just an example of how you can fill the +Django model/Database with data. It can be used for demo purposes or small manual testing. +Use the Django manage.py 'loaddata' command with a json 'fixture' file. +First set the following environment variables: +export TMSS_DBCREDENTIALS=<DB Credentials ID> +export TMSS_LDAPCREDENTIALS=<LDAP Credentials ID> +The environment variables are provided during startup of 'tmss_test_environment'. +Finally execute (in the home of your project): +/usr/bin/python3 build/gnucxx11_opt/lib/python3.6/site-packages/lofar/sas/tmss/manage.py loaddata ./SAS/TMSS/test/testdata/subtasks.json + diff --git a/SAS/TMSS/test/testdata/subtasks.json b/SAS/TMSS/test/testdata/subtasks.json new file mode 100644 index 0000000000000000000000000000000000000000..9bb8b375e5b1c2bc7fd59d0557f5b9e42f2b0bce --- /dev/null +++ b/SAS/TMSS/test/testdata/subtasks.json @@ -0,0 +1,89 @@ +[ + { + "model": "tmssapp.cluster", + "pk": 2, + "fields": { + "name": "bassieenadriaan", + "description": "the next cluster", + "location": "downstairs", + "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], + "created_at": "2020-02-24T13:19:57", + "updated_at": "2020-02-24T13:19:57" + } + }, + { + "model": "tmssapp.cluster", + "pk": 3, + "fields": { + "name": "peppieenkokkie", + "description": "the last cluster", + "location": "anywhere", + "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], + "created_at": "2020-02-24T13:19:57", + "updated_at": "2020-02-24T13:19:57" + } + }, + { + "model": "tmssapp.subtask", + "pk": 2000002, + "fields" : { + "start_time": "2020-01-02T00:00:00", + "stop_time": "2020-01-02T12:00:00", + "specifications_doc": 1, + "do_cancel": null, + "priority": 1, + "scheduler_input_doc": 1, + "state": "defined", + "task_blueprint": null, + "specifications_template": 1, + "schedule_method": "manual", + "cluster": 2, + "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], + "created_at": "2020-02-24T13:19:57", + "updated_at": "2020-02-24T13:19:57" + } + }, + { + "model": "tmssapp.subtask", + "pk": 2000003, + "fields" : { + "start_time": "2020-01-03T00:00:00", + "stop_time": "2020-01-03T12:00:00", + "specifications_doc": 1, + "do_cancel": null, + "priority": 1, + "scheduler_input_doc": 1, + "state": "defined", + "task_blueprint": null, + "specifications_template": 1, + "schedule_method": "manual", + 
"cluster": 3, + "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], + "created_at": "2020-02-24T13:19:57", + "updated_at": "2020-02-24T13:19:57" + } + }, + { + "model": "tmssapp.subtask", + "pk": 2000004, + "fields" : { + "start_time": "2020-01-04T00:00:00", + "stop_time": "2020-01-04T12:00:00", + "specifications_doc": 1, + "do_cancel": null, + "priority": 1, + "scheduler_input_doc": 1, + "state": "defined", + "task_blueprint": null, + "specifications_template": 1, + "schedule_method": "manual", + "cluster": 1, + "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], + "created_at": "2020-02-24T13:19:57", + "updated_at": "2020-02-24T13:19:57" + } + } + + + +] \ No newline at end of file diff --git a/SAS/TMSS/test/tmss_database_unittest_setup.py b/SAS/TMSS/test/tmss_database_unittest_setup.py index 0fbffc0d335ead5fef96fa06021b3a8ee47a7c1e..1819ee491a3a9119f1aa99a77736f960a54583bc 100644 --- a/SAS/TMSS/test/tmss_database_unittest_setup.py +++ b/SAS/TMSS/test/tmss_database_unittest_setup.py @@ -44,271 +44,3 @@ django.setup() # Do the actual tmss imports from lofar.sas.tmss.tmss.tmssapp import models -####################################################### -# the methods below can be used to create test data -# naming convention is: <django_model_name>_test_data() -####################################################### - -from datetime import datetime -import uuid - - -def GeneratorTemplate_test_data(name="my_GeneratorTemplate"): - return {"name": name, - "description": 'My one observation', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "create_function": 'Funky', - "tags": ["TMSS", "TESTING"]} - -def DefaultGeneratorTemplate_test_data(name=None, template=None): - return {'name': name if name is not None else "DefaultGeneratorTemplate_"+str(uuid.uuid4()), - 'template': template, - 'tags':[]} - -def SchedulingUnitTemplate_test_data(): - return {"name": "My SchedulingUnitTemplate", - "description": 'My SchedulingUnitTemplate description', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def TaskTemplate_test_data(): - return {"validation_code_js":"", - "name": "my TaskTemplate", - "description": 'My TaskTemplate description', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def WorkRelationSelectionTemplate_test_data(): - return {"name": "my WorkRelationSelectionTemplate", - "description": 'My WorkRelationSelectionTemplate description', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def TaskConnectors_test_data(): - return {"role": models.Role.objects.get(value='calibrator'), - "datatype": models.Datatype.objects.get(value='instrument model'), - "output_of": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), - "input_of": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), - "tags": []} - -def Cycle_test_data(): - return {"name": 'my_cycle' + str(uuid.uuid4()), - "description": "", - "tags": [], - "start": datetime.utcnow().isoformat(), - "stop": datetime.utcnow().isoformat(), - "number": 1, - "standard_hours": 2, - "expert_hours": 3, - "filler_hours": 4} - -def Project_test_data(): - return { "cycle": models.Cycle.objects.create(**Cycle_test_data()), - "name": 'my_project_' + str(uuid.uuid4()), - "description": 'my description ' + str(uuid.uuid4()), - "tags": [], - "priority": 1, - "can_trigger": False, - "private_data": True, - "expert": True, - "filler": False} - -def 
SchedulingSet_test_data(): - return {"name": 'my_scheduling_set', - "description": "", - "tags": [], - "generator_doc": "{}", - "project": models.Project.objects.create(**Project_test_data()), - "generator_template": models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()), - "generator_source": None} - -def SchedulingUnitDraft_test_data(): - return {"name": 'my_scheduling_unit_draft', - "description": "", - "tags": [], - "requirements_doc": "{}", - "copy_reason": models.CopyReason.objects.get(value='template'), - "generator_instance_doc": "para", - "copies": None, - "scheduling_set": models.SchedulingSet.objects.create(**SchedulingSet_test_data()), - "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())} - -def TaskDraft_test_data(): - return {"name": 'my_task_draft', - "description": "", - "tags": [], - "specifications_doc": "{}", - "copy_reason": models.CopyReason.objects.get(value='template'), - "copies": None, - "scheduling_unit_draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()), - "specifications_template": models.TaskTemplate.objects.create(**TaskTemplate_test_data())} - -def TaskRelationDraft_test_data(): - return {"tags": [], - "selection_doc": "{}", - "dataformat": models.Dataformat.objects.get(value='Beamformed'), - "producer": models.TaskDraft.objects.create(**TaskDraft_test_data()), - "consumer": models.TaskDraft.objects.create(**TaskDraft_test_data()), - "input": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), - "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), - "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data())} - -def SchedulingUnitBlueprint_test_data(): - return {"name": 'my_scheduling_unit_blueprint', - "description": "", - "tags": [], - "requirements_doc": "{}", - "do_cancel": False, - "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()), - "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())} - -def TaskBlueprint_test_data(): - return {"name": 'my_task_blueprint', - "description": "", - "tags": [], - "specifications_doc": "{}", - "do_cancel": False, - "draft": models.TaskDraft.objects.create(**TaskDraft_test_data()), - "specifications_template": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), - "scheduling_unit_blueprint": models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data())} - -def TaskRelationBlueprint_test_data(): - return { "tags": [], - "selection_doc": "{}", - "dataformat": models.Dataformat.objects.get(value='Beamformed'), - "input": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), - "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), - "draft": models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()), - "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data()), - "producer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()), - "consumer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())} - - - - -def SubtaskTemplate_test_data(): - return {"type": models.SubtaskType.objects.get(value='copy'), - "name": "observation", - "description": 'My one observation', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "realtime": True, - "queue": False, - 
"tags": ["TMSS", "TESTING"]} - -def DataproductSpecificationsTemplate_test_data(): - return {"name": "data", - "description": 'My one date', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def DataproductFeedbackTemplate_test_data(): - return {"name": "data", - "description": 'My one date', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def SubtaskOutput_test_data(): - return {"subtask": models.Subtask.objects.create(**Subtask_test_data()), - "connector": models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()), - "tags":[]} - -def SubtaskInput_test_data(): - # test data - return {"subtask": models.Subtask.objects.create(**Subtask_test_data()), - "task_relation_blueprint": models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()), - "connector": models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()), - "producer": models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()), - #"dataproducts": models.Dataproduct.objects.create(**dpt.get_test_data()), - "selection_doc": "{}", - "selection_template": models.SubtaskInputSelectionTemplate.objects.create(**SubtaskInputSelectionTemplate_test_data()), - "tags":[]} - -def Subtask_test_data(): - return { "start_time": datetime.utcnow().isoformat(), - "stop_time": datetime.utcnow().isoformat(), - "state": models.SubtaskState.objects.get(value='scheduling'), - "specifications_doc": "{}", - "task_blueprint": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()), - "specifications_template": models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()), - "tags": ["TMSS", "TESTING"], - "do_cancel": datetime.utcnow().isoformat(), - "priority": 1, - "schedule_method": models.ScheduleMethod.objects.get(value='manual'), - "cluster": models.Cluster.objects.create(location="downstairs", tags=[]), - "scheduler_input_doc": "{}"} - -def Dataproduct_test_data(): - return {"filename": "my.file", - "directory": "/home/boskabouter/", - "dataformat": models.Dataformat.objects.get(value='Beamformed'), - "deleted_since": datetime.utcnow().isoformat(), - "pinned_since": datetime.utcnow().isoformat(), - "specifications_doc": "{}", - "specifications_template": models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()), - "tags": ["TMSS", "TESTING"], - "producer": models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()), - "do_cancel": datetime.utcnow().isoformat(), - "expected_size": 1234, - "size": 123, - "feedback_doc": "{}", - "feedback_template": models.DataproductFeedbackTemplate.objects.create(**DataproductFeedbackTemplate_test_data())} - -def SubtaskConnector_test_data(): - return {"role": models.Role.objects.get(value='calibrator'), - "datatype": models.Datatype.objects.get(value='instrument model'), - # "dataformats": [models.Dataformat.objects.get(value='Beamformed')], # -> use set() - "output_of": models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()), - "input_of": models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()), - "tags": []} - -def AntennaSet_test_data(): - return {"name": "observation", - "description": 'My one observation', - "station_type": models.StationType.objects.get(value='core'), - "rcus": [1,2,3,4,5], - "inputs": ['input1', 'input2'], - "tags": ['tmss', 'testing']} - - -def DataproductTransform_test_data(): - return {"input": 
models.Dataproduct.objects.create(**Dataproduct_test_data()), - "output": models.Dataproduct.objects.create(**Dataproduct_test_data()), - "identity": True, - "tags": ['tmss', 'testing']} - -def Filesystem_test_data(): - return {"capacity": 1111111111, - "cluster": models.Cluster.objects.create(**Cluster_test_data()), - "tags": ['tmss', 'testing']} - -def Cluster_test_data(): - return {"location": "upstairs", - "tags": ['tmss', 'testing']} - -def DataproductArchiveInfo_test_data(): - return {"dataproduct": models.Dataproduct.objects.create(**Dataproduct_test_data()), - "storage_ticket": "myticket_1", - "public_since": datetime.utcnow().isoformat(), - "corrupted_since": datetime.utcnow().isoformat(), - "tags": ['tmss', 'testing']} - -def DataproductHash_test_data(): - return {"dataproduct": models.Dataproduct.objects.create(**Dataproduct_test_data()), - "algorithm": models.Algorithm.objects.get(value='md5'), - "hash": "myhash_1", - "tags": ['tmss', 'testing']} - -def SubtaskInputSelectionTemplate_test_data(): - return {"name": "data", - "description": 'My one date', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py new file mode 100644 index 0000000000000000000000000000000000000000..adf787e0a9608cf60c2bf4981ef4203f23f61d74 --- /dev/null +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -0,0 +1,335 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +''' +This helper module provides <django_model_name>_test_data() methods, which return dicts of valid +test data with which instances of the corresponding django models can be created. 
+''' + +####################################################### +# the methods below can be used to create test data +# naming convention is: <django_model_name>_test_data() +####################################################### + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.common.json_utils import get_default_json_object_for_schema + +from datetime import datetime +import uuid + +def GeneratorTemplate_test_data(name="my_GeneratorTemplate") -> dict: + return {"name": name, + "description": 'My one observation', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "create_function": 'Funky', + "tags": ["TMSS", "TESTING"]} + +def DefaultGeneratorTemplate_test_data(name=None, template=None) -> dict: + return {'name': name if name is not None else "DefaultGeneratorTemplate_"+str(uuid.uuid4()), + 'template': template, + 'tags':[]} + +def SchedulingUnitTemplate_test_data() -> dict: + return {"name": "My SchedulingUnitTemplate", + "description": 'My SchedulingUnitTemplate description', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + +def TaskTemplate_test_data() -> dict: + return {"validation_code_js":"", + "name": "my TaskTemplate", + "description": 'My TaskTemplate description', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + +def WorkRelationSelectionTemplate_test_data() -> dict: + return {"name": "my WorkRelationSelectionTemplate", + "description": 'My WorkRelationSelectionTemplate description', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + +def TaskConnectors_test_data() -> dict: + return {"role": models.Role.objects.get(value='calibrator'), + "datatype": models.Datatype.objects.get(value='instrument model'), + "output_of": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), + "input_of": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), + "tags": []} + +def Cycle_test_data() -> dict: + return {"name": 'my_cycle' + str(uuid.uuid4()), + "description": "", + "tags": [], + "start": datetime.utcnow(), + "stop": datetime.utcnow(), + "number": 1, + "standard_hours": 2, + "expert_hours": 3, + "filler_hours": 4} + +def Project_test_data() -> dict: + return { "cycle": models.Cycle.objects.create(**Cycle_test_data()), + "name": 'my_project_' + str(uuid.uuid4()), + "description": 'my description ' + str(uuid.uuid4()), + "tags": [], + "priority": 1, + "can_trigger": False, + "private_data": True, + "expert": True, + "filler": False} + +def SchedulingSet_test_data() -> dict: + return {"name": 'my_scheduling_set', + "description": "", + "tags": [], + "generator_doc": "{}", + "project": models.Project.objects.create(**Project_test_data()), + "generator_template": models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()), + "generator_source": None} + +def SchedulingUnitDraft_test_data() -> dict: + return {"name": 'my_scheduling_unit_draft', + "description": "", + "tags": [], + "requirements_doc": "{}", + "copy_reason": models.CopyReason.objects.get(value='template'), + "generator_instance_doc": "para", + "copies": None, + "scheduling_set": models.SchedulingSet.objects.create(**SchedulingSet_test_data()), + "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())} + +def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None) -> dict: + if specifications_template is None: + 
specifications_template = models.TaskTemplate.objects.create(**TaskTemplate_test_data()) + + return {"name": name, + "description": "", + "tags": [], + "specifications_doc": get_default_json_object_for_schema(specifications_template.schema), + "copy_reason": models.CopyReason.objects.get(value='template'), + "copies": None, + "scheduling_unit_draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()), + "specifications_template": specifications_template } + +def TaskRelationDraft_test_data() -> dict: + return {"tags": [], + "selection_doc": "{}", + "dataformat": models.Dataformat.objects.get(value='Beamformed'), + "producer": models.TaskDraft.objects.create(**TaskDraft_test_data()), + "consumer": models.TaskDraft.objects.create(**TaskDraft_test_data()), + "input": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), + "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), + "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data())} + +def SchedulingUnitBlueprint_test_data() -> dict: + return {"name": 'my_scheduling_unit_blueprint', + "description": "", + "tags": [], + "requirements_doc": "{}", + "do_cancel": False, + "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()), + "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())} + +def TaskBlueprint_test_data(task_draft: models.TaskDraft = None) -> dict: + if task_draft is None: + task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data()) + + return {"name": 'my_task_blueprint', + "description": "", + "tags": [], + "specifications_doc": str(task_draft.specifications_doc), + "do_cancel": False, + "draft": task_draft, + "specifications_template": task_draft.specifications_template, + "scheduling_unit_blueprint": models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data())} + +def TaskRelationBlueprint_test_data() -> dict: + return { "tags": [], + "selection_doc": "{}", + "dataformat": models.Dataformat.objects.get(value='Beamformed'), + "input": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), + "output": models.TaskConnectors.objects.create(**TaskConnectors_test_data()), + "draft": models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()), + "selection_template": models.WorkRelationSelectionTemplate.objects.create(**WorkRelationSelectionTemplate_test_data()), + "producer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()), + "consumer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())} + + +def SubtaskTemplate_test_data(schema: object=None) -> dict: + if schema is None: + schema = {} + + return {"type": models.SubtaskType.objects.get(value='copy'), + "name": "observation", + "description": 'My one observation', + "version": 'v0.314159265359', + "schema": schema, + "realtime": True, + "queue": False, + "tags": ["TMSS", "TESTING"]} + +def DataproductSpecificationsTemplate_test_data() -> dict: + return {"name": "data", + "description": 'My one date', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + +def DataproductFeedbackTemplate_test_data() -> dict: + return {"name": "data", + "description": 'My one date', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + +def SubtaskOutput_test_data(subtask: models.Subtask=None, connector: 
models.SubtaskConnector=None) -> dict: + if subtask is None: + subtask = models.Subtask.objects.create(**Subtask_test_data()) + + if connector is None: + connector = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data(output_of=subtask.specifications_template, input_of=subtask.specifications_template)) + + return {"subtask": subtask, + "connector": connector, + "tags":[]} + +def SubtaskInput_test_data() -> dict: + return {"subtask": models.Subtask.objects.create(**Subtask_test_data()), + "task_relation_blueprint": models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()), + "connector": models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()), + "producer": models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()), + #"dataproducts": models.Dataproduct.objects.create(**dpt.get_test_data()), + "selection_doc": "{}", + "selection_template": models.SubtaskInputSelectionTemplate.objects.create(**SubtaskInputSelectionTemplate_test_data()), + "tags":[]} + +def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_template: models.SubtaskTemplate=None, + specifications_doc: str=None, start_time=None, stop_time=None, cluster_object=None) -> dict: + + if task_blueprint is None: + task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data()) + + if subtask_template is None: + subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) + + if specifications_doc is None: + specifications_doc = get_default_json_object_for_schema(subtask_template.schema) + + if start_time is None: + start_time = datetime.utcnow() + + if stop_time is None: + stop_time = datetime.utcnow() + + if cluster_object is None: + cluster_object = models.Cluster.objects.create(name="dummy cluster", location="downstairs", tags=[]) + + return { "start_time": start_time, + "stop_time": stop_time, + "state": models.SubtaskState.objects.get(value='scheduling'), + "specifications_doc": specifications_doc, + "task_blueprint": task_blueprint, + "specifications_template": subtask_template, + "tags": ["TMSS", "TESTING"], + "do_cancel": datetime.utcnow(), + "priority": 1, + "schedule_method": models.ScheduleMethod.objects.get(value='manual'), + "cluster": cluster_object, + "scheduler_input_doc": "{}"} + +def Dataproduct_test_data(producer: models.SubtaskOutput=None, filename: str="my_file.ext", directory: str="/tmp/my/dir/") -> dict: + if producer is None: + producer = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()) + + return {"filename": filename, + "directory": directory, + "dataformat": models.Dataformat.objects.get(value='Beamformed'), + "deleted_since": datetime.utcnow(), + "pinned_since": datetime.utcnow(), + "specifications_doc": "{}", + "specifications_template": models.DataproductSpecificationsTemplate.objects.create(**DataproductSpecificationsTemplate_test_data()), + "tags": ["TMSS", "TESTING"], + "producer": producer, + "do_cancel": datetime.utcnow(), + "expected_size": 1234, + "size": 123, + "feedback_doc": "{}", + "feedback_template": models.DataproductFeedbackTemplate.objects.create(**DataproductFeedbackTemplate_test_data())} + +def SubtaskConnector_test_data(output_of: models.SubtaskTemplate=None, input_of: models.SubtaskTemplate=None) -> dict: + if output_of is None: + output_of = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) + + if input_of is None: + input_of = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) + + return {"role": 
models.Role.objects.get(value='calibrator'), + "datatype": models.Datatype.objects.get(value='instrument model'), + "output_of": output_of, + "input_of": input_of, + "tags": []} + +def AntennaSet_test_data() -> dict: + return {"name": "observation", + "description": 'My one observation', + "station_type": models.StationType.objects.get(value='core'), + "rcus": [1,2,3,4,5], + "inputs": ['input1', 'input2'], + "tags": ['tmss', 'testing']} + + +def DataproductTransform_test_data() -> dict: + return {"input": models.Dataproduct.objects.create(**Dataproduct_test_data()), + "output": models.Dataproduct.objects.create(**Dataproduct_test_data()), + "identity": True, + "tags": ['tmss', 'testing']} + +def Filesystem_test_data() -> dict: + return {"capacity": 1111111111, + "cluster": models.Cluster.objects.create(**Cluster_test_data()), + "tags": ['tmss', 'testing']} + +def Cluster_test_data(name="default cluster") -> dict: + return {"name": name, + "location": "upstairs", + "tags": ['tmss', 'testing']} + +def DataproductArchiveInfo_test_data() -> dict: + return {"dataproduct": models.Dataproduct.objects.create(**Dataproduct_test_data()), + "storage_ticket": "myticket_1", + "public_since": datetime.utcnow(), + "corrupted_since": datetime.utcnow(), + "tags": ['tmss', 'testing']} + +def DataproductHash_test_data() -> dict: + return {"dataproduct": models.Dataproduct.objects.create(**Dataproduct_test_data()), + "algorithm": models.Algorithm.objects.get(value='md5'), + "hash": "myhash_1", + "tags": ['tmss', 'testing']} + +def SubtaskInputSelectionTemplate_test_data() -> dict: + return {"name": "data", + "description": 'My one date', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py new file mode 100644 index 0000000000000000000000000000000000000000..8c1a62ae360f77f89a83725709ce0931ec122a48 --- /dev/null +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -0,0 +1,471 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
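+# Design note: every factory method below first POSTs any missing dependencies via +# post_data_and_get_url, so one call can build a fully linked object graph. A usage sketch, +# assuming BASE_URL and AUTH as provided by tmss_test_environment_unittest_setup: +#     creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) +#     trb = creator.TaskRelationBlueprint()   # transitively POSTs drafts, templates and connectors +#     trb_url = creator.post_data_and_get_url(trb, '/task_relation_blueprint/')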
+ +################################################################################################ +# the methods below can be used to do HTTP REST calls to the django server and check the results +################################################################################################ + +from datetime import datetime +import uuid +import requests +import json + +class TMSSRESTTestDataCreator(): + def __init__(self, django_api_url: str, auth: requests.auth.HTTPBasicAuth): + self.django_api_url = django_api_url + self.auth = auth + + def post_data_and_get_response(self, data, url_postfix): + """POST the given data to self.django_api_url+url_postfix, and return the response""" + return requests.post(self.django_api_url + url_postfix, json=data, auth=self.auth) + + def post_data_and_get_url(self, data, url_postfix): + """POST the given data to self.django_api_url+url_postfix, and return the response's url""" + return json.loads(self.post_data_and_get_response(data, url_postfix).content.decode('utf-8'))['url'] + + ####################################################### + # the methods below can be used to create test data + # naming convention is: <django_model_name>() + ####################################################### + + + def GeneratorTemplate(self, name="generatortemplate"): + return {"name": name, + "description": 'My one observation', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "create_function": 'Funky', + "tags": ["TMSS", "TESTING"]} + + def SchedulingUnitTemplate(self, name="schedulingunittemplate1"): + return { "name": name, + "description": 'My description', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + + def TaskTemplate(self, name="tasktemplate1"): + return {"name": name, + "description": 'My one observation', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"], + "validation_code_js": "???"} + + def WorkRelationSelectionTemplate(self, name="workrelationselectiontemplate1"): + return {"name": name, + "description": 'My one observation', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + + def TaskConnectors(self, role="correlator", input_of_url=None, output_of_url=None): + if input_of_url is None: + input_of_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/') + + if output_of_url is None: + output_of_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/') + + return {"role": self.django_api_url + '/role/%s/'%role, + "datatype": self.django_api_url + '/datatype/image/', + "dataformats": [self.django_api_url + '/dataformat/Beamformed/'], + "output_of": output_of_url, + "input_of": input_of_url, + "tags": []} + + def DefaultTemplates(self, name="defaulttemplate"): + return {"name": name, + "template": None, + "tags": []} + + def Cycle(self, description="my cycle description"): + return {"name": 'my_cycle_' + str(uuid.uuid4()), + "description": description, + "tags": [], + "start": datetime.utcnow().isoformat(), + "stop": datetime.utcnow().isoformat(), + "number": 1, + "standard_hours": 2, + "expert_hours": 3, + "filler_hours": 4, + "projects": []} + + def Project(self, description="my project description"): + return {"name": 'my_project_' + str(uuid.uuid4()), + "description": description, + "tags": [], + "priority": 1, + "can_trigger": False, + "private_data": True} + + def SchedulingSet(self, name="my_scheduling_set", project_url=None, 
generator_template_url=None): + if project_url is None: + project_url = self.post_data_and_get_url(self.Project(), '/project/') + + if generator_template_url is None: + generator_template_url = self.post_data_and_get_url(self.GeneratorTemplate(), '/generator_template/') + + return {"name": name, + "description": "This is my scheduling set", + "tags": [], + "generator_doc": "{}", + "project": project_url, + "generator_template": generator_template_url, + "generator_source": None, + "scheduling_unit_drafts": []} + + def SchedulingUnitDraft(self, name="my_scheduling_unit_draft", scheduling_set_url=None, template_url=None): + if scheduling_set_url is None: + scheduling_set_url = self.post_data_and_get_url(self.SchedulingSet(), '/scheduling_set/') + + if template_url is None: + template_url = self.post_data_and_get_url(self.SchedulingUnitTemplate(), '/scheduling_unit_template/') + + return {"name": name, + "description": "This is my run draft", + "tags": [], + "requirements_doc": "{}", + "copy_reason": self.django_api_url + '/copy_reason/template/', + "generator_instance_doc": "{}", + "copies": None, + "scheduling_set": scheduling_set_url, + "requirements_template": template_url, + "related_scheduling_unit_blueprint": [], + "task_drafts": []} + + def TaskDraft(self, name='my_task_draft', scheduling_unit_draft_url=None, template_url=None): + if scheduling_unit_draft_url is None: + scheduling_unit_draft_url = self.post_data_and_get_url(self.SchedulingUnitDraft(), '/scheduling_unit_draft/') + + if template_url is None: + template_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/') + + return {"name": name, + "description": "This is my task draft", + "tags": [], + "specifications_doc": "{}", + "copy_reason": self.django_api_url + '/copy_reason/template/', + "copies": None, + "scheduling_unit_draft": scheduling_unit_draft_url, + "specifications_template": template_url, + 'related_task_blueprint': [], + 'produced_by': [], + 'consumed_by': []} + + + def TaskRelationDraft(self, name="myTaskRelationDraft", producer_url=None, consumer_url=None, template_url=None, input_url=None, output_url=None): + if producer_url is None: + producer_url = self.post_data_and_get_url(self.TaskDraft(), '/task_draft/') + + if consumer_url is None: + consumer_url = self.post_data_and_get_url(self.TaskDraft(),'/task_draft/') + + if template_url is None: + template_url = self.post_data_and_get_url(self.WorkRelationSelectionTemplate(), '/work_relation_selection_template/') + + if input_url is None: + input_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/') + + if output_url is None: + output_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/') + + return {"tags": [], + "selection_doc": "{}", + "dataformat": self.django_api_url + "/dataformat/Beamformed/", + "producer": producer_url, + "consumer": consumer_url, + "input": input_url, + "output": output_url, + "selection_template": template_url, + 'related_task_relation_blueprint': []} + + def SchedulingUnitBlueprint(self, name="my_scheduling_unit_blueprint", scheduling_unit_draft_url=None, template_url=None): + if scheduling_unit_draft_url is None: + scheduling_unit_draft_url = self.post_data_and_get_url(self.SchedulingUnitDraft(), '/scheduling_unit_draft/') + + if template_url is None: + template_url = self.post_data_and_get_url(self.SchedulingUnitTemplate(), '/scheduling_unit_template/') + + return {"name": name, + "description": "This is my run blueprint", + "tags": [], + "requirements_doc": "{}", + 
"do_cancel": False, + "draft": scheduling_unit_draft_url, + "requirements_template": template_url} + + def TaskBlueprint(self, name="my_TaskBlueprint", draft_url=None, template_url=None, scheduling_unit_blueprint_url=None): + if draft_url is None: + draft_url = self.post_data_and_get_url(self.TaskDraft(), '/task_draft/') + + if template_url is None: + template_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/') + + if scheduling_unit_blueprint_url is None: + scheduling_unit_blueprint_url = self.post_data_and_get_url(self.SchedulingUnitBlueprint(), '/scheduling_unit_blueprint/') + + return {"name": name, + "description": "This is my work request blueprint", + "tags": [], + "specifications_doc": "{}", + "do_cancel": False, + "draft": draft_url, + "specifications_template": template_url, + "scheduling_unit_blueprint": scheduling_unit_blueprint_url, + "subtasks": [], + "produced_by": [], + "consumed_by": []} + + def TaskRelationBlueprint(self, draft_url=None, template_url=None, input_url=None, output_url=None, consumer_url=None, producer_url=None): + if draft_url is None: + draft_url = self.post_data_and_get_url(self.TaskRelationDraft(), '/task_relation_draft/') + + if producer_url is None: + producer_url = self.post_data_and_get_url(self.TaskBlueprint(), '/task_blueprint/') + + if consumer_url is None: + consumer_url = self.post_data_and_get_url(self.TaskBlueprint(),'/task_blueprint/') + + if template_url is None: + template_url = self.post_data_and_get_url(self.WorkRelationSelectionTemplate(), '/work_relation_selection_template/') + + if input_url is None: + input_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/') + + if output_url is None: + output_url = self.post_data_and_get_url(self.TaskConnectors(), '/task_connectors/') + + # test data + return {"tags": [], + "selection_doc": "{}", + "dataformat": self.django_api_url + '/dataformat/MeasurementSet/', + "input": input_url, + "output": output_url, + "draft": draft_url, + "selection_template": template_url, + "producer": producer_url, + "consumer": consumer_url} + + def SubtaskTemplate(self, name="subtask1", schema=None): + if schema is None: + schema = {} + + return {"type": self.django_api_url + '/subtask_type/copy/', + "name": name, + "description": 'My one observation', + "version": 'v0.314159265359', + "schema": schema, + "realtime": True, + "queue": False, + "tags": ["TMSS", "TESTING"]} + + def DataproductSpecificationsTemplate(self, name="my_DataproductSpecificationsTemplate"): + return {"name": name, + "description": 'My one date', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + + def DataproductFeedbackTemplate(self, name="my_DataproductFeedbackTemplate"): + return {"name": name, + "description": 'My one date', + "version": 'v0.314159265359', + "schema": {"mykey": "my value"}, + "tags": ["TMSS", "TESTING"]} + + def DefaultSubtaskTemplates(self, name=None, template_url=None): + if template_url is None: + template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') + + return {"name": name if name else "default_template_%s" % uuid.uuid4(), + "template": template_url, + "tags": []} + + def Cluster(self, name=None): + return {"name": name if name else "Cluster %s" % uuid.uuid4(), + "description": 'My one cluster', + "location": "upstairs", + "tags": ['tmss', 'testing']} + + def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None): + if cluster_url is 
None: + cluster_url = self.post_data_and_get_url(self.Cluster(), '/cluster/') + + if task_blueprint_url is None: + task_blueprint_url = self.post_data_and_get_url(self.TaskBlueprint(), '/task_blueprint/') + + if specifications_template_url is None: + specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') + + if specifications_doc is None: + specifications_doc = "{}" + + return {"start_time": datetime.utcnow().isoformat(), + "stop_time": datetime.utcnow().isoformat(), + "state": self.django_api_url + '/subtask_state/scheduling/', + "specifications_doc": specifications_doc, + "task_blueprint": task_blueprint_url, + "specifications_template": specifications_template_url, + "tags": ["TMSS", "TESTING"], + "do_cancel": datetime.utcnow().isoformat(), + "priority": 1, + "schedule_method": self.django_api_url + '/schedule_method/manual/', + "cluster": cluster_url, + "scheduler_input_doc": "{}" } + + def SubtaskOutput(self, subtask_url=None, subtask_connector_url=None): + if subtask_url is None: + subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/') + + if subtask_connector_url is None: + subtask_connector_url = self.post_data_and_get_url(self.SubtaskConnector(), '/subtask_connector/') + + return {"subtask": subtask_url, + "connector": subtask_connector_url, + "tags": []} + + def SubtaskConnector(self, input_of_url=None, output_of_url=None): + if input_of_url is None: + input_of_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') + + if output_of_url is None: + output_of_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') + + return {"role": self.django_api_url + '/role/correlator/', + "datatype": self.django_api_url + '/datatype/image/', + "dataformats": [self.django_api_url + '/dataformat/Beamformed/'], + "output_of": output_of_url, + "input_of": input_of_url, + "tags": []} + + def Dataproduct(self, filename="my_filename", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None): + if specifications_template_url is None: + specifications_template_url = self.post_data_and_get_url(self.DataproductSpecificationsTemplate(), '/dataproduct_specifications_template/') + + if subtask_output_url is None: + subtask_output_url = self.post_data_and_get_url(self.SubtaskOutput(), '/subtask_output/') + + if dataproduct_feedback_template_url is None: + dataproduct_feedback_template_url = self.post_data_and_get_url(self.DataproductFeedbackTemplate(), '/dataproduct_feedback_template/') + + return {"filename": filename, + "directory": "/home/boskabouter/", + "dataformat": self.django_api_url + '/dataformat/Beamformed/', + "deleted_since": None, + "pinned_since": None, + "specifications_doc": "{}", + "specifications_template": specifications_template_url, + "tags": ["TMSS", "TESTING"], + "producer": subtask_output_url, + "do_cancel": datetime.utcnow().isoformat(), + "expected_size": 1234, + "size": 123, + "feedback_doc": "{}", + "feedback_template": dataproduct_feedback_template_url + } + + def AntennaSet(self, name="antennaset1"): + return {"name": name, + "description": 'My one observation', + "station_type": self.django_api_url + '/station_type/core/', + "rcus": [1,2,3,4,5], + "inputs": ['input1', 'input2'], + "tags": ['tmss', 'testing']} + + def DataproductTransform(self, input_dataproduct_url=None, output_dataproduct_url=None): + if input_dataproduct_url is None: + input_dataproduct_url = self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/') + + if 
+            output_dataproduct_url = self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')
+
+        return {"input": input_dataproduct_url,
+                "output": output_dataproduct_url,
+                "identity": True,
+                "tags": ['tmss', 'testing']}
+
+    def DataproductHash(self, algorithm_url=None, hash="my_hash", dataproduct_url=None):
+        if algorithm_url is None:
+            algorithm_url = self.django_api_url + '/algorithm/md5/'
+
+        if dataproduct_url is None:
+            dataproduct_url = self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')
+
+        return {"dataproduct": dataproduct_url,
+                "algorithm": algorithm_url,
+                "hash": hash,
+                "tags": ['tmss', 'testing']}
+
+
+    def DataproductArchiveInfo(self, storage_ticket="my_storage_ticket", dataproduct_url=None):
+        if dataproduct_url is None:
+            dataproduct_url = self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')
+
+        return {"dataproduct": dataproduct_url,
+                "storage_ticket": storage_ticket,
+                "public_since": datetime.utcnow().isoformat(),
+                "corrupted_since": datetime.utcnow().isoformat(),
+                "tags": ['tmss', 'testing']}
+
+    def SubtaskInputSelectionTemplate(self, name="my_SubtaskInputSelectionTemplate"):
+        return {"name": name,
+                "description": 'My one date',
+                "version": 'v0.314159265359',
+                "schema": {"mykey": "my value"},
+                "tags": ["TMSS", "TESTING"]}
+
+    def SubtaskInput(self, subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_connector_url=None, subtask_output_url=None, subtask_input_selection_template_url=None):
+        if subtask_url is None:
+            subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/')
+
+        if task_relation_blueprint_url is None:
+            task_relation_blueprint_url = self.post_data_and_get_url(self.TaskRelationBlueprint(), '/task_relation_blueprint/')
+
+        if dataproduct_urls is None:
+            dataproduct_urls = [self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/'),
+                                self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')]
+
+        if subtask_connector_url is None:
+            subtask_connector_url = self.post_data_and_get_url(self.SubtaskConnector(), '/subtask_connector/')
+
+        if subtask_output_url is None:
+            subtask_output_url = self.post_data_and_get_url(self.SubtaskOutput(), '/subtask_output/')
+
+        if subtask_input_selection_template_url is None:
+            subtask_input_selection_template_url = self.post_data_and_get_url(self.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/')
+
+        return {"subtask": subtask_url,
+                "task_relation_blueprint": task_relation_blueprint_url,
+                "connector": subtask_connector_url,
+                "producer": subtask_output_url,
+                "dataproducts": dataproduct_urls,
+                "selection_doc": "{}",
+                "selection_template": subtask_input_selection_template_url,
+                "tags": []}
+
+    def Filesystem(self, name="my_Filesystem", cluster_url=None):
+        if cluster_url is None:
+            cluster_url = self.post_data_and_get_url(self.Cluster(), '/cluster/')
+
+        return {"name": name,
+                "description": 'My one filesystem',
+                "capacity": 1111111111,
+                "cluster": cluster_url,
+                "tags": ['tmss', 'testing']}
+
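The factory methods above all follow one pattern: return a plain dict of POST-able test data, creating any missing related objects on demand through `post_data_and_get_url()`. A minimal, self-contained sketch of that pattern, assuming a hypothetical `TestDataCreator` class (the real class name and its constructor are not shown in this patch; the `Cluster`/`Filesystem` bodies mirror the ones above):

```python
import json
import uuid

import requests


class TestDataCreator:
    """Hypothetical stand-in for the patched test-data factory class."""

    def __init__(self, django_api_url, auth):
        self.django_api_url = django_api_url  # e.g. the Django test server's API root
        self.auth = auth                      # e.g. requests.auth.HTTPBasicAuth(...)

    def post_data_and_get_url(self, data, url_postfix):
        """POST the data dict to django_api_url + url_postfix, return the new object's URL."""
        response = requests.post(self.django_api_url + url_postfix, json=data, auth=self.auth)
        return json.loads(response.content.decode('utf-8'))['url']

    def Cluster(self, name=None):
        # just a POST-able dict, like the Cluster() factory in this patch
        return {"name": name if name else "Cluster %s" % uuid.uuid4(),
                "description": 'My one cluster',
                "location": "upstairs",
                "tags": ['tmss', 'testing']}

    def Filesystem(self, name="my_Filesystem", cluster_url=None):
        # related objects are created on demand, so a bare Filesystem() call
        # transitively POSTs a Cluster first and references it by URL
        if cluster_url is None:
            cluster_url = self.post_data_and_get_url(self.Cluster(), '/cluster/')
        return {"name": name,
                "description": 'My one filesystem',
                "capacity": 1111111111,
                "cluster": cluster_url,
                "tags": ['tmss', 'testing']}


# usage sketch (server URL and credentials are placeholders):
# creator = TestDataCreator('http://localhost:8000/api', requests.auth.HTTPBasicAuth('user', 'pass'))
# filesystem_url = creator.post_data_and_get_url(creator.Filesystem(), '/filesystem/')
```

The on-demand creation is what lets a test ask for a single leaf object and get the whole chain of prerequisites persisted automatically.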
diff --git a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
index c8dc5e904cc638dbc6f9e3f205f935ad6ad38b10..128cda77d2a57539fedd30fc43c0b8dba558ca34 100644
--- a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
+++ b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
@@ -39,16 +39,10 @@ def tearDownModule():
 # the methods below can be used to do HTTP REST calls to the django server and check the results
 ################################################################################################
 
-from requests.auth import HTTPBasicAuth
-AUTH = HTTPBasicAuth(tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password)
-BASE_URL = tmss_test_env.django_server.url
-
-import requests
 import json
-
-def post_data_and_get_url(data, url_postfix):
-    """POST the given data the BASE_URL+url_postfix, and return the response's url"""
-    return json.loads(requests.post(BASE_URL + url_postfix, json=data, auth=AUTH).content.decode('utf-8'))['url']
+import requests
+AUTH = requests.auth.HTTPBasicAuth(tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password)
+BASE_URL = tmss_test_env.django_server.url
 
 def _call_API_and_assert_expected_response(test_instance, url, call, data, expected_code, expected_content):
     """
@@ -71,16 +65,26 @@ def _call_API_and_assert_expected_response(test_instance, url, call, data, expec
     if response.status_code != expected_code:
         print("!!! Unexpected: [%s] - %s %s: %s" % (test_instance.id(), call, url, response.content.decode('utf-8').strip()))
     test_instance.assertEqual(response.status_code, expected_code)
-    r_dict = json.loads(response.content.decode('utf-8'))
-    for key, value in expected_content.items():
-        if key not in r_dict.keys():
-            print('!!! Missing key: %s in %s' % (key, r_dict.keys()))
-        test_instance.assertTrue(key in r_dict.keys())
-        if type(value) is list:
-            test_instance.assertEqual(sorted(value), sorted(r_dict[key])) # compare lists independent of ordering
-        else:
-            test_instance.assertEqual(value, r_dict[key])
-    return r_dict
+
+    content = response.content.decode('utf-8')
+
+    if response.status_code in range(200, 300):
+        r_dict = json.loads(content)
+        for key, value in expected_content.items():
+            if key not in r_dict.keys():
+                print('!!! Missing key: %s in %s' % (key, r_dict.keys()))
+            test_instance.assertTrue(key in r_dict.keys())
+            if type(value) is list:
+                test_instance.assertEqual(sorted(value), sorted(r_dict[key])) # compare lists independent of ordering
+            else:
+                test_instance.assertEqual(value, r_dict[key])
+        return r_dict
+
+    try:
+        return json.loads(content)
+    except ValueError:
+        return content
+
 
 def PUT_and_assert_expected_response(test_instance, url, data, expected_code, expected_content):
     """
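For illustration, this is how a test case typically drives the reworked helper: on a 2xx response it asserts every expected key/value pair and returns the parsed dict, otherwise it returns the body (JSON-decoded when possible). A hedged sketch; it assumes `BASE_URL` and `_call_API_and_assert_expected_response` are in scope (e.g. via a star import of this module), that `call` names the HTTP method as a string, and that a POST echoes the created object back with a 'url' field — the `/cluster/` endpoint and payload are illustrative only:

```python
import unittest


class ClusterAPITest(unittest.TestCase):  # hypothetical test case
    def test_cluster_roundtrip(self):
        data = {"name": "Cluster test", "description": "My one cluster",
                "location": "upstairs", "tags": ["tmss", "testing"]}
        # POST and expect 201 Created; on a 2xx response the helper asserts each
        # expected key/value and returns the parsed JSON body
        r_dict = _call_API_and_assert_expected_response(self, BASE_URL + '/cluster/', 'POST', data, 201, data)
        # a GET on the returned URL should echo the same content back
        _call_API_and_assert_expected_response(self, r_dict['url'], 'GET', None, 200, data)
```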
@@ -130,432 +134,3 @@ def DELETE_and_assert_gone(test_instance, url):
     if response.status_code != 404:
         print("!!! Unexpected: [%s] - %s %s: %s" % (test_instance.id(), 'GET', url, response.content))
     test_instance.assertEqual(response.status_code, 404)
-
-
-
-#######################################################
-# the methods below can be used to create test data
-# naming convention is: <django_model_name>_test_data()
-#######################################################
-
-from datetime import datetime
-import uuid
-
-def GeneratorTemplate_test_data(name="generatortemplate"):
-    return {"name": name,
-            "description": 'My one observation',
-            "version": 'v0.314159265359',
-            "schema": {"mykey": "my value"},
-            "create_function": 'Funky',
-            "tags": ["TMSS", "TESTING"]}
-
-def SchedulingUnitTemplate_test_data(name="schedulingunittemplate1"):
-    return {"name": name,
-            "description": 'My description',
-            "version": 'v0.314159265359',
-            "schema": {"mykey": "my value"},
-            "tags": ["TMSS", "TESTING"]}
-
-def TaskTemplate_test_data(name="tasktemplate1"):
-    return {"name": name,
-            "description": 'My one observation',
-            "version": 'v0.314159265359',
-            "schema": {"mykey": "my value"},
-            "tags": ["TMSS", "TESTING"],
-            "validation_code_js": "???"}
-
-def WorkRelationSelectionTemplate_test_data(name="workrelationselectiontemplate1"):
-    return {"name": name,
-            "description": 'My one observation',
-            "version": 'v0.314159265359',
-            "schema": {"mykey": "my value"},
-            "tags": ["TMSS", "TESTING"]}
-
-def TaskConnectors_test_data(role="correlator", input_of_url=None, output_of_url=None):
-    if input_of_url is None:
-        input_of_url = post_data_and_get_url(TaskTemplate_test_data(), '/task_template/')
-
-    if output_of_url is None:
-        output_of_url = post_data_and_get_url(TaskTemplate_test_data(), '/task_template/')
-
-    return {"role": BASE_URL + '/role/%s/'%role,
-            "datatype": BASE_URL + '/datatype/image/',
-            "dataformats": [BASE_URL + '/dataformat/Beamformed/'],
-            "output_of": output_of_url,
-            "input_of": input_of_url,
-            "tags": []}
-
-def DefaultTemplates_test_data(name="defaulttemplate"):
-    return {"name": name,
-            "template": None,
-            "tags": []}
-
-def Cycle_test_data(description="my cycle description"):
-    return {"name": 'my_cycle_' + str(uuid.uuid4()),
-            "description": description,
-            "tags": [],
-            "start": datetime.utcnow().isoformat(),
-            "stop": datetime.utcnow().isoformat(),
-            "number": 1,
-            "standard_hours": 2,
-            "expert_hours": 3,
-            "filler_hours": 4,
-            "projects": []}
-
-def Project_test_data(description="my project description"):
-    return {"name": 'my_project_' + str(uuid.uuid4()),
-            "description": description,
-            "tags": [],
-            "priority": 1,
-            "can_trigger": False,
-            "private_data": True}
-
-def SchedulingSet_test_data(name="my_scheduling_set", project_url=None, generator_template_url=None):
-    if project_url is None:
-        project_url = post_data_and_get_url(Project_test_data(), '/project/')
-
-    if generator_template_url is None:
-        generator_template_url = post_data_and_get_url(GeneratorTemplate_test_data(), '/generator_template/')
-
-    return {"name": name,
-            "description": "This is my scheduling set",
-            "tags": [],
-            "generator_doc": "{}",
-            "project": project_url,
-            "generator_template": generator_template_url,
-            "generator_source": None,
-            "scheduling_unit_drafts": []}
-
-def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft", scheduling_set_url=None, template_url=None):
-    if scheduling_set_url is None:
-        scheduling_set_url = post_data_and_get_url(SchedulingSet_test_data(), '/scheduling_set/')
-
-    if template_url is None:
-        template_url = post_data_and_get_url(SchedulingUnitTemplate_test_data(), '/scheduling_unit_template/')
-
-    return {"name": name,
-            "description": "This is my run draft",
-            "tags": [],
-            "requirements_doc": "{}",
-            "copy_reason": BASE_URL + '/copy_reason/template/',
-            "generator_instance_doc": "{}",
-            "copies": None,
-            "scheduling_set": scheduling_set_url,
-            "requirements_template": template_url,
-            "related_scheduling_unit_blueprint": [],
-            "task_drafts": []}
-
-def TaskDraft_test_data(name='my_task_draft', scheduling_unit_draft_url=None, template_url=None):
-    if scheduling_unit_draft_url is None:
-        scheduling_unit_draft_url = post_data_and_get_url(SchedulingUnitDraft_test_data(), '/scheduling_unit_draft/')
-
-    if template_url is None:
-        template_url = post_data_and_get_url(TaskTemplate_test_data(), '/task_template/')
-
-    return {"name": name,
-            "description": "This is my task draft",
-            "tags": [],
-            "specifications_doc": "{}",
-            "copy_reason": BASE_URL + '/copy_reason/template/',
-            "copies": None,
-            "scheduling_unit_draft": scheduling_unit_draft_url,
-            "specifications_template": template_url,
-            'related_task_blueprint': [],
-            'produced_by': [],
-            'consumed_by': []}
-
-
-def TaskRelationDraft_test_data(name="myTaskRelationDraft", producer_url=None, consumer_url=None, template_url=None, input_url=None, output_url=None):
-    if producer_url is None:
-        producer_url = post_data_and_get_url(TaskDraft_test_data(), '/task_draft/')
-
-    if consumer_url is None:
-        consumer_url = post_data_and_get_url(TaskDraft_test_data(), '/task_draft/')
-
-    if template_url is None:
-        template_url = post_data_and_get_url(WorkRelationSelectionTemplate_test_data(), '/work_relation_selection_template/')
-
-    if input_url is None:
-        input_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/')
-
-    if output_url is None:
-        output_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/')
-
-    return {"tags": [],
-            "selection_doc": "{}",
-            "dataformat": BASE_URL + "/dataformat/Beamformed/",
-            "producer": producer_url,
-            "consumer": consumer_url,
-            "input": input_url,
-            "output": output_url,
-            "selection_template": template_url,
-            'related_task_relation_blueprint': []}
-
-def SchedulingUnitBlueprint_test_data(name="my_scheduling_unit_blueprint", scheduling_unit_draft_url=None, template_url=None):
-    if scheduling_unit_draft_url is None:
-        scheduling_unit_draft_url = post_data_and_get_url(SchedulingUnitDraft_test_data(), '/scheduling_unit_draft/')
-
-    if template_url is None:
-        template_url = post_data_and_get_url(SchedulingUnitTemplate_test_data(), '/scheduling_unit_template/')
-
-    return {"name": name,
-            "description": "This is my run blueprint",
-            "tags": [],
-            "requirements_doc": "{}",
-            "do_cancel": False,
-            "draft": scheduling_unit_draft_url,
-            "requirements_template": template_url}
-
-def TaskBlueprint_test_data(name="my_TaskBlueprint", draft_url=None, template_url=None, scheduling_unit_blueprint_url=None):
-    if draft_url is None:
-        draft_url = post_data_and_get_url(TaskDraft_test_data(), '/task_draft/')
-
-    if template_url is None:
-        template_url = post_data_and_get_url(TaskTemplate_test_data(), '/task_template/')
-
-    if scheduling_unit_blueprint_url is None:
-        scheduling_unit_blueprint_url = post_data_and_get_url(SchedulingUnitBlueprint_test_data(), '/scheduling_unit_blueprint/')
-
-    return {"name": name,
-            "description": "This is my work request blueprint",
-            "tags": [],
-            "specifications_doc": "{}",
-            "do_cancel": False,
-            "draft": draft_url,
-            "specifications_template": template_url,
-            "scheduling_unit_blueprint": scheduling_unit_blueprint_url,
"subtasks": [], - "produced_by": [], - "consumed_by": []} - -def TaskRelationBlueprint_test_data(draft_url=None, template_url=None, input_url=None, output_url=None, consumer_url=None, producer_url=None): - if draft_url is None: - draft_url = post_data_and_get_url(TaskRelationDraft_test_data(), '/task_relation_draft/') - - if producer_url is None: - producer_url = post_data_and_get_url(TaskBlueprint_test_data(), '/task_blueprint/') - - if consumer_url is None: - consumer_url = post_data_and_get_url(TaskBlueprint_test_data(),'/task_blueprint/') - - if template_url is None: - template_url = post_data_and_get_url(WorkRelationSelectionTemplate_test_data(), '/work_relation_selection_template/') - - if input_url is None: - input_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/') - - if output_url is None: - output_url = post_data_and_get_url(TaskConnectors_test_data(), '/task_connectors/') - - # test data - return {"tags": [], - "selection_doc": "{}", - "dataformat": BASE_URL + '/dataformat/MeasurementSet/', - "input": input_url, - "output": output_url, - "draft": draft_url, - "selection_template": template_url, - "producer": producer_url, - "consumer": consumer_url} - -def SubtaskTemplate_test_data(name="subtask1"): - return {"type": BASE_URL + '/subtask_type/copy/', - "name": name, - "description": 'My one observation', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "realtime": True, - "queue": False, - "tags": ["TMSS", "TESTING"]} - -def DataproductSpecificationsTemplate_test_data(name="my_DataproductSpecificationsTemplate"): - return {"name": name, - "description": 'My one date', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def DataproductFeedbackTemplate_test_data(name="my_DataproductFeedbackTemplate"): - return {"name": name, - "description": 'My one date', - "version": 'v0.314159265359', - "schema": {"mykey": "my value"}, - "tags": ["TMSS", "TESTING"]} - -def DefaultSubtaskTemplates_test_data(name=None, template_url=None): - if template_url is None: - template_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/subtask_template/') - - return {"name": name if name else "default_template_%s" % uuid.uuid4(), - "template": template_url, - "tags": []} - -def Cluster_test_data(name=None): - return {"name": name if name else "Cluster %s" % uuid.uuid4(), - "description": 'My one cluster', - "location": "upstairs", - "tags": ['tmss', 'testing']} - -def Subtask_test_data(cluster_url=None, task_blueprint_url=None, specifications_template_url=None): - if cluster_url is None: - cluster_url = post_data_and_get_url(Cluster_test_data(), '/cluster/') - - if task_blueprint_url is None: - task_blueprint_url = post_data_and_get_url(TaskBlueprint_test_data(), '/task_blueprint/') - - if specifications_template_url is None: - specifications_template_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/subtask_template/') - - return {"start_time": datetime.utcnow().isoformat(), - "stop_time": datetime.utcnow().isoformat(), - "state": BASE_URL + '/subtask_state/scheduling/', - "specifications_doc": "{}", - "task_blueprint": task_blueprint_url, - "specifications_template": specifications_template_url, - "tags": ["TMSS", "TESTING"], - "do_cancel": datetime.utcnow().isoformat(), - "priority": 1, - "schedule_method": BASE_URL + '/schedule_method/manual/', - "cluster": cluster_url, - "scheduler_input_doc": "{}" } - -def SubtaskOutput_test_data(subtask_url=None, subtask_connector_url=None): - if 
-        subtask_url = post_data_and_get_url(Subtask_test_data(), '/subtask/')
-
-    if subtask_connector_url is None:
-        subtask_connector_url = post_data_and_get_url(SubtaskConnector_test_data(), '/subtask_connector/')
-
-    return {"subtask": subtask_url,
-            "connector": subtask_connector_url,
-            "tags": []}
-
-def SubtaskConnector_test_data(input_of_url=None, output_of_url=None):
-    if input_of_url is None:
-        input_of_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/subtask_template/')
-
-    if output_of_url is None:
-        output_of_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/subtask_template/')
-
-    return {"role": BASE_URL + '/role/correlator/',
-            "datatype": BASE_URL + '/datatype/image/',
-            "dataformats": [BASE_URL + '/dataformat/Beamformed/'],
-            "output_of": output_of_url,
-            "input_of": input_of_url,
-            "tags": []}
-
-def Dataproduct_test_data(filename="my_filename", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None):
-    if specifications_template_url is None:
-        specifications_template_url = post_data_and_get_url(SubtaskTemplate_test_data(), '/dataproduct_specifications_template/')
-
-    if subtask_output_url is None:
-        subtask_output_url = post_data_and_get_url(SubtaskOutput_test_data(), '/subtask_output/')
-
-    if dataproduct_feedback_template_url is None:
-        dataproduct_feedback_template_url = post_data_and_get_url(DataproductFeedbackTemplate_test_data(), '/dataproduct_feedback_template/')
-
-    return {"filename": "my.file",
-            "directory": "/home/boskabouter/",
-            "dataformat": BASE_URL + '/dataformat/Beamformed/',
-            "deleted_since": None,
-            "pinned_since": None,
-            "specifications_doc": "{}",
-            "specifications_template": specifications_template_url,
-            "tags": ["TMSS", "TESTING"],
-            "producer": subtask_output_url,
-            "do_cancel": datetime.utcnow().isoformat(),
-            "expected_size": 1234,
-            "size": 123,
-            "feedback_doc": "{}",
-            "feedback_template": dataproduct_feedback_template_url
-            }
-
-def AntennaSet_test_data(name="antennaset1"):
-    return {"name": name,
-            "description": 'My one observation',
-            "station_type": BASE_URL + '/station_type/core/',
-            "rcus": [1,2,3,4,5],
-            "inputs": ['input1', 'input2'],
-            "tags": ['tmss', 'testing']}
-
-def DataproductTransform_test_data(input_dataproduct_url=None, output_dataproduct_url=None):
-    if input_dataproduct_url is None:
-        input_dataproduct_url = post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/')
-
-    if output_dataproduct_url is None:
-        output_dataproduct_url = post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/')
-
-    return {"input": input_dataproduct_url,
-            "output": output_dataproduct_url,
-            "identity": True,
-            "tags": ['tmss', 'testing']}
-
-def DataproductHash_test_data(algorithm_url=None, hash="my_hash", dataproduct_url=None):
-    if algorithm_url is None:
-        algorithm_url = BASE_URL + '/algorithm/md5/'
-
-    if dataproduct_url is None:
-        dataproduct_url = post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/')
-
-    return {"dataproduct": dataproduct_url,
-            "algorithm": algorithm_url,
-            "hash": hash,
-            "tags": ['tmss', 'testing']}
-
-
-def DataproductArchiveInfo_test_data(storage_ticket="my_storage_ticket", dataproduct_url=None):
-    if dataproduct_url is None:
-        dataproduct_url = post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/')
-
-    return {"dataproduct": dataproduct_url,
-            "storage_ticket": storage_ticket,
-            "public_since": datetime.utcnow().isoformat(),
-            "corrupted_since": datetime.utcnow().isoformat(),
-            "tags": ['tmss', 'testing']}
-
-def SubtaskInputSelectionTemplate_test_data(name="my_SubtaskInputSelectionTemplate"):
-    return {"name": name,
-            "description": 'My one date',
-            "version": 'v0.314159265359',
-            "schema": {"mykey": "my value"},
-            "tags": ["TMSS", "TESTING"]}
-
-def SubtaskInput_test_data(subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_connector_url=None, subtask_output_url=None, subtask_input_selection_template_url=None):
-    if subtask_url is None:
-        subtask_url = post_data_and_get_url(Subtask_test_data(), '/subtask/')
-
-    if task_relation_blueprint_url is None:
-        task_relation_blueprint_url = post_data_and_get_url(TaskRelationBlueprint_test_data(), '/task_relation_blueprint/')
-
-    if dataproduct_urls is None:
-        dataproduct_urls = [post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/'),
-                            post_data_and_get_url(Dataproduct_test_data(), '/dataproduct/')]
-
-    if subtask_connector_url is None:
-        subtask_connector_url = post_data_and_get_url(SubtaskConnector_test_data(), '/subtask_connector/')
-
-    if subtask_output_url is None:
-        subtask_output_url = post_data_and_get_url(SubtaskOutput_test_data(), '/subtask_output/')
-
-    if subtask_input_selection_template_url is None:
-        subtask_input_selection_template_url = post_data_and_get_url(SubtaskInputSelectionTemplate_test_data(), '/subtask_input_selection_template/')
-
-    return {"subtask": subtask_url,
-            "task_relation_blueprint": task_relation_blueprint_url,
-            "connector": subtask_connector_url,
-            "producer": subtask_output_url,
-            "dataproducts": dataproduct_urls,
-            "selection_doc": "{}",
-            "selection_template": subtask_input_selection_template_url,
-            "tags": []}
-
-def Filesystem_test_data(name="my_Filesystem", cluster_url=None):
-    if cluster_url is None:
-        cluster_url = post_data_and_get_url(Cluster_test_data(), '/cluster/')
-
-    return {"name": name,
-            "description": 'My one filesystem',
-            "capacity": 1111111111,
-            "cluster": cluster_url,
-            "tags": ['tmss', 'testing']}
-
""" diff --git a/SubSystems/RAServices/Dockerfile b/SubSystems/RAServices/Dockerfile index bdca521f98783ed8d5d9f690bf140eca492615e1..3b09b0deab2f904ee783938f8c29b3adf8eb4216 100644 --- a/SubSystems/RAServices/Dockerfile +++ b/SubSystems/RAServices/Dockerfile @@ -10,7 +10,7 @@ RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x RUN yum install -y postgresql96 postgresql96-server postgresql96-devel ENV PATH /usr/pgsql-9.6/bin:$PATH -RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil django djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc +RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil django djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema RUN adduser lofarsys USER lofarsys