Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • ro/lofar
1 result
Show changes
Commits on Source (275)
Showing
with 1034 additions and 159 deletions
......@@ -207,6 +207,16 @@ build_ST_MAC:
paths:
- build/gnucxx11_opt
build_lofar_pulp_docker_image:
stage: build
script:
- docker build -t ci_pulp:$CI_COMMIT_SHORT_SHA Docker/pulp-base
- docker build --build-arg BASE_VERSION=$CI_COMMIT_SHORT_SHA --build-arg LOFAR_TAG=$CI_COMMIT_REF_NAME -t ci_lofar_pulp:$CI_COMMIT_SHORT_SHA Docker/lofar-pulp
interruptible: true
when: manual # pulp docker images take a long time to build, and hardly depend on the currently developed LOFAR code, so an automatic build for each LOFAR commit is not needed.
#
# UNIT TEST STAGE
#
......@@ -337,6 +347,23 @@ dockerize_TMSS:
- job: build_SCU
artifacts: true
dockerize_PULP:
# The dockerize step is meant to create small images ready for production with marginal overhead.
# For PULP we don't know (yet) what we can and cannot extract/remove from the ci_lofar_pulp image
# Therefore - for the time being - we just upload the full ci_lofar_pulp image to nexus, making it ready for deployment
stage: dockerize
allow_failure: true
script:
- docker login -u $CI_NEXUS_REGISTRY_USERNAME -p $CI_NEXUS_REGISTRY_PASSWORD $CI_NEXUS_REGISTRY
- docker tag ci_lofar_pulp:$CI_COMMIT_SHORT_SHA $CI_NEXUS_REGISTRY_LOCATION/ci_lofar_pulp:$CI_COMMIT_SHORT_SHA
- docker push $CI_NEXUS_REGISTRY_LOCATION/ci_lofar_pulp:$CI_COMMIT_SHORT_SHA
- docker logout $CI_NEXUS_REGISTRY
interruptible: true
needs:
- job: build_lofar_pulp_docker_image
artifacts: true
#
# INTEGRATION TEST STAGE
#
......@@ -578,6 +605,26 @@ deploy-CCU_MAC-prod:
- build_CCU_MAC
allow_failure: true
when: manual
deploy-PULP-prod:
stage: deploy-prod
before_script:
# needed so that gitlab-runner can login at cep4
- 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client git -y )'
- eval $(ssh-agent -s)
- echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
- ssh-keyscan head.cep4.control.lofar >> ~/.ssh/known_hosts
- chmod 644 ~/.ssh/known_hosts
script:
- ssh lofarsys@head.cep4.control.lofar "clush -S -P -w cpu[01-50] -w gpu[01-04] -w head[01-02] docker pull $CI_NEXUS_REGISTRY_LOCATION/ci_lofar_pulp:$CI_COMMIT_SHORT_SHA"
# the pulp pipeline is built to be run as lofar-pulp:$CI_COMMIT_REF_NAME through setting the LOFAR_TAG in build_lofar_pulp_docker_image
- ssh lofarsys@head.cep4.control.lofar "clush -S -P -w cpu[01-50] -w gpu[01-04] -w head[01-02] docker tag $CI_NEXUS_REGISTRY_LOCATION/ci_lofar_pulp:$CI_COMMIT_SHORT_SHA lofar-pulp:$CI_COMMIT_REF_NAME"
needs:
- dockerize_PULP
when: manual
only:
- tags
......
......@@ -25,7 +25,6 @@ lofar_add_bin_program(versiondocker versiondocker.cc)
set(DOCKER_TEMPLATE_DIRS
lofar-base
lofar-pipeline
lofar-pulp
lofar-outputproc
lofar-subbandtbbwriter)
......
......@@ -33,6 +33,7 @@ RUN pip3 install django-material django-viewflow
RUN echo "Installing Nodejs packages...." && \
curl -sL https://rpm.nodesource.com/setup_14.x | bash - && \
yum install -y nodejs && \
export NODE_OPTIONS="--max-old-space-size=8192" && \
npm -v && \
node -v && \
npm install -g serve
......
#
# base
#
FROM pulp:github-master
ENV INSTALLDIR /opt
# Tell image build information
ARG BASE_VERSION=latest
FROM ci_pulp:${BASE_VERSION}
ENV BASE_VERSION=${BASE_VERSION}
USER 0
COPY ["sudoers", "/etc/"]
......@@ -24,11 +28,12 @@ RUN apt-get update && apt-get install -y sasl2-bin libuuid1 libnss3 libnspr4 xqi
# Install
# QPID daemon legacy store would require: libaio-dev libdb5.1++-dev
RUN apt-get update && apt-get install -y ruby ruby-dev libsasl2-dev uuid-dev libxerces-c-dev libnss3-dev libnspr4-dev help2man libsslcommon2-dev libxqilla-dev libboost-program-options${BOOST_VERSION}-dev libboost-filesystem${BOOST_VERSION}-dev && \
mkdir ${INSTALLDIR}/qpid && \
svn --non-interactive -q co https://svn.astron.nl/LOFAR/trunk/LCS/MessageBus/qpid/ ${INSTALLDIR}/qpid; \
bash -c "HOME=/tmp ${INSTALLDIR}/qpid/local/sbin/build_qpid" && \
bash -c "strip ${INSTALLDIR}/qpid/{bin,lib}/* || true" && \
bash -c "rm -rf /tmp/sources" && \
mkdir -p /opt/qpid/src && \
git clone --depth=1 https://git.astron.nl/ro/lofar.git /opt/qpid/src && \
cd /opt/qpid/src/LCS/MessageBus/qpid && \
bash -c "HOME=/tmp /opt/qpid/src/LCS/MessageBus/qpid/local/sbin/build_qpid" && \
bash -c "strip /opt/qpid/{bin,lib}/* || true" && \
bash -c "rm -rf /tmp/sources /opt/qpid/src" && \
apt-get purge -y ruby ruby-dev libsasl2-dev uuid-dev libxerces-c-dev libnss3-dev libnspr4-dev help2man libsslcommon2-dev libxqilla-dev libboost-program-options${BOOST_VERSION}-dev libboost-filesystem${BOOST_VERSION}-dev && \
apt-get autoremove -y
......@@ -43,29 +48,33 @@ RUN aptitude install -y python3-kombu
# *******************
#
# Tell image build information
ENV LOFAR_BRANCH=${LOFAR_BRANCH_NAME} \
LOFAR_TAG=${LOFAR_TAG} \
LOFAR_REVISION=${LOFAR_REVISION} \
LOFAR_BUILDVARIANT=gnucxx11_optarch
ARG LOFAR_BUILDVARIANT=gnucxx11_optarch
ENV LOFAR_BUILDVARIANT=${LOFAR_BUILDVARIANT}
# Pulp is still python2, so we need a LOFAR version that still builds CEP with python2. We have a specific branch for that containing any patches on top of the old LOFAR version
ARG LOFAR_VERSION=LOFAR-Release-3_2_17-PULP
ENV LOFAR_VERSION=${LOFAR_VERSION}
# Install
#some are already installed, but we need boost, and RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison flex liblog4cplus-dev libhdf5-dev libblitz0-dev python-dev libxml2-dev pkg-config libunittest++-dev libxml++2.6-dev binutils-dev && \
RUN apt-get update && apt-get install -y liblog4cplus-dev libhdf5-dev libblitz0-dev libunittest++-dev libxml++2.6-dev binutils-dev && \
mkdir -p ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && \
cd ${INSTALLDIR}/lofar && \
svn --non-interactive -q co -r ${LOFAR_REVISION} -N ${LOFAR_BRANCH_URL} src; \
svn --non-interactive -q up src/CMake && \
cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && cmake -DBUILD_PACKAGES="Pipeline MessageBus OTDB_Services" -DBUILD_TESTING=OFF -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/lofar/ -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DQPID_ROOT_DIR=/opt/qpid/ -DUSE_OPENMP=True ${INSTALLDIR}/lofar/src/ && \
cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make -j ${J} && \
cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make install && \
bash -c "mkdir -p ${INSTALLDIR}/lofar/var/{log,run}" && \
bash -c "chmod a+rwx ${INSTALLDIR}/lofar/var/{log,run}" && \
bash -c "strip ${INSTALLDIR}/lofar/{bin,sbin,lib64}/* || true" && \
bash -c "rm -rf ${INSTALLDIR}/lofar/{build,src}" && \
mkdir -p /opt/lofar/build/${LOFAR_BUILDVARIANT} /opt/lofar/src && \
git clone --depth=1 --branch=${LOFAR_VERSION} https://git.astron.nl/ro/lofar.git /opt/lofar/src && \
cd /opt/lofar/build/${LOFAR_BUILDVARIANT} && cmake -DBUILD_PACKAGES="Pipeline MessageBus OTDB_Services" -DBUILD_TESTING=OFF -DCMAKE_INSTALL_PREFIX=/opt/lofar/ -DCASACORE_ROOT_DIR=/opt/casacore/ -DQPID_ROOT_DIR=/opt/qpid/ -DUSE_OPENMP=True /opt/lofar/src/ && \
cd /opt/lofar/build/${LOFAR_BUILDVARIANT} && make -j ${J} && \
cd /opt/lofar/build/${LOFAR_BUILDVARIANT} && make install && \
bash -c "mkdir -p /opt/lofar/var/{log,run}" && \
bash -c "chmod a+rwx /opt/lofar/var/{log,run}" && \
bash -c "strip /opt/lofar/{bin,sbin,lib64}/* || true" && \
bash -c "rm -rf /opt/lofar/{build,src}" && \
apt-get purge -y liblog4cplus-dev libhdf5-dev libblitz0-dev libunittest++-dev libxml++2.6-dev binutils-dev && \
apt-get autoremove -y
# The tag under which this image will be deployed
ARG LOFAR_TAG=${BASE_VERSION}
ENV LOFAR_TAG=${LOFAR_TAG}
COPY ["bashrc", "/opt/"]
COPY ["chuser.sh", "/usr/local/bin"]
ENTRYPOINT ["/usr/local/bin/chuser.sh"]
#!/bin/bash
# lofar
[ -r ${INSTALLDIR}/lofar/lofarinit.sh ] && source ${INSTALLDIR}/lofar/lofarinit.sh
[ -r /opt/lofar/lofarinit.sh ] && source /opt/lofar/lofarinit.sh
export PATH PYTHONPATH LD_LIBRARY_PATH LOFARROOT
# qpid
source ${INSTALLDIR}/qpid/.profile
source /opt/qpid/.profile
# lofarsoft
source ${LOFARSOFT}/devel_common/scripts/init.sh
This diff is collapsed.
#!/bin/bash
# Interactive shell setup for the pulp container.

# Source global definitions
if [ -f /etc/bashrc ]; then
. /etc/bashrc
fi
#
# Convenience aliases
alias xv='display -flatten -background white'
alias gva='gv -antialias'
alias vi='vim'
# Root of the installed pulsar software stack
export PSRHOME="/usr/local"
# ls colors: system-wide defaults first, then optional per-user override
if [ -f ${PSRHOME}/etc/dir_colors ]; then
eval `dircolors -b ${PSRHOME}/etc/dir_colors`
fi
if [ -f ${HOME}/.dir_colors ]; then
eval `dircolors -b ${HOME}/.dir_colors`
fi
# -XY: enable (trusted) X11 forwarding by default
alias ssh='ssh -XY'
alias ls='ls -F --color'
# Prompt: [user@host cwd]#  (user in cyan, host in blue)
PS1="[\[\033[36m\]\u\[\033[0m\]@\[\033[34m\]\h \[\033[0m\]\w]# "
# pulsar software definitions (system-wide)
if [ -f ${PSRHOME}/etc/psr.bashrc ]; then
source ${PSRHOME}/etc/psr.bashrc
fi
# pulsar software definitions (per-user overrides)
if [ -f ${HOME}/.psr.bashrc ]; then
source ${HOME}/.psr.bashrc
fi
#!/usr/bin/env bash
#
# Container entrypoint: make sure the current (possibly arbitrary) UID has a
# passwd/group entry and a home directory, set up the environment, then exec
# the requested command (or an interactive shell when no command was given).

# NOTE: UID is a readonly bash variable, already initialized to `id -u`;
# the previous `export UID=\`id -u\`` only produced a "readonly variable"
# error on every start, so we simply rely on bash's own value here.

# Configure user: fall back to the numeric UID if no USER was provided
if [ -z "${USER}" ]; then
    export USER="${UID}"
fi

# Create a home directory if the runtime did not provide one
if [ -z "${HOME}" ]; then
    export HOME="/home/${USER}"
    mkdir -p "${HOME}" && cd "${HOME}"
fi

# Add the user to the system databases, so tools that resolve the UID work
fgrep -q ":x:${UID}:" /etc/passwd || echo "${USER}:x:${UID}:${UID}::${HOME}:/bin/bash" >> /etc/passwd
fgrep -q ":x:${UID}:" /etc/group || echo "${USER}:x:${UID}:" >> /etc/group

# Set the environment
#[ -e /opt/bashrc ] && source /opt/bashrc
[ -e /usr/local/etc/bashrc ] && source /usr/local/etc/bashrc

# Run the requested command
if [ -z "$*" ]; then
    exec /bin/bash
else
    exec "$@"
fi
# Configuration file for the color ls utility
# This file goes in the /etc directory, and must be world readable.
# You can copy this file to .dir_colors in your $HOME directory to override
# the system defaults.
# COLOR needs one of these arguments: 'tty' colorizes output to ttys, but not
# pipes. 'all' adds color characters to all output. 'none' shuts colorization
# off.
COLOR tty
# Below, there should be one TERM entry for each termtype that is colorizable
TERM linux
TERM console
TERM con132x25
TERM con132x30
TERM con132x43
TERM con132x60
TERM con80x25
TERM con80x28
TERM con80x30
TERM con80x43
TERM con80x50
TERM con80x60
TERM xterm
TERM vt100
# EIGHTBIT, followed by '1' for on, '0' for off. (8-bit output)
EIGHTBIT 1
# Below are the color init strings for the basic file types. A color init
# string consists of one or more of the following numeric codes:
# Attribute codes:
# 00=none 01=bold 04=underscore 05=blink 07=reverse 08=concealed
# Text color codes:
# 30=black 31=red 32=green 33=yellow 34=blue 35=magenta 36=cyan 37=white
# Background color codes:
# 40=black 41=red 42=green 43=yellow 44=blue 45=magenta 46=cyan 47=white
NORMAL 00 # global default, although everything should be something.
FILE 00 # normal file
DIR 01;34 # directory
LINK 01;36 # symbolic link
FIFO 40;33 # pipe
SOCK 01;35 # socket
BLK 40;33;01 # block device driver
CHR 40;33;01 # character device driver
ORPHAN 01;05;37;41 # orphaned symlinks
MISSING 01;05;37;41 # ... and the files they point to
# This is for files with execute permission:
EXEC 00;32
# List any file extensions like '.gz' or '.tar' that you would like ls
# to colorize below. Put the extension, a space, and the color init string.
# (and any comments you want to add after a '#')
.cmd 01;32 # executables (bright green)
.exe 01;32
.com 01;32
.btm 01;32
.bat 01;32
.tar 01;31 # archives or compressed (bright red)
.tgz 01;31
.arj 01;31
.taz 01;31
.lzh 01;31
.zip 01;31
.z 01;31
.Z 01;31
.gz 01;31
.bz2 01;31
.jpg 01;35 # image formats
.gif 01;35
.bmp 01;35
.xbm 01;35
.xpm 01;35
.h 00;33
.hpp 00;33
.c 00;33
.cpp 00;33
.o 08;30
.dvi 00;33
.ps 00;35
.pdf 00;35
.aux 08;30
.log 00;33
#
# PULSAR ENV VARIABLES
#
# Sourced via the psr.bashrc hook in the container's bashrc; defines the
# locations of the pulsar software packages under PSRHOME and extends the
# search paths accordingly.
export OSTYPE=linux
export PSRHOME="/usr/local"
# Qt3 installation used by some of the legacy pulsar tools
export QTDIR="${PSRHOME}/src/qt3/qt"
export MANPATH="${QTDIR}/doc/man:${MANPATH}"
export LD_LIBRARY_PATH="${QTDIR}/lib:${PSRHOME}/qt3/lib:${LD_LIBRARY_PATH}"
export PATH="${QTDIR}/bin:${PSRHOME}/bin:${PATH}"
#
# Pgplot
#
export PGPLOT_DIR="${PSRHOME}/lib"
export PGPLOT_FONT="${PGPLOT_DIR}/grfont.dat"
#export PGPLOT_BACKGROUND="white"
export PGPLOT_DEV="/xwindow"
export PGPLOT_TYPE="cps"
# Per-package installation locations (all below PSRHOME)
export PSRCAT_FILE="${PSRHOME}/bin/psrcat.db"
export TEMPO="${PSRHOME}/src/tempo"
export TEMPO2="${PSRHOME}/share/tempo2/"
export PSRCHIVE="${PSRHOME}/src/psrchive"
export DSPSR="${PSRHOME}/src/dspsr"
export COASTGUARD_CFG="${PSRHOME}/src/coast_guard/configurations"
export SIGPROC="${PSRHOME}/src/sigproc-4.3"
export PRESTO="${PSRHOME}/src/presto"
export PSRCHIVE_CONFIG="${PSRHOME}/etc/psrchive.cfg"
export LOFAR_BF_PULSAR_SCRIPTS="${PSRHOME}/src/LOFAR-BF-pulsar-scripts"
export MSCORPOL="${PSRHOME}/src/mscorpol"
export PULP="${PSRHOME}/src/pulp"
#
# update PATH, LD_LIBRARY_PATH and PYTHONPATH
# (built from the package variables above, so order matters)
#
export PATH=".:${HOME}:${HOME}/bin:${PSRHOME}/bin:${SIGPROC}:${PRESTO}/bin:${TEMPO}/src:${TEMPO2}/bin:${PSRCHIVE}/bin:${PSRHOME}/src/coast_guard:${PSRHOME}/bin/lofar_antenna_state:${LOFAR_BF_PULSAR_SCRIPTS}:${LOFAR_BF_PULSAR_SCRIPTS}/simple-sh-scripts:${LOFAR_BF_PULSAR_SCRIPTS}/scheduling:${LOFAR_BF_PULSAR_SCRIPTS}/fluxcal:${LOFAR_BF_PULSAR_SCRIPTS}/LTA:${LOFAR_BF_PULSAR_SCRIPTS}/FE-map:${MSCORPOL}/mscorpol:${MSCORPOL}/lofar_element_response:${PULP}:${PATH}"
export LD_LIBRARY_PATH="${PGPLOT_DIR}:${PSRHOME}/lib:/usr/lib/x86_64-linux-gnu:${PSRHOME}/lib64:${PSRCHIVE}/lib:${PRESTO}/lib:${PRESTO}/lib/python:${SIGPROC}:${TEMPO2}/lib:${PSRHOME}/lib/instantclient_11_2:${PSRHOME}/src/dedisp/lib:${PSRHOME}/src/dedisp-multi/lib:${PSRHOME}/lib64/python2.7/site-packages/:${PSRHOME}/lib/python2.7/site-packages"
export PYTHONPATH="./:${PSRHOME}/bin:${PSRHOME}/src:${PRESTO}/lib/python:${PRESTO}/lib64/python:${PRESTO}/python:${PSRHOME}/lib/python2.7/site-packages/:${PSRHOME}/lib64/python2.7/site-packages/:${PSRHOME}/lib/python2.7/dist-packages/:${PSRHOME}/src/coast_guard:${PSRHOME}/bin/lofar_antenna_state:${LOFAR_BF_PULSAR_SCRIPTS}/fluxcal:${MSCORPOL}/mscorpol:${PULP}"
......@@ -22,6 +22,7 @@ import jsonschema
from copy import deepcopy
import requests
from datetime import datetime, timedelta
from lofar.common.util import dict_with_overrides
DEFAULT_MAX_SCHEMA_CACHE_AGE = timedelta(minutes=1)
......@@ -123,7 +124,7 @@ def add_defaults_to_json_object_for_schema(json_object: dict, schema: str, cache
copy_of_json_object['$schema'] = schema['$id']
# resolve $refs to fill in defaults for those, too
schema = resolved_refs(schema, cache=cache, max_cache_age=max_cache_age)
schema = resolved_remote_refs(schema, cache=cache, max_cache_age=max_cache_age)
# run validator, which populates the properties with defaults.
get_validator_for_schema(schema, add_defaults=True).validate(copy_of_json_object)
......@@ -157,23 +158,22 @@ def replace_host_in_urls(schema, new_base_url: str, keys=['$id', '$ref', '$schem
return schema
def resolve_path(schema, reference: str):
def get_sub_schema(schema: dict, reference: str):
'''resolve a JSON reference (f.e. /definitions/foo) in the schema and return the corresponding subschema.'''
parts = reference.strip('/').split('/')
subschema = schema
parts = reference.lstrip('#').strip('/').split('/')
if parts == ['']:
# reference to root
return schema
try:
subschema = schema
for part in parts:
subschema = subschema[part]
return subschema
except KeyError as e:
raise KeyError("Could not resolve path %s in schema %s" % (reference, schema)) from e
return subschema
def _fetch_url(url: str) -> str:
'''try to obtain the provided URL.'''
......@@ -189,8 +189,22 @@ def _fetch_url(url: str) -> str:
raise Exception("Could not get: %s" % (url,))
def _get_referenced_subschema(ref_url, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
'''fetch the schema given by the ref_url, and return both the schema, and the sub-schema given by the #/ path in the ref_url as a tuple'''
def _get_referenced_definition(ref_url, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
    '''Fetch the schema given by the remote ref_url, and return a tuple of the
    now-local ref (the path after the '#' anchor, e.g. "/definitions/foo") and
    the definition sub-schema it points to within the fetched schema.'''
    referenced_schema = _get_referenced_schema(ref_url, cache=cache, max_cache_age=max_cache_age)

    # split off the local '#/...' path part of the ref-url
    # (the url head and the anchor itself are not needed here)
    _, _, tail = ref_url.partition('#')

    # extract the definition sub-schema pointed to by the local path
    definition = get_sub_schema(referenced_schema, tail)

    return tail, definition
def _get_referenced_schema(ref_url, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
'''fetch the schema given by the ref_url, and return it'''
# deduct referred schema name and version from ref-value
head, anchor, tail = ref_url.partition('#')
......@@ -211,21 +225,99 @@ def _get_referenced_subschema(ref_url, cache: dict=None, max_cache_age: timedelt
# fetch url, and store in cache
referenced_schema = _fech_url_and_update_cache_entry_if_needed()
full_schema = referenced_schema
return referenced_schema
# extract sub-schema
referenced_schema = resolve_path(referenced_schema, tail)
return full_schema, referenced_schema
def resolved_refs(schema, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE, root_schema=None):
'''return the given schema with all $ref fields replaced by the referred json (sub)schema that they point to.'''
def resolved_remote_refs(schema, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
'''return the given schema with all remote $ref fields (to http://my.server.com/my/schema/#/definitions/...) replaced by the local $ref pointers to #/definitions/...'''
if cache is None:
cache = {}
schema_id = schema.get('$id', '').rstrip('/').rstrip('#')
local_refs_to_definition_map = {}
def _recursive_resolved_remote_refs(sub_schema):
if isinstance(sub_schema, list):
# recurse over each item in the list
return [_recursive_resolved_remote_refs(item) for item in sub_schema]
if isinstance(sub_schema, dict):
updated_sub_schema = {}
for key in sub_schema.keys():
# if the key is a remote ref, then fetch the definition and change it into a local definition and ref
if key=="$ref" and isinstance(sub_schema['$ref'], str) and sub_schema['$ref'].startswith('http'):
# resolve remote reference
ref_url = sub_schema['$ref']
# deduct and construct a replacement local_ref for the ref_url
schema_url, anchor, local_ref = ref_url.partition('#')
schema_url = schema_url.rstrip('/')
local_ref = '#'+local_ref
if schema_url==schema_id:
# self referencing
# just replace the full ref_url by a local_ref,
# no need to fetch the remote schema, no need to store the remote definition in local_refs_to_definition_map
updated_sub_schema['$ref'] = '#'+local_ref
else:
# truely remote reference to another schema
# make sure the new local_ref is unique by including the schema name
# and that the local_ref starts with an anchor
# the schema_name is extracted from the url_head, as the url_head without the http[s]:// part
schema_identifier = schema_url.split('://')[-1]
local_unique_ref = local_ref.replace('#/definitions/', '/definitions/').replace('/definitions/', '#/definitions/'+schema_identifier.strip('/')+'/')
# replace remote ref by new local_ref
updated_sub_schema['$ref'] = local_unique_ref
# fetch the remote schema and extract the definition, if not already known
if local_unique_ref not in local_refs_to_definition_map:
referenced_schema = _get_referenced_schema(ref_url, cache=cache, max_cache_age=max_cache_age)
# get **all*** definition for this referenced_schema (including the definition for local_ref)
definitions = get_sub_schema(referenced_schema, "#/definitions")
# replace all local references **within** the referenced_schema definitions by their longer unique'ified references
definitions = json.loads(json.dumps(definitions).replace('"#/definitions/', '"#/definitions/'+schema_identifier.strip('/')+'/'))
for definition_key, definition in definitions.items():
# store definition in local_refs_to_definition_map for later addition of all gathered definitions in the root schema
definition_unique_local_ref = '#/definitions/'+schema_identifier.strip('/')+'/'+definition_key
local_refs_to_definition_map[definition_unique_local_ref] = definition
else:
# key is not a (remote) $ref,
# just copy a recursively resolved key/value into the updated_sub_schema
updated_sub_schema[key] = _recursive_resolved_remote_refs(sub_schema[key])
return updated_sub_schema
# sub_schema is not a list or dict, so no need to resolve anything, just return it.
return sub_schema
# use the recursive helper method to replace the remote refs, and log the local_refs and definitions in local_refs_to_definition_map
updated_schema = _recursive_resolved_remote_refs(schema)
# add all local_definition_refs and definitions to the updated_schema definitions
for local_ref, definition in list(local_refs_to_definition_map.items()):
# the definition itself may contain remote refs, so resolve those as well
definition = _recursive_resolved_remote_refs(definition)
sub_schema = updated_schema
path_parts = local_ref.lstrip('#').strip('/').split('/')
for i, path_part in enumerate(path_parts):
if path_part not in sub_schema:
sub_schema[path_part] = {} if i < len(path_parts)-1 else definition
sub_schema = sub_schema[path_part]
return updated_schema
def resolved_local_refs(schema, root_schema: dict=None):
'''return the given schema with all local $ref fields (to #/definitions/...) replaced by the referred definition that they point to.'''
if root_schema is None:
# original schema, to lookup local references
root_schema = schema
if isinstance(schema, dict):
......@@ -234,23 +326,18 @@ def resolved_refs(schema, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX
if "$ref" in keys and isinstance(schema['$ref'], str):
ref = schema['$ref']
if ref.startswith('http'):
# resolve remote reference
referenced_root_schema, referenced_schema = _get_referenced_subschema(ref, cache=cache, max_cache_age=max_cache_age)
# ... recursively, as this may contain further local & remote references
updated_schema = resolved_refs(referenced_schema, cache, root_schema=referenced_root_schema)
elif ref.startswith('#/'):
if ref.startswith('#/'):
# resolve local reference, a-la "#/definitions/foo"
updated_schema = resolve_path(root_schema, ref[1:])
updated_schema = get_sub_schema(root_schema, ref[1:])
keys.remove("$ref")
for key in keys:
updated_schema[key] = resolved_refs(schema[key], cache, root_schema=root_schema)
updated_schema[key] = resolved_local_refs(schema[key], root_schema=root_schema)
return updated_schema
if isinstance(schema, list):
return [resolved_refs(item, cache, root_schema=root_schema) for item in schema]
return [resolved_local_refs(item, root_schema=root_schema) for item in schema]
return schema
......@@ -274,8 +361,8 @@ def get_refs(schema) -> set:
def validate_json_against_its_schema(json_object: dict, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
'''validate the give json object against its own schema (the URI/URL that its propery $schema points to)'''
schema_url = json_object['$schema']
_, schema_object = _get_referenced_subschema(schema_url, cache=cache, max_cache_age=max_cache_age)
return validate_json_against_schema(json_object, schema_object, cache=cache, max_cache_age=max_cache_age)
referenced_schema = _get_referenced_schema(schema_url, cache=cache, max_cache_age=max_cache_age)
return validate_json_against_schema(json_object, referenced_schema, cache=cache, max_cache_age=max_cache_age)
def validate_json_against_schema(json_string: str, schema: str, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
......@@ -302,7 +389,7 @@ def validate_json_against_schema(json_string: str, schema: str, cache: dict=None
raise jsonschema.exceptions.ValidationError("Invalid JSON: %s\n%s" % (str(e), schema))
# resolve $refs to fill in defaults for those, too
schema_object = resolved_refs(schema_object, cache=cache, max_cache_age=max_cache_age)
schema_object = resolved_remote_refs(schema_object, cache=cache, max_cache_age=max_cache_age)
# now do the actual validation
try:
......
......@@ -27,7 +27,7 @@ logging.basicConfig(format='%(asctime)s %(process)s %(threadName)s %(levelname)s
import unittest
import threading
import json
from lofar.common.json_utils import get_default_json_object_for_schema, replace_host_in_urls, resolved_refs, resolve_path
from lofar.common.json_utils import get_default_json_object_for_schema, replace_host_in_urls, resolved_remote_refs, resolved_local_refs, get_sub_schema
class TestJSONUtils(unittest.TestCase):
def test_empty_schema_yields_empty_object(self):
......@@ -69,27 +69,24 @@ class TestJSONUtils(unittest.TestCase):
"prop_a": 42,
"prop_b": 3.14}, json)
def test_resolve_path(self):
def test_get_sub_schema(self):
test_schema = { "one": { "two": { "three": "value" }, "foo": "bar" }, "foo": "bar" }
# resolve root
self.assertEqual(test_schema, resolve_path(test_schema, "/"))
self.assertEqual(test_schema, resolve_path(test_schema, ""))
self.assertEqual(test_schema, get_sub_schema(test_schema, "/"))
self.assertEqual(test_schema, get_sub_schema(test_schema, ""))
# resolve deeper
self.assertEqual(test_schema["one"], resolve_path(test_schema, "/one"))
self.assertEqual(test_schema["one"]["two"], resolve_path(test_schema, "/one/two"))
self.assertEqual("value", resolve_path(test_schema, "/one/two/three"))
self.assertEqual(test_schema["one"], get_sub_schema(test_schema, "/one"))
self.assertEqual(test_schema["one"]["two"], get_sub_schema(test_schema, "/one/two"))
self.assertEqual("value", get_sub_schema(test_schema, "/one/two/three"))
# check variants
self.assertEqual("value", resolve_path(test_schema, "/one/two/three/"))
self.assertEqual("value", resolve_path(test_schema, "one/two/three"))
self.assertEqual("value", get_sub_schema(test_schema, "/one/two/three/"))
self.assertEqual("value", get_sub_schema(test_schema, "one/two/three"))
def test_resolved_local_refs(self):
'''test if $refs to #/definitions are properly resolved'''
import http.server
import socketserver
from lofar.common.util import find_free_port
user_schema = {"definitions": {
"email": {
......@@ -114,7 +111,7 @@ class TestJSONUtils(unittest.TestCase):
}
} } }
resolved_user_schema = resolved_refs(user_schema)
resolved_user_schema = resolved_local_refs(user_schema)
self.assertNotEqual(user_schema['properties']['email'], resolved_user_schema['properties']['email'])
for key,value in user_schema['definitions']['email'].items():
......@@ -129,17 +126,35 @@ class TestJSONUtils(unittest.TestCase):
import socketserver
from lofar.common.util import find_free_port
port = find_free_port(8000)
port = find_free_port(8000, allow_reuse_of_lingering_port=False)
host = "127.0.0.1"
base_url = "http://%s:%s" % (host, port)
host_port = "%s:%s" % (host, port)
base_url = "http://"+host_port
base_schema = { "$id": base_url + "/base_schema.json",
"$schema": "http://json-schema.org/draft-06/schema#",
"definitions": {
"timestamp": {
"description": "A timestamp defined in UTC",
"type": "string",
"pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z?",
"format": "date-time"
},
"email": {
"type": "string",
"format": "email",
"pattern": "@example\\.com$" }
"pattern": "@example\\.com$" },
"account": {
"type": "object",
"properties": {
"email_address": {
"$ref": "#/definitions/email"
},
"creation_at": {
"$ref": "#/definitions/timestamp"
}
}
}
} }
user_schema = {"$id": base_url + "/user_schema.json",
......@@ -150,8 +165,8 @@ class TestJSONUtils(unittest.TestCase):
"name": {
"type": "string",
"minLength": 2 },
"email": {
"$ref": base_url + "/base_schema.json" + "#/definitions/email",
"user_account": {
"$ref": base_url + "/base_schema.json" + "#/definitions/account",
"extra_prop": "very important"
},
"other_emails": {
......@@ -184,22 +199,28 @@ class TestJSONUtils(unittest.TestCase):
thread = threading.Thread(target=httpd.serve_forever)
thread.start()
# the method-under-test
resolved_user_schema = resolved_refs(user_schema)
print('user_schema: ', json.dumps(user_schema, indent=2))
print('resolved_user_schema: ', json.dumps(resolved_user_schema, indent=2))
self.assertNotEqual(user_schema['properties']['email'], resolved_user_schema['properties']['email'])
for key,value in base_schema['definitions']['email'].items():
self.assertEqual(value, resolved_user_schema['properties']['email'][key])
self.assertTrue('extra_prop' in resolved_user_schema['properties']['email'])
self.assertEqual('very important', resolved_user_schema['properties']['email']['extra_prop'])
httpd.shutdown()
thread.join(timeout=2)
self.assertFalse(thread.is_alive())
try:
# the method-under-test
resolved_user_schema = resolved_remote_refs(user_schema)
print('base_schema: ', json.dumps(base_schema, indent=2))
print('user_schema: ', json.dumps(user_schema, indent=2))
print('resolved_user_schema: ', json.dumps(resolved_user_schema, indent=2))
for key, value in base_schema['definitions']['email'].items():
self.assertEqual(value, resolved_user_schema['definitions'][host_port]['base_schema.json']['email'][key])
for key, value in base_schema['definitions']['timestamp'].items():
self.assertEqual(value, resolved_user_schema['definitions'][host_port]['base_schema.json']['timestamp'][key])
for key, value in base_schema['definitions']['account'].items():
value = json.loads(json.dumps(value).replace('"#/definitions/', '"#/definitions/'+host_port+'/base_schema.json/'))
self.assertEqual(value, resolved_user_schema['definitions'][host_port]['base_schema.json']['account'][key])
self.assertTrue('extra_prop' in resolved_user_schema['properties']['user_account'])
self.assertEqual('very important', resolved_user_schema['properties']['user_account']['extra_prop'])
finally:
httpd.shutdown()
thread.join(timeout=2)
self.assertFalse(thread.is_alive())
def test_replace_host_in_ref_urls(self):
base_host = "http://foo.bar.com"
......
......@@ -61,10 +61,6 @@ class TestUtils(unittest.TestCase):
merged = dict_with_overrides(dict_a, dict_b)
self.assertEqual({'a': 1, 'b': {'c': 3, 'd': [4, 5, 6]}, 'e': [{'foo': 2}, {'bar': 3}]}, merged)
with self.assertRaises(AssertionError):
dict_b = {'e': []} #AssertionError should be raised cause list is not of same length as original
dict_with_overrides(dict_a, dict_b)
def main(argv):
unittest.main()
......
......@@ -247,15 +247,27 @@ def dict_with_overrides(org_dict: dict, overrides: dict) -> dict:
elif isinstance(override_value, list):
sub_list = new_dict.get(override_key, [])
assert isinstance(sub_list, list)
assert len(sub_list) == len(override_value)
for i in range(len(override_value)):
org_list_item = sub_list[i]
override_list_item = override_value[i]
if isinstance(org_list_item, dict) and isinstance(override_list_item, dict):
new_dict[override_key][i] = dict_with_overrides(org_list_item, override_list_item)
else:
new_dict[override_key][i] = override_list_item
if override_key not in new_dict:
new_dict[override_key] = []
if any([isinstance(item, dict) or isinstance(item, list) for item in override_value]):
# override_value is a list of with some/all recursible items which need recursion.
for i in range(len(override_value)):
override_list_item = override_value[i]
if i < len(sub_list):
org_list_item = sub_list[i]
if isinstance(org_list_item, dict) and isinstance(override_list_item, dict):
# recurse
override_list_item = dict_with_overrides(org_list_item, override_list_item)
new_dict[override_key][i] = override_list_item
else:
new_dict[override_key].append(override_list_item)
else:
# override_value is a list of 'plain' values which need no recursion. Just copy it.
new_dict[override_key] = override_value
else:
new_dict[override_key] = override_value
......
......@@ -42,7 +42,7 @@
#define DELAY_INDEX(s) (delayIndices[s])
//# Typedefs used to map input data on arrays
typedef double (*DelaysType)[1][NR_DELAYS][NR_TABS];
typedef double (*DelaysType)[NR_DELAYS][NR_TABS];
#ifdef FLYS_EYE
typedef float2 (*BandPassCorrectedType)[NR_INPUT_STATIONS][NR_CHANNELS][NR_SAMPLES_PER_CHANNEL][NR_POLARIZATIONS];
#else
......@@ -60,7 +60,6 @@ typedef float2 (*ComplexVoltagesType)[NR_CHANNELS][NR_SAMPLES_PER_CHANNEL][NR_T
* \param[in] delaysPtr 3D input array of complex valued delays to be applied to the correctData samples. There is a delay for each Sub-Array Pointing, station, and Tied Array Beam triplet.
* \param[in] delayIndices 1D input array of which stations to use out of delaysPtr, if a subset of the stations need to be beam formed
* \param[in] subbandFrequency central frequency of the subband
* \param[in] sap number (index) of the Sub-Array Pointing (aka (station) beam)
*
* Pre-processor input symbols (some are tied to the execution configuration)
* Symbol | Valid Values | Description
......@@ -84,8 +83,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
const unsigned *stationIndices, // lookup index for stations to use in samplesPtr
const void *delaysPtr,
const unsigned *delayIndices, // lookup index for stations to use in delaysPtr
double subbandFrequency,
unsigned sap)
double subbandFrequency)
{
ComplexVoltagesType complexVoltages = (ComplexVoltagesType) complexVoltagesPtr;
BandPassCorrectedType samples = (BandPassCorrectedType) samplesPtr;
......@@ -132,7 +130,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
fcomplex weight_00; // assign the weights to register variables
if (first_station + 0 < NR_OUTPUT_STATIONS) { // Number of station might be larger then 32:
// We then do multiple passes to span all stations
double delay = (*delays)[sap][DELAY_INDEX(first_station + 0)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 0)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_00 = make_float2(weight.x, weight.y);
}
......@@ -141,7 +139,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 2
fcomplex weight_01;
if (first_station + 1 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 1)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 1)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_01 = make_float2(weight.x, weight.y);
}
......@@ -150,7 +148,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 3
fcomplex weight_02;
if (first_station + 2 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 2)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 2)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_02 = make_float2(weight.x, weight.y);
}
......@@ -159,7 +157,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 4
fcomplex weight_03;
if (first_station + 3 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 3)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 3)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_03 = make_float2(weight.x, weight.y);
}
......@@ -168,7 +166,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 5
fcomplex weight_04;
if (first_station + 4 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 4)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 4)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_04 = make_float2(weight.x, weight.y);
}
......@@ -177,7 +175,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 6
fcomplex weight_05;
if (first_station + 5 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 5)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 5)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_05 = make_float2(weight.x, weight.y);
}
......@@ -186,7 +184,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 7
fcomplex weight_06;
if (first_station + 6 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 6)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 6)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_06 = make_float2(weight.x, weight.y);
}
......@@ -195,7 +193,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 8
fcomplex weight_07;
if (first_station + 7 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 7)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 7)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_07 = make_float2(weight.x, weight.y);
}
......@@ -204,7 +202,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 9
fcomplex weight_08;
if (first_station + 8 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 8)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 8)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_08 = make_float2(weight.x, weight.y);
}
......@@ -213,7 +211,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 10
fcomplex weight_09;
if (first_station + 9 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 9)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 9)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_09 = make_float2(weight.x, weight.y);
}
......@@ -222,7 +220,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 11
fcomplex weight_10;
if (first_station + 10 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 10)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 10)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_10 = make_float2(weight.x, weight.y);
}
......@@ -231,7 +229,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 12
fcomplex weight_11;
if (first_station + 11 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 11)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 11)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_11 = make_float2(weight.x, weight.y);
}
......@@ -240,7 +238,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 13
fcomplex weight_12;
if (first_station + 12 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 12)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 12)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_12 = make_float2(weight.x, weight.y);
}
......@@ -249,7 +247,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 14
fcomplex weight_13;
if (first_station + 13 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 13)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 13)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_13 = make_float2(weight.x, weight.y);
}
......@@ -258,7 +256,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 15
fcomplex weight_14;
if (first_station + 14 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 14)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 14)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_14 = make_float2(weight.x, weight.y);
}
......@@ -267,7 +265,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 16
fcomplex weight_15;
if (first_station + 15 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 15)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 15)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_15 = make_float2(weight.x, weight.y);
}
......@@ -276,7 +274,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 17
fcomplex weight_16;
if (first_station + 16 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 16)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 16)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_16 = make_float2(weight.x, weight.y);
}
......@@ -285,7 +283,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 18
fcomplex weight_17;
if (first_station + 17 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 17)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 17)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_17 = make_float2(weight.x, weight.y);
}
......@@ -294,7 +292,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 19
fcomplex weight_18;
if (first_station + 18 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 18)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 18)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_18 = make_float2(weight.x, weight.y);
}
......@@ -303,7 +301,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 20
fcomplex weight_19;
if (first_station + 19 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 19)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 19)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_19 = make_float2(weight.x, weight.y);
}
......@@ -312,7 +310,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 21
fcomplex weight_20;
if (first_station + 20 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 20)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 20)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_20 = make_float2(weight.x, weight.y);
}
......@@ -321,7 +319,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 22
fcomplex weight_21;
if (first_station + 21 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 21)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 21)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_21 = make_float2(weight.x, weight.y);
}
......@@ -330,7 +328,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 23
fcomplex weight_22;
if (first_station + 22 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 22)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 22)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_22 = make_float2(weight.x, weight.y);
}
......@@ -339,7 +337,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 24
fcomplex weight_23;
if (first_station + 23 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 23)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 23)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_23 = make_float2(weight.x, weight.y);
}
......@@ -348,7 +346,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 25
fcomplex weight_24;
if (first_station + 24 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 24)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 24)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_24 = make_float2(weight.x, weight.y);
}
......@@ -357,7 +355,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 26
fcomplex weight_25;
if (first_station + 25 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 25)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 25)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_25 = make_float2(weight.x, weight.y);
}
......@@ -366,7 +364,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 27
fcomplex weight_26;
if (first_station + 26 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 26)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 26)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_26 = make_float2(weight.x, weight.y);
}
......@@ -375,7 +373,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 28
fcomplex weight_27;
if (first_station + 27 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 27)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 27)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_27 = make_float2(weight.x, weight.y);
}
......@@ -384,7 +382,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 29
fcomplex weight_28;
if (first_station + 28 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 28)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 28)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_28 = make_float2(weight.x, weight.y);
}
......@@ -393,7 +391,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 30
fcomplex weight_29;
if (first_station + 29 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 29)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 29)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_29 = make_float2(weight.x, weight.y);
}
......@@ -402,7 +400,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 31
fcomplex weight_30;
if (first_station + 30 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 30)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 30)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_30 = make_float2(weight.x, weight.y);
}
......@@ -411,7 +409,7 @@ extern "C" __global__ void beamFormer( void *complexVoltagesPtr,
#if NR_STATIONS_PER_PASS >= 32
fcomplex weight_31;
if (first_station + 31 < NR_OUTPUT_STATIONS) {
double delay = (*delays)[sap][DELAY_INDEX(first_station + 31)][tab_or_zero];
double delay = (*delays)[DELAY_INDEX(first_station + 31)][tab_or_zero];
dcomplex weight = dphaseShift(frequency, delay);
weight_31 = make_float2(weight.x, weight.y);
}
......
......@@ -75,7 +75,7 @@ typedef fcomplex(*OutputDataType)[NR_STATIONS][NR_POLARIZATIONS][NR_CHANNELS][N
#endif
typedef fcomplex(*InputDataType)[NR_STATIONS][NR_POLARIZATIONS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS];
typedef const double(*DelaysType)[1][NR_DELAYS][NR_POLARIZATIONS]; // 2 Polarizations; in seconds
typedef const double(*DelaysType)[NR_DELAYS][NR_POLARIZATIONS]; // 2 Polarizations; in seconds
typedef const double2(*Phase0sType)[NR_STATIONS]; // 2 Polarizations; in radians
typedef const float(*BandPassFactorsType)[NR_CHANNELS];
......@@ -111,13 +111,12 @@ inline __device__ fcomplex sincos_d2f(double phi)
* 4D array [station][polarization][sample][channel][complex]
* of ::fcomplex
* @param[in] subbandFrequency center freqency of the subband
* @param[in] beam index number of the beam
* @param[in] delaysAtBeginPtr pointer to delay data of ::DelaysType,
* a 2D array [beam][station] of float2 (real:
* a 1D array [station] of float2 (real:
* 2 polarizations), containing delays in
* seconds at begin of integration period
* @param[in] delaysAfterEndPtr pointer to delay data of ::DelaysType,
* a 2D array [beam][station] of float2 (real:
* a 1D array [station] of float2 (real:
* 2 polarizations), containing delays in
* seconds after end of integration period
* @param[in] phase0sPt r pointer to phase offset data of
......@@ -134,7 +133,6 @@ extern "C" {
const fcomplex * filteredDataPtr,
const unsigned * delayIndices,
double subbandFrequency,
unsigned beam, // =nrSAPS
const double * delaysAtBeginPtr,
const double * delaysAfterEndPtr,
const double * phase0sPtr,
......@@ -226,8 +224,8 @@ extern "C" {
? subbandFrequency
: subbandFrequency - 0.5 * SUBBAND_BANDWIDTH + channel * (SUBBAND_BANDWIDTH / NR_CHANNELS);
const double2 delayAtBegin = make_double2((*delaysAtBegin)[beam][delayIdx][0], (*delaysAtBegin)[beam][delayIdx][1]);
const double2 delayAfterEnd = make_double2((*delaysAfterEnd)[beam][delayIdx][0], (*delaysAfterEnd)[beam][delayIdx][1]);
const double2 delayAtBegin = make_double2((*delaysAtBegin)[delayIdx][0], (*delaysAtBegin)[delayIdx][1]);
const double2 delayAfterEnd = make_double2((*delaysAfterEnd)[delayIdx][0], (*delaysAfterEnd)[delayIdx][1]);
// Calculate the angles to rotate for for the first and (beyond the) last sample.
//
......
......@@ -124,7 +124,7 @@ inline __device__ float2 sincos_d2f_select(double phi, int ri)
return make_float2((ri?s:c),(ri?c:-s));
}
typedef const double(*DelaysType)[1][NR_STABS][NR_POLARIZATIONS]; // 2 Polarizations; in seconds
typedef const double(*DelaysType)[NR_STABS][NR_POLARIZATIONS]; // 2 Polarizations; in seconds
typedef const double(*Phase0sType)[NR_STABS][NR_POLARIZATIONS]; // 2 Polarizations; in radians
#endif /* DOPPLER_CORRECTION */
......@@ -201,8 +201,8 @@ __global__ void FIR_filter( void *filteredDataPtr,
#ifdef DOPPLER_CORRECTION
DelaysType delaysAtBegin = (DelaysType)delaysAtBeginPtr;
DelaysType delaysAfterEnd = (DelaysType)delaysAfterEndPtr;
const double delayAtBegin = (*delaysAtBegin)[0][station][pol];
const double delayAfterEnd = (*delaysAfterEnd)[0][station][pol];
const double delayAtBegin = (*delaysAtBegin)[station][pol];
const double delayAfterEnd = (*delaysAfterEnd)[station][pol];
// Calculate the angles to rotate for for the first and (beyond the) last sample.
//
......
......@@ -18,8 +18,6 @@
//#
//# $Id: QuantizeOutput.cu 29617 2014-06-23 08:08:41Z mol $
#include <stdint.h>
#include <cooperative_groups.h>
namespace cg = cooperative_groups;
......@@ -41,11 +39,11 @@ namespace cg = cooperative_groups;
#endif
#if (NR_QUANTIZE_BITS == 16)
#define UINT uint16_t
#define INT int16_t
#define UINT unsigned short int
#define INT signed short int
#elif (NR_QUANTIZE_BITS == 8)
#define UINT uint8_t
#define INT int8_t
#define UINT unsigned char
#define INT signed char
#else
#error Precondition violated: invalid value for NR_QUANTIZE_BITS
#endif
......
......@@ -50,7 +50,6 @@ namespace LOFAR
unsigned nrDelays_,
unsigned nrChannels_,
unsigned nrSamplesPerChannel_,
unsigned nrSAPs_,
unsigned nrTABs_,
double subbandBandWidth_,
bool doFlysEye_,
......@@ -70,7 +69,6 @@ namespace LOFAR
nrChannels(nrChannels_),
nrSamplesPerChannel(nrSamplesPerChannel_),
nrSAPs(nrSAPs_),
nrTABs(nrTABs_),
subbandBandwidth(subbandBandWidth_),
doFlysEye(doFlysEye_)
......@@ -100,7 +98,7 @@ namespace LOFAR
(size_t) delayIndices.size() * sizeof delayIndices[0];
case BeamFormerKernel::BEAM_FORMER_DELAYS:
return
(size_t) nrSAPs * nrDelays *
(size_t) nrDelays *
nrTABs * sizeof(double);
default:
THROW(GPUProcException, "Invalid bufferType (" << bufferType << ")");
......@@ -151,10 +149,9 @@ namespace LOFAR
}
void BeamFormerKernel::enqueue(const BlockID &blockId,
double subbandFrequency, unsigned SAP)
double subbandFrequency)
{
setArg(5, subbandFrequency);
setArg(6, SAP);
Kernel::enqueue(blockId);
}
......
......@@ -57,7 +57,6 @@ namespace LOFAR
unsigned nrDelays,
unsigned nrChannels,
unsigned nrSamplesPerChannel,
unsigned nrSAPs,
unsigned nrTABs,
double subbandWidth,
bool doFlysEye,
......@@ -97,7 +96,7 @@ namespace LOFAR
const Parameters &param);
void enqueue(const BlockID &blockId,
double subbandFrequency, unsigned SAP);
double subbandFrequency);
gpu::DeviceMemory stationIndices;
gpu::DeviceMemory delayIndices;
......