Skip to content
Snippets Groups Projects
Commit 06af40e7 authored by Corné Lukken's avatar Corné Lukken
Browse files

Create pycommon from dump

parents
No related branches found
No related tags found
No related merge requests found
Pipeline #102506 failed
Pipeline: PyCommon

#102507

    Showing with 1049 additions and 0 deletions
    root = true
    [*]
    charset = utf-8
    end_of_line = lf
    indent_size = 4
    indent_style = space
    insert_final_newline = true
    trim_trailing_whitespace = true
    max_line_length = 120
    tab_width = 4
    ij_continuation_indent_size = 8
    ij_formatter_off_tag = @formatter:off
    ij_formatter_on_tag = @formatter:on
    ij_formatter_tags_enabled = false
    ij_smart_tabs = false
    ij_visual_guides = none
    ij_wrap_on_typing = false
    [{*.bash,*.sh,*.zsh}]
    indent_size = 2
    tab_width = 2
    ij_shell_binary_ops_start_line = false
    ij_shell_keep_column_alignment_padding = false
    ij_shell_minify_program = false
    ij_shell_redirect_followed_by_space = false
    ij_shell_switch_cases_indented = false
    ij_shell_use_unix_line_separator = true
    [{*.har,*.jsb2,*.jsb3,*.json,.babelrc,.eslintrc,.stylelintrc,bowerrc,jest.config}]
    indent_size = 2
    ij_json_array_wrapping = split_into_lines
    ij_json_keep_blank_lines_in_code = 0
    ij_json_keep_indents_on_empty_lines = false
    ij_json_keep_line_breaks = true
    ij_json_keep_trailing_comma = false
    ij_json_object_wrapping = split_into_lines
    ij_json_property_alignment = do_not_align
    ij_json_space_after_colon = true
    ij_json_space_after_comma = true
    ij_json_space_before_colon = false
    ij_json_space_before_comma = false
    ij_json_spaces_within_braces = false
    ij_json_spaces_within_brackets = false
    ij_json_wrap_long_lines = false
    [{*.markdown,*.md}]
    ij_markdown_force_one_space_after_blockquote_symbol = true
    ij_markdown_force_one_space_after_header_symbol = true
    ij_markdown_force_one_space_after_list_bullet = true
    ij_markdown_force_one_space_between_words = true
    ij_markdown_insert_quote_arrows_on_wrap = true
    ij_markdown_keep_indents_on_empty_lines = false
    ij_markdown_keep_line_breaks_inside_text_blocks = true
    ij_markdown_max_lines_around_block_elements = 1
    ij_markdown_max_lines_around_header = 1
    ij_markdown_max_lines_between_paragraphs = 1
    ij_markdown_min_lines_around_block_elements = 1
    ij_markdown_min_lines_around_header = 1
    ij_markdown_min_lines_between_paragraphs = 1
    ij_markdown_wrap_text_if_long = true
    ij_markdown_wrap_text_inside_blockquotes = true
    [{*.py,*.pyw}]
    max_line_length = 88
    ij_python_align_collections_and_comprehensions = true
    ij_python_align_multiline_imports = true
    ij_python_align_multiline_parameters = true
    ij_python_align_multiline_parameters_in_calls = false
    ij_python_blank_line_at_file_end = true
    ij_python_blank_lines_after_imports = 1
    ij_python_blank_lines_after_local_imports = 0
    ij_python_blank_lines_around_class = 1
    ij_python_blank_lines_around_method = 1
    ij_python_blank_lines_around_top_level_classes_functions = 2
    ij_python_blank_lines_before_first_method = 0
    ij_python_call_parameters_new_line_after_left_paren = true
    ij_python_call_parameters_right_paren_on_new_line = true
    ij_python_call_parameters_wrap = normal
    ij_python_dict_alignment = 2
    ij_python_dict_new_line_after_left_brace = true
    ij_python_dict_new_line_before_right_brace = true
    ij_python_dict_wrapping = 1
    ij_python_from_import_new_line_after_left_parenthesis = true
    ij_python_from_import_new_line_before_right_parenthesis = true
    ij_python_from_import_parentheses_force_if_multiline = true
    ij_python_from_import_trailing_comma_if_multiline = false
    ij_python_from_import_wrapping = 1
    ij_python_hang_closing_brackets = false
    ij_python_keep_blank_lines_in_code = 1
    ij_python_keep_blank_lines_in_declarations = 1
    ij_python_keep_indents_on_empty_lines = false
    ij_python_keep_line_breaks = true
    ij_python_method_parameters_new_line_after_left_paren = true
    ij_python_method_parameters_right_paren_on_new_line = true
    ij_python_method_parameters_wrap = normal
    ij_python_new_line_after_colon = true
    ij_python_new_line_after_colon_multi_clause = true
    ij_python_optimize_imports_always_split_from_imports = false
    ij_python_optimize_imports_case_insensitive_order = false
    ij_python_optimize_imports_join_from_imports_with_same_source = false
    ij_python_optimize_imports_sort_by_type_first = true
    ij_python_optimize_imports_sort_imports = true
    ij_python_optimize_imports_sort_names_in_from_imports = false
    ij_python_space_after_comma = true
    ij_python_space_after_number_sign = true
    ij_python_space_after_py_colon = true
    ij_python_space_before_backslash = true
    ij_python_space_before_comma = false
    ij_python_space_before_for_semicolon = false
    ij_python_space_before_lbracket = false
    ij_python_space_before_method_call_parentheses = false
    ij_python_space_before_method_parentheses = false
    ij_python_space_before_number_sign = true
    ij_python_space_before_py_colon = false
    ij_python_space_within_empty_method_call_parentheses = false
    ij_python_space_within_empty_method_parentheses = false
    ij_python_spaces_around_additive_operators = true
    ij_python_spaces_around_assignment_operators = true
    ij_python_spaces_around_bitwise_operators = true
    ij_python_spaces_around_eq_in_keyword_argument = false
    ij_python_spaces_around_eq_in_named_parameter = false
    ij_python_spaces_around_equality_operators = true
    ij_python_spaces_around_multiplicative_operators = true
    ij_python_spaces_around_power_operator = false
    ij_python_spaces_around_relational_operators = true
    ij_python_spaces_around_shift_operators = true
    ij_python_spaces_within_braces = false
    ij_python_spaces_within_brackets = false
    ij_python_spaces_within_method_call_parentheses = false
    ij_python_spaces_within_method_parentheses = false
    ij_python_use_continuation_indent_for_arguments = true
    ij_python_use_continuation_indent_for_collection_and_comprehensions = false
    ij_python_use_continuation_indent_for_parameters = true
    ij_python_wrap_long_lines = false
    [{*.yaml,*.yml}]
    indent_size = 2
    ij_yaml_align_values_properties = do_not_align
    ij_yaml_autoinsert_sequence_marker = true
    ij_yaml_block_mapping_on_new_line = false
    ij_yaml_indent_sequence_value = true
    ij_yaml_keep_indents_on_empty_lines = false
    ij_yaml_keep_line_breaks = true
    ij_yaml_sequence_on_new_line = false
    ij_yaml_space_before_colon = false
    ij_yaml_spaces_within_braces = true
    ij_yaml_spaces_within_brackets = true
    dist/*
    *.egg-info
    *.pyc
    .tox
    .coverage
    coverage.xml
    htmlcov/*
    build
    dist
    # Documentation
    docs/source/source_documentation
    !docs/source/source_documentation/index.rst
    docs/build
    # Setuptools SCM
    pycommon/_version.py
    # IDE configuration
    .vscode
    .idea
    default:
    image: $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG
    before_script:
    - python --version # For debugging
    cache:
    paths:
    - .cache/pip
    # Do not cache .tox, to recreate virtualenvs for every step
    stages:
    - prepare
    - lint
    # check if this needs to be a separate step
    # - build_extensions
    - test
    - package
    - images
    - integration
    - publish # publish instead of deploy
    # Caching of dependencies to speed up builds
    variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    include:
    - template: Security/SAST.gitlab-ci.yml
    - template: Security/Dependency-Scanning.gitlab-ci.yml
    - template: Security/Secret-Detection.gitlab-ci.yml
    # Prepare image to run ci on
    trigger_prepare:
    stage: prepare
    trigger:
    strategy: depend
    include: .prepare.gitlab-ci.yml
    run_black:
    stage: lint
    script:
    - tox -e black
    allow_failure: true
    run_flake8:
    stage: lint
    script:
    - tox -e pep8
    allow_failure: true
    run_pylint:
    stage: lint
    script:
    - tox -e pylint
    allow_failure: true
    # build_extensions:
    # stage: build_extensions
    # script:
    # - echo "build fortran/c/cpp extension source code"
    sast:
    variables:
    SAST_EXCLUDED_ANALYZERS: brakeman, flawfinder, kubesec, nodejs-scan, phpcs-security-audit,
    pmd-apex, security-code-scan, sobelow, spotbugs
    stage: test
    dependency_scanning:
    # override default before_script, job won't have Python available
    before_script:
    - uname
    secret_detection:
    # override default before_script, job won't have Python available
    before_script:
    - uname
    # Basic setup for all Python versions for which we don't have a base image
    .run_unit_test_version_base:
    before_script:
    - python --version # For debugging
    - python -m pip install --upgrade pip
    - python -m pip install --upgrade tox twine
    # Run all unit tests for Python versions except the base image
    run_unit_tests:
    extends: .run_unit_test_version_base
    stage: test
    image: python:3.${PY_VERSION}
    script:
    - tox -e py3${PY_VERSION}
    parallel:
    matrix: # use the matrix for testing
    - PY_VERSION: [8, 9, 10, 11]
    # Run code coverage on the base image thus also performing unit tests
    run_unit_tests_coverage:
    stage: test
    script:
    - tox -e coverage
    coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
    artifacts:
    reports:
    coverage_report:
    coverage_format: cobertura
    path: coverage.xml
    paths:
    - htmlcov/*
    package_files:
    stage: package
    artifacts:
    expire_in: 1w
    paths:
    - dist/*
    script:
    - tox -e build
    package_docs:
    stage: package
    artifacts:
    expire_in: 1w
    paths:
    - docs/build/*
    script:
    - tox -e docs
    docker_build:
    stage: images
    image: docker:latest
    needs:
    - package_files
    tags:
    - dind
    before_script: []
    script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker build -f docker/pycommon/Dockerfile . --build-arg BUILD_ENV=copy --tag $CI_REGISTRY_IMAGE/pycommon:$CI_COMMIT_REF_SLUG
    # enable this push line once you have configured docker registry cleanup policy
    # - docker push $CI_REGISTRY_IMAGE/pycommon:$CI_COMMIT_REF_SLUG
    run_integration_tests:
    stage: integration
    allow_failure: true
    needs:
    - package_files
    script:
    - echo "make sure to move out of source dir"
    - echo "install package from filesystem (or use the artefact)"
    - echo "run against foreign systems (e.g. databases, cwl etc.)"
    - exit 1
    publish_on_gitlab:
    stage: publish
    environment: gitlab
    needs:
    - package_files
    when: manual
    rules:
    - if: $CI_COMMIT_TAG
    script:
    - echo "run twine for gitlab"
    - |
    TWINE_PASSWORD=${CI_JOB_TOKEN} \
    TWINE_USERNAME=gitlab-ci-token \
    python -m twine upload \
    --repository-url ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi dist/*
    publish_on_test_pypi:
    stage: publish
    environment: pypi-test
    needs:
    - package_files
    when: manual
    rules:
    - if: '$CI_COMMIT_TAG && $CI_COMMIT_REF_PROTECTED == "true"'
    script:
    - echo "run twine for test pypi"
    # - |
    # TWINE_PASSWORD=${PIPY_TOKEN} \
    # TWINE_USERNAME=${PIPY_USERNAME} \
# TODO: replace URL with a PyPI URL
    # python -m twine upload \
    # --repository-url ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi dist/*
    - exit 1
    publish_on_pypi:
    stage: publish
    environment: pypi
    needs:
    - package_files
    when: manual
    rules:
    - if: '$CI_COMMIT_TAG && $CI_COMMIT_REF_PROTECTED == "true"'
    script:
    - echo "run twine for pypi"
    # - |
    # TWINE_PASSWORD=${PIPY_TOKEN} \
    # TWINE_USERNAME=${PIPY_USERNAME} \
# TODO: replace URL with a PyPI URL
    # python -m twine upload \
    # --repository-url ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi dist/*
    - exit 1
    publish_to_readthedocs:
    stage: publish
    allow_failure: true
    environment: readthedocs
    needs:
    - package_docs
    when: manual
    rules:
    - if: '$CI_COMMIT_TAG && $CI_COMMIT_REF_PROTECTED == "true"'
    script:
    - echo "scp docs/* ???"
    - exit 1
    release_job:
    stage: publish
    image: registry.gitlab.com/gitlab-org/release-cli:latest
    rules:
    - if: '$CI_COMMIT_TAG && $CI_COMMIT_REF_PROTECTED == "true"'
    script:
    - echo "running release_job"
    release:
    tag_name: '$CI_COMMIT_TAG'
    description: '$CI_COMMIT_TAG - $CI_COMMIT_TAG_MESSAGE'
    stages:
    - build
    build_ci_runner_image:
    stage: build
    image: docker
    tags:
    - dind
    script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - |
    if docker pull $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG; then
    docker build --cache-from $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG --tag $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG docker/ci-runner
    else
    docker pull $CI_REGISTRY_IMAGE/ci-build-runner:latest || true
    docker build --cache-from $CI_REGISTRY_IMAGE/ci-build-runner:latest --tag $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG docker/ci-runner
    fi
    - docker push $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG # push the image
    - |
    if [[ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]]; then
    docker image tag $CI_REGISTRY_IMAGE/ci-build-runner:$CI_COMMIT_REF_SLUG $CI_REGISTRY_IMAGE/ci-build-runner:latest
    docker push $CI_REGISTRY_IMAGE/ci-build-runner:latest
    fi
    This diff is collapsed.
    include LICENSE
    include README.md
    recursive-include docs *
    recursive-exclude tests *
    # PyCommon
    ![Build status](git.astron.nl/tmss/libraries/pycommon/badges/main/pipeline.svg)
    ![Test coverage](git.astron.nl/tmss/libraries/pycommon/badges/main/coverage.svg)
    <!-- ![Latest release](https://git.astron.nl/templates/python-package/badges/main/release.svg) -->
An example repository of a CI/CD pipeline for building, testing and publishing a Python package.
    ## Installation
    ```
    pip install .
    ```
    ## Setup
    One time template setup should include configuring the docker registry to regularly cleanup old images of
    the CI/CD pipelines. And you can consider creating protected version tags for software releases:
    1. [Cleanup Docker Registry Images](https://git.astron.nl/groups/templates/-/wikis/Cleanup-Docker-Registry-Images)
2. [Setup Protected Version Tags](https://git.astron.nl/groups/templates/-/wikis/Setting-up-Protected-Version-Tags)
    Once the cleanup policy for docker registry is setup you can uncomment the `docker push` comment in the `.gitlab-ci.yml`
    file from the `docker_build` job. This will allow to download minimal docker images with your Python package installed.
    ## Usage
    ```python
    from pycommon import cool_module
    cool_module.greeter() # prints "Hello World"
    ```
    ## Contributing
    To contribute, please create a feature branch and a "Draft" merge request.
    Upon completion, the merge request should be marked as ready and a reviewer
    should be assigned.
    Verify your changes locally and be sure to add tests. Verifying local
    changes is done through `tox`.
    ```pip install tox```
With tox the same jobs as run on the CI/CD pipeline can be run. These
    include unit tests and linting.
    ```tox```
    To automatically apply most suggested linting changes execute:
    ```tox -e format```
    ## License
    This project is licensed under the Apache License Version 2.0
# CI build-runner image used by the GitLab pipeline: Python 3.12 with an
# up-to-date pip plus tox and twine preinstalled, so lint/test/package
# jobs do not reinstall tooling on every run.
FROM python:3.12
RUN python -m pip install --upgrade pip
RUN python -m pip install --upgrade tox twine
    ARG BUILD_ENV=no_copy
    FROM python:3.11 AS build_no_copy
    ADD ../../requirements.txt .
    COPY ../.. /work
    RUN rm -r /work/dist | true
    RUN python -m pip install --user tox
    WORKDIR /work
    RUN python -m tox -e build
    FROM python:3.11 AS build_copy
    COPY dist /work/dist
    FROM build_${BUILD_ENV} AS build
    FROM python:3.11-slim
    COPY --from=build /work/dist /dist
    RUN python -m pip install /dist/*.whl
    #!/usr/bin/env python3
    # Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
    # SPDX-License-Identifier: Apache-2.0
    import os
    file_dir = os.path.dirname(os.path.realpath(__file__))
    clean_dir = os.path.join(file_dir, "source", "source_documentation")
    print(f"Cleaning.. {clean_dir}/*")
    if not os.path.exists(clean_dir):
    exit()
    for file_name in os.listdir(clean_dir):
    file = os.path.join(clean_dir, file_name)
    if file_name == "index.rst":
    continue
    print(f"Removing.. {file}")
    os.remove(file)
    sphinx!=1.6.6,!=1.6.7,>=1.6.5 # BSD
    sphinx-rtd-theme>=0.4.3 #MIT
    sphinxcontrib-apidoc>=0.3.0 #BSD
    myst-parser>=2.0 # MIT
    docutils>=0.17 # BSD
    # Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
    # SPDX-License-Identifier: Apache-2.0
    import os
    from pycommon import __version__
    # -- General configuration ----------------------------------------------------
    # Add any Sphinx extension module names here, as strings. They can be
    # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
    extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.viewcode",
    "sphinxcontrib.apidoc",
    "sphinx_rtd_theme",
    "myst_parser"
    ]
    # Assumes tox is used to call sphinx-build
    project_root_directory = os.getcwd()
    apidoc_module_dir = "../../pycommon"
    apidoc_output_dir = "source_documentation"
    apidoc_excluded_paths = []
    apidoc_separate_modules = True
    apidoc_toc_file = False
    # This should include private methods but does not work
    # https://github.com/sphinx-contrib/apidoc/issues/14
    apidoc_extra_args = ["--private"]
    # The suffix of source filenames.
    source_suffix = [".rst"]
    # The master toctree document.
    master_doc = "index"
    # General information about the project.
    project = "PyCommon"
    copyright = "2023, ASTRON"
    # openstackdocstheme options
    repository_name = "git.astron.nl/tmss/libraries/pycommon"
    bug_project = "none"
    bug_tag = ""
    html_last_updated_fmt = "%Y-%m-%d %H:%M"
    # If true, '()' will be appended to :func: etc. cross-reference text.
    add_function_parentheses = True
    version = __version__
    modindex_common_prefix = ["pycommon."]
    # If true, the current module name will be prepended to all description
    # unit titles (such as .. function::).
    add_module_names = True
    # The name of the Pygments (syntax highlighting) style to use.
    pygments_style = "sphinx"
    # -- Options for HTML output --------------------------------------------------
    # The theme to use for HTML and HTML Help pages. Major themes that come with
    # Sphinx are currently 'default' and 'sphinxdoc'.
    # html_theme_path = ["."]
    html_theme = "sphinx_rtd_theme"
    html_static_path = ["static"]
    html_css_files = [
    "css/custom.css",
    ]
    # Output file base name for HTML help builder.
    htmlhelp_basename = "%sdoc" % project
    # Conf.py variables exported to sphinx rst files access using |NAME|
    variables_to_export = [
    "project",
    "copyright",
    "version",
    ]
    # Write to rst_epilog to export `variables_to_export` extract using `locals()`
    # https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-rst_epilog
    frozen_locals = dict(locals())
    rst_epilog = "\n".join(
    map(
    lambda x: f".. |{x}| replace:: {frozen_locals[x]}", # noqa: F821
    variables_to_export,
    )
    )
    # Pep is not able to determine that frozen_locals always exists so noqa
    del frozen_locals
    ====================================================
    Welcome to the documentation of PyCommon
    ====================================================
    ..
    To define more variables see rst_epilog generation in conf.py
    Documentation for version: |version|
    Contents:
    .. toctree::
    :maxdepth: 2
    readme
    source_documentation/index
    .. include:: ../../README.md
    :parser: myst_parser.sphinx_
    .orange { color: #c65d09; }
    .green { color: #5dc609; }
    .yellow { color: #c6c609; }
    .bolditalic {
    font-weight: bold;
    font-style: italic;
    }
    .rst-content code, .rst-content tt, code {
    white-space: break-spaces;
    }
    # $Id: CMakeLists.txt 720 2014-12-08 16:29:33Z loose $
    lofar_package(PyCommon 1.0)
    lofar_find_package(Python 3.4 REQUIRED)
    include(PythonInstall)
    include(FindPythonModule)
    find_python_module(jsonschema)
    find_python_module(psycopg2)
    find_python_module(cx_Oracle)
    find_python_module(lxml)
    find_python_module(prometheus_client)
    set(_py_files
    __init__.py
    ssh_utils.py
    cep4_utils.py
    cobaltblocksize.py
    threading_utils.py
    lcu_utils.py
    cache.py
    dbcredentials.py
    defaultmailaddresses.py
    factory.py
    math.py
    methodtrigger.py
    metrics.py
    util.py
    database.py
    oracle.py
    postgres.py
    datetimeutils.py
    flask_utils.py
    h5_utils.py
    subprocess_utils.py
    xmlparse.py
    json_utils.py
    locking.py
    test_utils.py
    typing.py
    toposort.py
    ring_coordinates.py
    station_coordinates.py
    parameterset.py)
    python_install(${_py_files} DESTINATION lofar/common)
    lofar_add_bin_scripts(dbcredentials)
    add_subdirectory(test)
    # __init__.py: Module initialization file.
    #
    # Copyright (C) 2015
    # ASTRON (Netherlands Institute for Radio Astronomy)
    # P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
    #
    # This file is part of the LOFAR software suite.
    # The LOFAR software suite is free software: you can redistribute it
    # and/or modify it under the terms of the GNU General Public License as
    # published by the Free Software Foundation, either version 3 of the
    # License, or (at your option) any later version.
    #
    # The LOFAR software suite is distributed in the hope that it will be
    # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
    # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    # GNU General Public License for more details.
    #
    # You should have received a copy of the GNU General Public License along
    # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
    #
    # $Id: __init__.py 1568 2015-09-18 15:21:11Z loose $
    """
    Module initialization file.
    """
    import os
    def isProductionEnvironment():
    '''check if the program is running in a lofar producution environment'''
    return os.environ.get('LOFARENV', '') == 'PRODUCTION'
    def isTestEnvironment():
    '''check if the program is running in a lofar test environment'''
    return os.environ.get('LOFARENV', '') == 'TEST'
    def isDevelopmentEnvironment():
    '''check if the program is running in a lofar development (not production or test) environment'''
    return not (isProductionEnvironment() or isTestEnvironment())
    # cache.py: function return value cache
    #
    # copyright (c) 2015
    # astron (netherlands institute for radio astronomy)
    # p.o.box 2, 7990 aa dwingeloo, the netherlands
    #
    # this file is part of the lofar software suite.
    # the lofar software suite is free software: you can redistribute it
    # and/or modify it under the terms of the gnu general public license as
    # published by the free software foundation, either version 3 of the
    # license, or (at your option) any later version.
    #
    # the lofar software suite is distributed in the hope that it will be
    # useful, but without any warranty; without even the implied warranty of
    # merchantability or fitness for a particular purpose. see the
    # gnu general public license for more details.
    #
    # you should have received a copy of the gnu general public license along
    # with the lofar software suite. if not, see <http://www.gnu.org/licenses/>.
    #
    # $id: __init__.py 1568 2015-09-18 15:21:11z loose $
    import functools
    class cache(object):
    """ A simple cache for function call return values in Python 2.
    Use:
    @cache
    def foo(x):
    return x
    Causes foo() to be evaluated only for new values of x. """
    # If the needs for this class ever expands significantly, we should consider
    # switching to python 3, which provides a more comprehensive functools.lru_cache.
    def __init__(self, func):
    self.func = func
    self.cache = {}
    def __call__(self, *args, **kwargs):
    key = "%s %s" % (args, kwargs) # we can't hash on (args,kwargs) directly
    if key not in self.cache:
    self.cache[key] = self.func(*args, **kwargs)
    return self.cache[key]
    def __get__(self, obj, objtype):
    """ Support instance methods. """
    return functools.partial(self.__call__, obj)
    This diff is collapsed.
    """
    Code to derive the following parset input parameters for Cobalt. These keys need to be tuned
    specifically to make sure all Cobalt processing fits inside a block. Only two processing
    kernels can cross block boundaries: the FIR Filter taps, and the integration of multiple
    blocks of Correlator output.
    Once the following parameters are set, the integration time of the correlator can change
    slightly from what was requested. This in turn forces us to derive these keys during resource
    estimation.
    Cobalt.blockSize
    The number of samples in each unit of work. Needs to be a multiple of the working size
    of each individual step, for example, an 64-channel FFT requires blockSize to be a multiple
    of 64.
    Cobalt.Correlator.nrBlocksPerIntegration
    The number of correlator integration periods that fit in one block.
    Cobalt.Correlator.nrIntegrationsPerBlock
    The number of blocks that together form one integration period.
    Note that either nrBlocksPerIntegration or nrIntegrationsPerBlock has to be equal to 1.
    """
    from math import ceil
    from lofar.common.math import lcm
    class CorrelatorSettings(object):
    """ Settings for the Correlator. """
    def __init__(self):
    self.nrChannelsPerSubband = 64
    self.integrationTime = 1.0
    class StokesSettings(object):
    """ Settings for the Beamformer. """
    def __init__(self):
    self.nrChannelsPerSubband = 1
    self.timeIntegrationFactor = 1
    class BlockConstraints(object):
    """ Provide the constraints for the block size, as derived
    from the correlator and beamformer settings. """
    def __init__(self, correlatorSettings=None, coherentStokesSettings=[], incoherentStokesSettings=[], clockMHz=200):
    self.correlator = correlatorSettings
    self.coherentStokes = coherentStokesSettings
    self.incoherentStokes = incoherentStokesSettings
    self.clockMHz = clockMHz
    def minBlockSize(self):
    """ Block size below which the overhead per block becomes unwieldy. """
    # 0.6s is an estimate.
    return int(round(self._time2samples(0.6)))
    def maxBlockSize(self):
    """ Block size above which the data does not fit on the GPU. """
    # 1.3s is an estimate.
    return int(round(self._time2samples(1.3)))
    def nrSubblocks(self):
    if self.correlator:
    integrationSamples = self._time2samples(self.correlator.integrationTime)
    if integrationSamples < self.minBlockSize():
    def average(x, y):
    return (x + y) / 2.0
    return max(1, int(round(average(self.maxBlockSize(), self.minBlockSize()) / integrationSamples)))
    return 1
    def idealBlockSize(self):
    integrationTime = self.correlator.integrationTime if self.correlator else 1.0
    return self.nrSubblocks() * self._time2samples(integrationTime)
    def factor(self):
    """
    Determine common factors needed for the block Size.
    The Cobalt GPU kernels require the Cobalt.blockSize to be a multiple
    of several values in order to:
    1) divide the work evenly over threads and blocks.
    2) prevent integration of samples from crossing blockSize boundaries.
    """
    factor = 1
    NR_PPF_TAPS = 16
    CORRELATOR_BLOCKSIZE = 16
    BEAMFORMER_NR_DELAYCOMPENSATION_CHANNELS = 256
    BEAMFORMER_DELAYCOMPENSATION_BLOCKSIZE = 16
    # Process correlator settings
    if self.correlator:
    # FIR_Filter.cu
    factor = lcm(factor, NR_PPF_TAPS * self.correlator.nrChannelsPerSubband)
    # Correlator.cu (minimum of 16 samples per channel)
    factor = lcm(factor, CORRELATOR_BLOCKSIZE * self.correlator.nrChannelsPerSubband * self.nrSubblocks())
    for coherentStokes in self.coherentStokes:
    # DelayAndBandPass.cu
    factor = lcm(factor, BEAMFORMER_DELAYCOMPENSATION_BLOCKSIZE * BEAMFORMER_NR_DELAYCOMPENSATION_CHANNELS)
    # FIR_Filter.cu
    factor = lcm(factor, NR_PPF_TAPS * coherentStokes.nrChannelsPerSubband)
    # CoherentStokesKernel.cc searches for the best fit, supporting a wide range of configurations by
    # splitting up the work load into passes. There could be a performance impact for some ill-chosen
    # values, but that is not something this code is tasked with.
    pass
    # CoherentStokes.cu (produce at least one output sample/channel)
    factor = lcm(factor, coherentStokes.timeIntegrationFactor * coherentStokes.nrChannelsPerSubband)
    for incoherentStokes in self.incoherentStokes:
    # DelayAndBandPass.cu
    factor = lcm(factor, BEAMFORMER_DELAYCOMPENSATION_BLOCKSIZE * BEAMFORMER_NR_DELAYCOMPENSATION_CHANNELS)
    # FIR_Filter.cu
    factor = lcm(factor, NR_PPF_TAPS * incoherentStokes.nrChannelsPerSubband)
    # IncoherentStokes.cu (produce at least one output sample/channel)
    factor = lcm(factor, incoherentStokes.timeIntegrationFactor * incoherentStokes.nrChannelsPerSubband)
    return factor
    def __samples_per_second(self):
    MHZ_PER_HZ = 1e6
    STATION_FFT_LENGTH = 1024
    return self.clockMHz * MHZ_PER_HZ / STATION_FFT_LENGTH
    def _time2samples(self, t):
    """ Convert a time `t' (seconds) into a number of station samples. """
    return int(round(t * self.__samples_per_second()))
    def _samples2time(self, samples):
    """ Return the duration of a number of station samples. """
    return samples / self.__samples_per_second()
    class BlockSize(object):
    """ Derive Cobalt specifications given BlockConstraints. Output:
    BlockSize member | Cobalt parset key
    ---------------------------------------
    blockSize | Cobalt.blockSize
    nrSubblocks | Cobalt.Correlator.nrIntegrationsPerBlock
    nrBlocks | Cobalt.Correlator.nrBlocksPerIntegration
    integrationTime | Cobalt.Correlator.integrationTime
    """
    def __init__(self, constraints):
    self.constraints = constraints
    self.nrSubblocks = constraints.nrSubblocks()
    self.blockSize = self._blockSize(constraints.idealBlockSize(), constraints.factor())
    self.nrBlocks = self._nrBlocks(constraints.idealBlockSize(), self.blockSize)
    if self.nrSubblocks > 1:
    self.integrationSamples = self.blockSize / self.nrSubblocks
    else:
    self.integrationSamples = self.blockSize * self.nrBlocks
    self.integrationTime = constraints._samples2time(self.integrationSamples)
    def _nrBlocks(self, integrationSamples, blockSize):
    return max(1, int(round(integrationSamples / blockSize)))
    def _blockSize(self, integrationSamples, factor):
    bestBlockSize = None
    bestNrBlocks = None
    bestError = None
    # Create a comfortable range to search in for possible fits.
    maxFactorPerBlock = int(ceil(integrationSamples / factor)) * 2
    for factorsPerBlock in range(1, maxFactorPerBlock):
    blockSize = factorsPerBlock * factor;
    # Discard invalid block sizes
    if blockSize < self.constraints.minBlockSize():
    continue
    if blockSize > self.constraints.maxBlockSize():
    continue
    # Calculate the number of blocks we'd use
    nrBlocks = self._nrBlocks(integrationSamples, blockSize)
    # Calculate error for this solution
    diff = lambda a,b: max(a,b) - min(a,b)
    error = diff(integrationSamples, nrBlocks * blockSize)
    # Accept this candidate if best so far. Prefer
    # fewer blocks if candidates are (nearly) equal in their error.
    if not bestBlockSize \
    or error < bestError \
    or (error < 0.01 * integrationSamples and nrBlocks < bestNrBlocks):
    bestBlockSize = blockSize
    bestNrBlocks = nrBlocks
    bestError = error
    return int(round(bestBlockSize))
# Command-line entry point: build BlockConstraints from the given options,
# derive the block size, and print the optimal Cobalt.blockSize.
if __name__ == "__main__":
    import argparse
    import sys

    parser = argparse.ArgumentParser(description='Compute COBALT block sizes, based on the given constraints.')
    parser.add_argument('-c', '--correlate', dest='correlate', action='store_true', default=False, help='enable the correlator')
    parser.add_argument('--correlator_nrchannels', type=int, default=64, help='Correlator.nrChannelsPerSubband (default: %(default)s)')
    parser.add_argument('--correlator_integration_time', type=float, default=1.0, help='Correlator.integrationTime (default: %(default)s)')
    parser.add_argument('--coherent', dest='coherent', action='store_true', default=False, help='enable coherent beamforming')
    parser.add_argument('--incoherent', dest='incoherent', action='store_true', default=False, help='enable incoherent beamforming')
    parser.add_argument('--coherent_nrchannels', type=int, default=16, help='CoherentStokes.nrChannelsPerSubband (default: %(default)s)')
    parser.add_argument('--coherent_time_integration_factor', type=int, default=1, help='CoherentStokes.timeIntegrationFactor (default: %(default)s)')
    parser.add_argument('--incoherent_nrchannels', type=int, default=16, help='IncoherentStokes.nrChannelsPerSubband (default: %(default)s)')
    parser.add_argument('--incoherent_time_integration_factor', type=int, default=1, help='IncoherentStokes.timeIntegrationFactor (default: %(default)s)')
    args = parser.parse_args()

    # At least one processing mode must be enabled, otherwise there is
    # nothing to compute.
    if not args.correlate and not args.coherent and not args.incoherent:
        parser.print_help()
        sys.exit(1)

    # Settings objects are always constructed; which of them are actually
    # passed on depends on the enabled modes below.
    corr = CorrelatorSettings()
    corr.nrChannelsPerSubband = args.correlator_nrchannels
    corr.integrationTime = args.correlator_integration_time

    coh = StokesSettings()
    coh.nrChannelsPerSubband = args.coherent_nrchannels
    coh.timeIntegrationFactor = args.coherent_time_integration_factor

    incoh = StokesSettings()
    incoh.nrChannelsPerSubband = args.incoherent_nrchannels
    incoh.timeIntegrationFactor = args.incoherent_time_integration_factor

    constraints = BlockConstraints(correlatorSettings=corr if args.correlate else None,
                                   coherentStokesSettings=[coh] if args.coherent else [],
                                   incoherentStokesSettings=[incoh] if args.incoherent else [])

    # will throw if the blocksize cannot be computed
    blocksize = BlockSize(constraints)

    # print optimal block size
    print(blocksize.blockSize)
    0% Loading or .
    You are about to add 0 people to the discussion. Proceed with caution.
    Please register or to comment