Skip to content
Snippets Groups Projects
Commit 17dff92b authored by Mattia Mancini's avatar Mattia Mancini
Browse files

Merge branch 'add_ci/cd' into 'main'

Add ci/cd

See merge request !1
parents 25cdddc6 eeb9e2df
No related branches found
No related tags found
1 merge request!1Add ci/cd
Pipeline #33318 passed
tests/*.h5 **/**/*.h5
variables:
TAG: ":latest"
workflow:
rules:
- if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
variables:
TAG: ":latest" # Override globally-defined DEPLOY_VARIABLE
- if: $CI_COMMIT_REF_NAME != $CI_DEFAULT_BRANCH
variables:
TAG: ":$CI_COMMIT_REF_SLUG"
docker-build:
# Use the official docker image.
image: docker:latest
stage: build
services:
- docker:dind
before_script:
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
# Default branch leaves tag empty (= latest tag)
# All other branches are tagged with the escaped branch name (commit ref slug)
script:
- docker build --pull -t "$CI_REGISTRY_IMAGE${TAG}" .
- docker push "$CI_REGISTRY_IMAGE${TAG}"
# Run this job in a branch where a Dockerfile exists
rules:
- if: $CI_COMMIT_BRANCH
exists:
- Dockerfile
test_scintillation_script:
stage: test
image: $CI_REGISTRY_IMAGE$TAG
before_script:
- mv $WGET_CREDENTIALS_FILE ~/.wgetrc
- chmod 600 ~/.wgetrc
script:
- scintillation_utils.py --help
- bash tests/download_test_data.sh
- python3 setup.py test
- scintillation_utils.py tests/data/Dynspec*.h5 preview --export_only_station CS001LBA
artifacts:
paths:
- preview/
\ No newline at end of file
...@@ -6,7 +6,8 @@ RUN apt update && \ ...@@ -6,7 +6,8 @@ RUN apt update && \
python3-astropy \ python3-astropy \
python3-numpy \ python3-numpy \
python3-h5py \ python3-h5py \
python3-setuptools python3-setuptools \
wget
COPY . /src COPY . /src
......
...@@ -14,7 +14,7 @@ def parse_args(): ...@@ -14,7 +14,7 @@ def parse_args():
parser.add_argument('--samples_size', help='Samples size in seconds', default=3600) parser.add_argument('--samples_size', help='Samples size in seconds', default=3600)
parser.add_argument('--averaging_window', help='Averaging window in seconds', default=1) parser.add_argument('--averaging_window', help='Averaging window in seconds', default=1)
parser.add_argument('--export_only_station', help='Selects only one station to be exported', default=None)
return parser.parse_args() return parser.parse_args()
...@@ -24,7 +24,8 @@ def main(): ...@@ -24,7 +24,8 @@ def main():
metadata = averaging.extract_metadata(dataset) metadata = averaging.extract_metadata(dataset)
os.makedirs(args.output_directory, exist_ok=True) os.makedirs(args.output_directory, exist_ok=True)
for dynspec in metadata: for dynspec in metadata:
if args.export_only_station and args.export_only_station not in metadata[dynspec]['BEAM_STATIONS_LIST']:
continue
averaging.split_samples(dynspec, averaging.split_samples(dynspec,
metadata[dynspec], metadata[dynspec],
dataset, args.samples_size, args.averaging_window, args.output_directory) dataset, args.samples_size, args.averaging_window, args.output_directory)
......
import h5py import h5py
from argparse import ArgumentParser from argparse import ArgumentParser
import os import os
from typing import Dict, Optional, Iterable, Any from typing import Dict, Optional, Iterable, Any, Union, ByteString, AnyStr
from datetime import datetime, timedelta from datetime import datetime, timedelta
import json import json
import numpy import numpy
...@@ -14,6 +14,14 @@ import matplotlib.dates as mdates ...@@ -14,6 +14,14 @@ import matplotlib.dates as mdates
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', level=logging.INFO) logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', level=logging.INFO)
def decode_str(str_or_byteslike: Union[ByteString, AnyStr]):
try:
return str_or_byteslike.decode()
except (UnicodeDecodeError, AttributeError):
return str_or_byteslike
_ROOT_SELECTED_FIELDS = ( _ROOT_SELECTED_FIELDS = (
"ANTENNA_SET", "ANTENNA_SET",
"CHANNELS_PER_SUBANDS", "CHANNELS_PER_SUBANDS",
...@@ -91,8 +99,9 @@ class SmartJsonEncoder(json.JSONEncoder): ...@@ -91,8 +99,9 @@ class SmartJsonEncoder(json.JSONEncoder):
elif isinstance(o, datetime): elif isinstance(o, datetime):
return o.isoformat() return o.isoformat()
else: else:
return super().default(int(o)) return super().default(o)
except TypeError: except TypeError:
print(o)
raise Exception('Cannot convert ' + str(type(o))) raise Exception('Cannot convert ' + str(type(o)))
...@@ -107,7 +116,7 @@ def parse_args(): ...@@ -107,7 +116,7 @@ def parse_args():
def open_dataset(path): def open_dataset(path):
if not os.path.exists(path): if not os.path.exists(path):
raise FileNotFoundError(f'Cannot find file at {path}') raise FileNotFoundError(f'Cannot find file at {path}')
return h5py.File(path) return h5py.File(path, mode='r')
def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None, def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None,
...@@ -117,25 +126,18 @@ def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None, ...@@ -117,25 +126,18 @@ def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None,
if dict_container is None: if dict_container is None:
dict_container = {} dict_container = {}
if include_fields is not None:
for key in include_fields:
try:
value = h5_leaf.attrs[key]
if isinstance(value, datetime):
dict_container[key] = value.isoformat()
else:
dict_container[key] = value
except KeyError:
logging.error('missing field %s', key)
else:
for key, value in h5_leaf.attrs.items(): for key, value in h5_leaf.attrs.items():
if include_fields is not None and key not in include_fields:
continue
if exclude_fields_set and key in exclude_fields_set: if exclude_fields_set and key in exclude_fields_set:
continue continue
if isinstance(value, datetime): if isinstance(value, datetime):
dict_container[key] = value.isoformat() dict_container[key] = value.isoformat()
elif isinstance(value, list) or isinstance(value, tuple) or isinstance(value, numpy.ndarray):
dict_container[key] = list(map(decode_str, value))
else: else:
dict_container[key] = value dict_container[key] = decode_str(value)
return dict_container return dict_container
...@@ -337,16 +339,17 @@ def split_samples(dynspec_name, ...@@ -337,16 +339,17 @@ def split_samples(dynspec_name,
:return: :return:
""" """
time_delta, *_ = dataset[dynspec_name]['COORDINATES']['TIME'].attrs['INCREMENT'] time_delta, *_ = decode_str(dataset[dynspec_name]['COORDINATES']['TIME'].attrs['INCREMENT'])
obs_start_time = parse_datetime_str(dataset[dynspec_name].attrs['DYNSPEC_START_UTC']) obs_start_time = parse_datetime_str(decode_str(dataset[dynspec_name].attrs['DYNSPEC_START_UTC']))
obs_end_time = parse_datetime_str(dataset[dynspec_name].attrs['DYNSPEC_STOP_UTC']) obs_end_time = parse_datetime_str(decode_str(dataset[dynspec_name].attrs['DYNSPEC_STOP_UTC']))
frequency = dataset[dynspec_name]['COORDINATES']['SPECTRAL'].attrs['AXIS_VALUE_WORLD'] frequency = dataset[dynspec_name]['COORDINATES']['SPECTRAL'].attrs['AXIS_VALUE_WORLD']
antenna_set = metadata['ANTENNA_SET'] antenna_set = metadata['ANTENNA_SET']
start_frequency, end_frequency = frequency[0] / 1.e6, frequency[-1] / 1.e6 start_frequency, end_frequency = frequency[0] / 1.e6, frequency[-1] / 1.e6
station_name, *_ = dataset[dynspec_name].attrs['BEAM_STATIONS_LIST'] station_name, *_ = metadata['BEAM_STATIONS_LIST']
station_name = decode_str(station_name)
averaging_window_in_samples = int(numpy.ceil(averaging_window / time_delta)) averaging_window_in_samples = int(numpy.ceil(averaging_window / time_delta))
averaging_window_in_seconds = averaging_window_in_samples * time_delta averaging_window_in_seconds = averaging_window_in_samples * time_delta
...@@ -362,11 +365,11 @@ def split_samples(dynspec_name, ...@@ -362,11 +365,11 @@ def split_samples(dynspec_name,
start_sample_datetime = round_down_datetime(start_obs_datetime + timedelta(seconds=sample_window * i), start_sample_datetime = round_down_datetime(start_obs_datetime + timedelta(seconds=sample_window * i),
averaging_window) averaging_window)
end_sample_datetime = round_up_datetime(start_obs_datetime + timedelta(seconds=sample_window * (i + 1)), end_sample_datetime = round_up_datetime(start_obs_datetime + timedelta(seconds=sample_window * (i + 1)),
averaging_window) averaging_window)
indexs = numpy.where(numpy.logical_and(time_obs > start_sample_datetime.timestamp(), indexs = numpy.where(numpy.logical_and(time_obs > start_sample_datetime.timestamp(),
time_obs <= end_sample_datetime.timestamp()))[0] time_obs <= end_sample_datetime.timestamp()))[0]
print(start_sample_datetime, end_sample_datetime, station_name, len(indexs), total_time_samples)
start_index, end_index = indexs[0], indexs[-1] start_index, end_index = indexs[0], indexs[-1]
fname = start_sample_datetime.strftime( fname = start_sample_datetime.strftime(
......
...@@ -15,4 +15,4 @@ cd ${TEST_DIR} ...@@ -15,4 +15,4 @@ cd ${TEST_DIR}
echo "current directory is " $PWD echo "current directory is " $PWD
# SET user and pass in ~/.wgetrc to be able to download the data # SET user and pass in ~/.wgetrc to be able to download the data
wget --recursive --no-parent -nH --reject="index.html*" --cut-dirs=4 https://sdc-dev.astron.nl/files/test_data/scintillation_dataset/ wget --recursive -q --no-parent -nH --reject="index.html*" --cut-dirs=4 https://sdc-dev.astron.nl/files/test_data/scintillation_dataset/
...@@ -4,20 +4,24 @@ import json ...@@ -4,20 +4,24 @@ import json
from glob import glob from glob import glob
from scintillation.averaging import open_dataset, extract_metadata, compute_start_end_azimuth_elevation from scintillation.averaging import open_dataset, extract_metadata, compute_start_end_azimuth_elevation
basepath = os.path.dirname(__file__) basepath = os.path.dirname(__file__)
test_datasets = glob(os.path.join(basepath, '*.h5')) test_datasets = glob(os.path.join(basepath, 'data', '*.h5'))
test_data_file_path = os.path.join(basepath, 'test_metadata.json') test_data_file_path = os.path.join(basepath, 'test_metadata.json')
def is_test_data_present():
return len(test_datasets) > 0
class TestMetadata(unittest.TestCase): class TestMetadata(unittest.TestCase):
@unittest.skipUnless(is_test_data_present(), 'test data is missing')
def test_azimuth_elevation_computation(self): def test_azimuth_elevation_computation(self):
dataset = open_dataset(test_datasets[0]) dataset = open_dataset(test_datasets[0])
metadata_per_dynspec = extract_metadata(dataset) metadata_per_dynspec = extract_metadata(dataset)
metadata, *_ = metadata_per_dynspec.values() metadata, *_ = metadata_per_dynspec.values()
compute_start_end_azimuth_elevation(metadata) compute_start_end_azimuth_elevation(metadata)
@unittest.skipUnless(is_test_data_present(), 'test data is missing')
unittest.skip('doing others')
def test_fields_name(self): def test_fields_name(self):
with open(test_data_file_path, 'r') as fin: with open(test_data_file_path, 'r') as fin:
expected_metadata_dict = json.load(fin) expected_metadata_dict = json.load(fin)
......
import unittest
import os
import tempfile
from glob import glob
from scintillation.averaging import open_dataset, extract_metadata, split_samples
basepath = os.path.dirname(__file__)
test_datasets = glob(os.path.join(basepath, 'data', '*.h5'))
def is_test_data_present():
return len(test_datasets) > 0
class TestMetadata(unittest.TestCase):
@unittest.skipUnless(is_test_data_present(), 'missing test data')
def test_preview_generation(self):
with tempfile.TemporaryDirectory() as output_dir:
dataset = open_dataset(test_datasets[0])
metadata = extract_metadata(dataset)
dynspec, *_ = metadata.keys()
split_samples(dynspec, metadata[dynspec], dataset, 3600, 10, output_dir)
out_fits_files = glob(os.path.join(output_dir, '*.fits'))
out_png_files = glob(os.path.join(output_dir, '*.png'))
out_json_files = glob(os.path.join(output_dir, '*.json'))
self.assertTrue(len(out_fits_files) > 0, msg='Fits files not generated')
self.assertTrue(len(out_png_files) > 0, msg='PNG files not generated')
self.assertTrue(len(out_json_files) > 0, msg='Json files not generated')
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment