...
 
@@ -20,3 +20,5 @@ SAS/OTB/jRSP/autoconf_share
SAS/OTB/jRSP/bootstrap
SAS/OTB/jRSP/configure.in
**/.idea
SAS/TMSS/frontend/tmss_webapp/package-lock.json
SAS/TMSS/frontend/tmss_webapp/node_modules/
@@ -219,7 +219,7 @@ dockerize_TMSS:
- docker push nexus.cep4.control.lofar:18080/tmss_django:$CI_COMMIT_SHORT_SHA
- docker tag tmss_testprovider:$CI_COMMIT_SHORT_SHA nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA
- docker push nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA
- docker logout $CI_NEXUS_REGISTRY
- docker logout $CI_NEXUS_REGISTRY
dependencies:
- build_TMSS
- unit_test_TMSS
@@ -311,7 +311,7 @@ deploy-tmss-test:
- chmod 700 ~/.ssh
- ssh-keyscan scu199.control.lofar >> ~/.ssh/known_hosts
- chmod 644 ~/.ssh/known_hosts
script:
script:
- cd SAS/TMSS
- ssh lofarsys@scu199.control.lofar "docker-compose -f docker-compose-scu199.yml down"
- scp docker-compose-scu199.yml lofarsys@scu199.control.lofar:~/
@@ -349,6 +349,19 @@ deploy-tmss-ua:
only:
- "master"
deploy-tmss-dockerhub:
stage: deploy
script:
- docker login -u $CI_DOCKERHUB_USERNAME -p $CI_DOCKERHUB_PASSWORD
- docker tag tmss_django:$CI_COMMIT_SHORT_SHA lofar/tmss:master-$CI_COMMIT_SHORT_SHA
- docker push lofar/tmss:master-$CI_COMMIT_SHORT_SHA
- docker logout
dependencies:
- dockerize_TMSS
when: manual
only:
- "master"
deploy-MCU_MAC-test:
stage: deploy
before_script:
......
find_program(SPHINX_APIDOC_EXECUTABLE
NAMES sphinx-apidoc sphinx-apidoc-3 sphinx-apidoc-3.6
DOC "Path to sphinx-apidoc executable")
find_program(SPHINX_BUILD_EXECUTABLE
NAMES sphinx-build sphinx-build-3 sphinx-build-3.6
DOC "Path to sphinx-build executable")
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Sphinx
"Failed to locate sphinx-apidoc executable"
SPHINX_APIDOC_EXECUTABLE)
find_package_handle_standard_args(Sphinx
"Failed to locate sphinx-build executable"
SPHINX_BUILD_EXECUTABLE)
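# Usage sketch (illustrative, not part of this module): callers can test
# SPHINX_FOUND, the result variable set by find_package_handle_standard_args.
# Note that the second call above overwrites the result of the first, so
# SPHINX_FOUND ends up reflecting only the sphinx-build check.
#
#   find_package(Sphinx)
#   if(SPHINX_FOUND)
#     message(STATUS "Using sphinx-build at ${SPHINX_BUILD_EXECUTABLE}")
#   endif(SPHINX_FOUND)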
# - Generate script to configure and run the code documentation tool Sphinx.
# $Id$
#
# Copyright (C) 2010
# ASTRON (Netherlands Institute for Radio Astronomy)
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.
# The LOFAR software suite is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
include(LofarMacros)
include(FindPythonModule)
# Only process this file when we're at the top-level source directory.
if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}")
# Locate the sphinx programs.
find_package(Sphinx)
# Set Sphinx input directory, containing the *.rst files that are used to create the HTML documentation.
# These files are first created by running the sphinx-apidoc utility on the Python modules built by make.
if(NOT SPHINX_SOURCE_DIR)
set(SPHINX_SOURCE_DIR "${CMAKE_BINARY_DIR}/doc/sphinx/source" CACHE PATH
"Directory where Sphinx will read the source rst files from to generate documentation")
endif(NOT SPHINX_SOURCE_DIR)
file(MAKE_DIRECTORY "${SPHINX_SOURCE_DIR}")
# Generate the CMake script that will be invoked by 'make doc'.
configure_file(
"${CMAKE_SOURCE_DIR}/CMake/docscripts/MakeSphinxDoc.cmake.in"
"${CMAKE_BINARY_DIR}/MakeSphinxDoc.cmake" @ONLY)
# Define custom target 'doc'.
add_custom_target(doc
COMMAND "${CMAKE_COMMAND}"
-D SPHINX_SOURCE_DIR="${SPHINX_SOURCE_DIR}"
-D PYTHON_BUILD_DIR="${PYTHON_BUILD_DIR}"
-D LOFARROOT="${CMAKE_SOURCE_DIR}"
-P "${CMAKE_BINARY_DIR}/MakeSphinxDoc.cmake"
COMMENT "Defining target 'doc' ...")
endif("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}")
# - Template for script MakeSphinxDoc.cmake used to generate code documentation.
#
# The MakeSphinxDoc.cmake script is used to generate source code documentation
# using Sphinx. The configured script will be invoked when the custom target
# 'doc' is (re)created, i.e. by doing a 'make doc'.
# $Id$
#
# Copyright (C) 2010
# ASTRON (Netherlands Foundation for Research in Astronomy)
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, softwaresupport@astron.nl
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
message("Running Sphinx Apidoc to produce *.rst files for current LOFAR build ...")
message("@SPHINX_APIDOC_EXECUTABLE@ -o ${SPHINX_SOURCE_DIR} ${PYTHON_BUILD_DIR}")
execute_process(
COMMAND "@SPHINX_APIDOC_EXECUTABLE@" -o "${SPHINX_SOURCE_DIR}" "${PYTHON_BUILD_DIR}"
RESULT_VARIABLE _result)
if(NOT _result EQUAL 0)
message(SEND_ERROR "Sphinx apidoc returned with error: ${_result}")
endif(NOT _result EQUAL 0)
message("Copying over Sphinx Makefile and config file and prepared *.rst files to build dir ...")
execute_process(
COMMAND cp "${LOFARROOT}/doc/sphinx/Makefile" "${CMAKE_BINARY_DIR}/doc/sphinx/"
RESULT_VARIABLE _result)
if(NOT _result EQUAL 0)
message(SEND_ERROR "copy Sphinx Makefile returned with error: ${_result}")
endif(NOT _result EQUAL 0)
execute_process(
COMMAND cp "${LOFARROOT}/doc/sphinx/source/conf.py" "${CMAKE_BINARY_DIR}/doc/sphinx/source/"
RESULT_VARIABLE _result)
if(NOT _result EQUAL 0)
message(SEND_ERROR "copy Sphinx config returned with error: ${_result}")
endif(NOT _result EQUAL 0)
file(GLOB RSTFiles "${LOFARROOT}/doc/sphinx/source/*.rst")
# Note: ${RSTFiles} is left unquoted so that each globbed file is passed to cp as a separate argument.
execute_process(
COMMAND cp ${RSTFiles} "${CMAKE_BINARY_DIR}/doc/sphinx/source/"
RESULT_VARIABLE _result)
if(NOT _result EQUAL 0)
message(SEND_ERROR "copy prepared *.rst files returned with error: ${_result}")
endif(NOT _result EQUAL 0)
message("Executing Sphinx Makefile to create html documentation for current build ...")
execute_process(
COMMAND make SPHINXBUILD=@SPHINX_BUILD_EXECUTABLE@ html
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/doc/sphinx/"
RESULT_VARIABLE _result)
if(NOT _result EQUAL 0)
message(SEND_ERROR "Sphinx make returned with error: ${_result}")
endif(NOT _result EQUAL 0)
@@ -41,6 +41,11 @@ endif(NOT DEFINED BUILD_PACKAGES)
## ---------------------------------------------------------------------------
include(LofarDoxygen)
## ---------------------------------------------------------------------------
## Check if Sphinx is present; if so, generate sphinx documentation
## ---------------------------------------------------------------------------
include(LofarSphinx)
## ---------------------------------------------------------------------------
## Generate configuration header file.
## ---------------------------------------------------------------------------
......
# $Id$
lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient)
IF(BUILD_TESTING)
lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient TMSS)
ELSE()
lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient)
ENDIF(BUILD_TESTING)
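# Note: the extra TMSS dependency in the BUILD_TESTING branch is presumably there
# so the tests can spin up a TMSS test environment; production builds keep the
# original dependency list.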
add_subdirectory(src)
add_subdirectory(test)
......
@@ -154,27 +154,31 @@ class Parset(dict):
return result
def processingNumberOfCoresPerTask(self):
result = int(self[PARSET_PREFIX + "Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"]) or None
if not result:
logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfCoresPerTask: %s, defaulting to %i',
result, DEFAULT_NUMBER_OF_CORES_PER_TASK)
result = DEFAULT_NUMBER_OF_CORES_PER_TASK
return result
try:
result = int(self[PARSET_PREFIX + "Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"]) or None
if not result:
logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfCoresPerTask: %s, defaulting to %i',
result, DEFAULT_NUMBER_OF_CORES_PER_TASK)
result = DEFAULT_NUMBER_OF_CORES_PER_TASK
return result
except:
return DEFAULT_NUMBER_OF_CORES_PER_TASK
def processingNumberOfTasks(self):
""" Parse the number of nodes to allocate from
"Observation.Cluster.ProcessingCluster.numberOfTasks" """
result = int(self[PARSET_PREFIX +
"Observation.Cluster.ProcessingCluster.numberOfTasks"].strip()) or None
# apply bound
if not result or result <= 0 or result > NUMBER_OF_NODES * NUMBER_OF_CORES_PER_NODE:
logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfTasks: %s, defaulting to %s',
result, DEFAULT_NUMBER_OF_TASKS)
result = DEFAULT_NUMBER_OF_TASKS
return result
try:
result = int(self[PARSET_PREFIX + "Observation.Cluster.ProcessingCluster.numberOfTasks"].strip()) or None
# apply bound
if not result or result <= 0 or result > NUMBER_OF_NODES * NUMBER_OF_CORES_PER_NODE:
logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfTasks: %s, defaulting to %s',
result, DEFAULT_NUMBER_OF_TASKS)
result = DEFAULT_NUMBER_OF_TASKS
return result
except:
return DEFAULT_NUMBER_OF_TASKS
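# A possible shared helper for the two parsers above (illustrative sketch only;
# '_parset_int' is hypothetical and not part of this change). Catching specific
# exceptions rather than using a bare 'except:' avoids swallowing unrelated errors:
#
#   def _parset_int(self, key: str, default: int) -> int:
#       try:
#           return int(self[PARSET_PREFIX + key].strip()) or default
#       except (KeyError, ValueError, AttributeError):
#           return default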
@staticmethod
def dockerRepository():
@@ -365,8 +369,7 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
scheduled_pipeline_subtask_ids = []
for subtask in scheduled_subtasks:
try:
bits = subtask['url'].split('/')
subtask_id = int(bits[bits.index("subtask") + 1])
subtask_id = subtask['id']
scheduled_pipeline_subtask_ids.append(subtask_id)
except Exception as e:
logger.error(e)
@@ -378,11 +381,11 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
try:
subtask = self.tmss_client.get_subtask(subtask_id)
subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template'])
if 'pipeline' not in subtask_template['type']:
logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type'])
if 'pipeline' not in subtask_template['type_value']:
logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type_value'])
continue
parset = Parset(parameterset.fromString(self.tmss_client.get_subtask_parset(subtask_id)))
parset = Parset(parameterset.fromString(self.tmss_client.get_subtask_parset(subtask_id)).dict())
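# note: Parset subclasses dict (see 'class Parset(dict)' above), hence the
# added .dict() call: the Parset is built from the parameterset's plain-dict
# representation instead of the parameterset object itself.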
if not parset or not self._shouldHandle(parset):
continue
self._startPipeline(subtask_id, parset)
@@ -395,13 +398,14 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
try:
subtask = self.tmss_client.get_subtask(subtask_id)
subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template'])
if 'pipeline' not in subtask_template['type']:
logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type'])
if 'pipeline' not in subtask_template['type_value']:
logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type_value'])
return
logger.info("getting parset for scheduled subtask id=%s of type '%s'", subtask_id, subtask_template['type_value'])
parset = self.tmss_client.get_subtask_parset(subtask_id)
parset = parameterset.fromString(parset)
parset = Parset(parset)
parset = Parset(parset.dict())
if parset and self._shouldHandle(parset):
self._startPipeline(subtask_id, parset)
except Exception as e:
@@ -436,9 +440,11 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
# Avoid race conditions by checking whether we haven't already sent the job
# to SLURM. Our QUEUED status update may still be being processed.
if self.slurm.isQueuedOrRunning(subtask_id):
logger.info("Pipeline %s is already queued or running in SLURM.", subtask_id)
return
# if self.slurm.isQueuedOrRunning(subtask_id):
# logger.info("Pipeline %s is already queued or running in SLURM.", subtask_id)
# return
self.tmss_client.set_subtask_status(subtask_id, "queueing")
logger.info("***** START Subtask ID %s *****", subtask_id)
@@ -513,8 +519,8 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
# print some info
echo Running on $SLURM_NODELIST
# notify TMSS that we're running
runcmd {setStatus_started}
# notify TMSS that we're starting
runcmd {setStatus_starting}
# notify ganglia
wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} ACTIVE&host_regex="
@@ -522,6 +528,9 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
# fetch parset
runcmd {getParset} > {parset_file}
# notify TMSS that we're (almost) running (should be called from within the pipeline...)
runcmd {setStatus_started}
# run the pipeline
runcmd docker-run-slurm.sh --rm --net=host \
-e LOFARENV={lofarenv} \
@@ -536,22 +545,13 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
runcmd {setStatus_finishing}
if [ $RESULT -eq 0 ]; then
# !!! TODO: Review behavior for TMSS
# wait for MoM to pick up feedback before we set finished status
# AS: I increased this to 300 sec to be in line with the wait time after observation finished
# and because we still note quite a lot of feedback issues in MoM
runcmd sleep 300
# if we reached this point, the pipeline ran successfully
runcmd {setStatus_finished}
# notify ganglia
wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} FINISHED&host_regex="
else
# !!! TODO: Review behavior for TMSS
# If we are killed by the pipeline being set to finished, we just went from finished->finishing
# but our abort_trigger may already have been cancelled. Set the status here too to avoid lingering
# in finishing
# !!! TODO: How to set an "unsuccessful" finished state in TMSS?
runcmd {setStatus_finished}
fi
@@ -568,6 +568,7 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
cluster=parset.processingCluster(),
getParset=getParset_cmdline(),
setStatus_starting=setStatus_cmdline("starting"),
setStatus_started=setStatus_cmdline("started"),
setStatus_finishing=setStatus_cmdline("finishing"),
setStatus_finished=setStatus_cmdline("finished"),
......
@@ -419,52 +419,11 @@ class TestPipelineControlTMSSClassMethods(unittest.TestCase):
logger.warning('END TEST_SHOULDHANDLE')
@unit_test
@integration_test
class TestPipelineControlTMSS(unittest.TestCase):
# TODO: write a test similar to t_qaservice, which integrates tmss_test_env and (in this case) pipelinecontrol
pass
def test_check_scheduled_pipelines(self):
""" Test whether we start pipelines that have status scheduled in TMSS. """
logger.warning('TEST_CHECKSCHEDULED')
with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtask_parset') as mock_get_subtask_parset:
mock_get_subtask_parset.side_effect = lambda id: "ObsSW.Observation.processType=Pipeline\n" \
"ObsSW.Observation.Cluster.ProcessingCluster.clusterName=CEP4\n"
with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtasks') as mock_get_subtasks:
mock_get_subtasks.side_effect = lambda state: json.loads('[{"url":"http://localhost:8008/api/subtask/2000001/?format=json","tags":["TMSS","TESTING"],"created_at":"2020-05-11T06:39:01.907446","updated_at":"2020-05-11T17:49:47.455010","start_time":null,"stop_time":null,"specifications_doc":{"demixer":{"baselines":"CS*,RS*&","time_steps":1,"demix_always":[],"ignore_target":false,"demix_if_needed":[],"frequency_steps":4,"demix_time_steps":1,"demix_frequency_steps":4},"aoflagger":{"strategy":"HBAdefault"},"preflagger0":{"channels":"0..nchan/32-1,31*nchan/32..nchan-1"},"preflagger1":{"corrtype":"auto"},"storagemanager":"dysco"},"do_cancel":null,"priority":1,"scheduler_input_doc":{},"state":"http://localhost:8008/api/subtask_state/scheduled/?format=json","task_blueprint":"http://localhost:8008/api/task_blueprint/20/?format=json","specifications_template":"http://localhost:8008/api/subtask_template/2/?format=json","schedule_method":"http://localhost:8008/api/schedule_method/manual/?format=json","cluster":"http://localhost:8008/api/cluster/1/?format=json"},' \
'{"url":"http://localhost:8008/api/subtask/2000042/?format=json","tags":["TMSS","TESTING"],"created_at":"2020-05-11T06:39:01.907446","updated_at":"2020-05-11T17:49:47.455010","start_time":null,"stop_time":null,"specifications_doc":{"demixer":{"baselines":"CS*,RS*&","time_steps":1,"demix_always":[],"ignore_target":false,"demix_if_needed":[],"frequency_steps":4,"demix_time_steps":1,"demix_frequency_steps":4},"aoflagger":{"strategy":"HBAdefault"},"preflagger0":{"channels":"0..nchan/32-1,31*nchan/32..nchan-1"},"preflagger1":{"corrtype":"auto"},"storagemanager":"dysco"},"do_cancel":null,"priority":1,"scheduler_input_doc":{},"state":"http://localhost:8008/api/subtask_state/scheduled/?format=json","task_blueprint":"http://localhost:8008/api/task_blueprint/20/?format=json","specifications_template":"http://localhost:8008/api/subtask_template/2/?format=json","schedule_method":"http://localhost:8008/api/schedule_method/manual/?format=json","cluster":"http://localhost:8008/api/cluster/1/?format=json"}]')
with unittest.mock.patch('lofar.mac.PipelineControl.PipelineControlTMSSHandler._startPipeline') as mock_startPipeline:
handler = PipelineControlTMSSHandler()
handler.check_scheduled_pipelines()
mock_get_subtasks.assert_called_with(state="scheduled")
self.assertTrue(call(2000001) in mock_get_subtask_parset.call_args_list)
self.assertTrue(call(2000042) in mock_get_subtask_parset.call_args_list)
mock_startPipeline.assert_called()
logger.warning('END TEST_CHECKSCHEDULED')
def test_onSubTaskScheduled(self):
""" Test whether pipelines are started with correct Parset and ID TMSS. """
logger.warning('TEST_ONSCHEDULED')
with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtask_parset') as mock_get_subtask_parset:
mock_get_subtask_parset.side_effect = lambda id: "ObsSW.Observation.processType=Pipeline\n" \
"ObsSW.Observation.Cluster.ProcessingCluster.clusterName=CEP4\n"
with unittest.mock.patch('lofar.mac.PipelineControl.PipelineControlTMSSHandler._startPipeline') as mock_startPipeline:
handler = PipelineControlTMSSHandler()
handler.onSubTaskScheduled(1234, "scheduling", "scheduled")
mock_get_subtask_parset.assert_called_with(1234)
mock_startPipeline.assert_called()
logger.warning('END TEST_ONSCHEDULED')
if __name__ == "__main__":
......
@@ -18,7 +18,9 @@
# $Id$
include(LofarCTest)
lofar_add_test(t_hdf5_io)
set_tests_properties(t_hdf5_io PROPERTIES TIMEOUT 300)
IF(BUILD_TESTING)
lofar_add_test(t_hdf5_io)
set_tests_properties(t_hdf5_io PROPERTIES TIMEOUT 300)
ENDIF(BUILD_TESTING)
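# Note: BUILD_TESTING is the standard CTest switch; guarding the registration
# with it means t_hdf5_io (and its 300 s timeout) is only added when testing is enabled.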
@@ -17,7 +17,11 @@
# $Id$
lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSSClient)
IF(BUILD_TESTING)
lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSSClient TMSS)
ELSE()
lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSSClient)
ENDIF(BUILD_TESTING)
add_subdirectory(lib)
add_subdirectory(bin)
......
@@ -87,9 +87,9 @@ class QAFilteringTMSSSubTaskBusListener(TMSSSubTaskBusListener):
with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession:
subtask = tmsssession.get_subtask(subtask_id)
spec = tmsssession.get_url_as_json_object(subtask['specifications_template'])
if '/qa_files/' in spec['type']:
if 'qa_files' == spec['type_value']:
self._send_qa_command_message(subtask_id, DEFAULT_DO_QAFILE_CONVERSION_SUBJECT)
elif '/qa_plots/' in spec['type']:
elif 'qa_plots' == spec['type_value']:
self._send_qa_command_message(subtask_id, DEFAULT_DO_QAPLOTS_SUBJECT)
def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER):
@@ -234,7 +234,15 @@ class QAService:
self.tmsssession.set_subtask_status(subtask_id, 'starting')
self.tmsssession.set_subtask_status(subtask_id, 'started')
hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id)
if subtask_id:
input_dataproducts = self.tmsssession.get_subtask_input_dataproducts(subtask_id=subtask_id)
if len(input_dataproducts) != 1:
raise ValueError("QA subtask %s should have exactly 1 input dataproduct" % subtask_id)
hdf5_path = os.path.join(input_dataproducts[0]['directory'], input_dataproducts[0]['filename'])
else:
hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id)
plot_dir_path = self._create_plots_for_h5_file(hdf5_path, otdb_id, subtask_id)
if plot_dir_path:
@@ -304,22 +312,42 @@ class QAService:
try:
obs_id = otdb_id or subtask_id
# define default h5 filename; use default cep4 qa output dir
hdf5_path = self.h5_lustre_filepath(obs_id)
h5_dir_path, h5_filename = os.path.split(hdf5_path)
nr_of_timestamps = -1
nr_of_subbands = -1
logger.info('trying to convert MS uv dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id)
cmd = ['ms2hdf5', '--cep4', '-p', '-22']
if subtask_id:
subtask = self.tmsssession.get_subtask(subtask_id=subtask_id)
nr_of_timestamps = subtask['specifications_doc'].get('nr_of_timestamps', -1)
nr_of_subbands = subtask['specifications_doc'].get('nr_of_subbands', -1)
cmd += ['-t', str(nr_of_timestamps), '-s', str(nr_of_subbands)]
logger.info('trying to convert MS uv dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id)
cmd = ['ms2hdf5', '-o', str(obs_id), '--cep4', '-p', '-22', '-t', str(nr_of_timestamps), '-s', str(nr_of_subbands), ]
cmd += ['--output_dir', h5_dir_path]
cmd += ['--output_filename', h5_filename]
input_dataproducts = self.tmsssession.get_subtask_input_dataproducts(subtask_id=subtask_id)
if not input_dataproducts:
raise ValueError("QA subtask %s has no input dataproducts" % subtask_id)
directory = input_dataproducts[0]['directory']
cmd += ['-m', directory]
output_dataproducts = self.tmsssession.get_subtask_output_dataproducts(subtask_id=subtask_id)
if len(output_dataproducts) != 1:
raise ValueError("QA subtask %s should have exactly 1 dataproduct, but it has %s. urls: %s" % (subtask_id,
len(output_dataproducts),
', '.join(dp['url'] for dp in output_dataproducts)))
hdf5_path = os.path.join(output_dataproducts[0]['directory'], output_dataproducts[0]['filename'])
cmd += ['--output_dir', output_dataproducts[0]['directory'],
'--output_filename', output_dataproducts[0]['filename']]
elif otdb_id:
# define default h5 filename; use default cep4 qa output dir
hdf5_path = self.h5_lustre_filepath(obs_id)
h5_dir_path, h5_filename = os.path.split(hdf5_path)
cmd += ['--output_dir', h5_dir_path, '--output_filename', h5_filename]
cmd += ['-o', str(obs_id)]
else:
raise ValueError("either otdb_id or subtask_id needs to be set")
# wrap the command in a cep4 docker ssh call
cmd = wrap_command_for_docker(cmd, 'adder', 'latest')
@@ -358,11 +386,28 @@ class QAService:
subtask = self.tmsssession.get_subtask(subtask_id=subtask_id)
#TODO: use settings from subtask to tweak plot_hdf5_dynamic_spectra options
input_dataproducts = self.tmsssession.get_subtask_input_dataproducts(subtask_id=subtask_id)
if len(input_dataproducts) != 1:
raise ValueError("QA subtask %s should have exactly 1 input dataproduct, but it has %s. urls: %s" % (subtask_id,
len(input_dataproducts),
', '.join(dp['url'] for dp in input_dataproducts)))
hdf5_path = os.path.join(input_dataproducts[0]['directory'], input_dataproducts[0]['filename'])
output_dataproducts = self.tmsssession.get_subtask_output_dataproducts(subtask_id=subtask_id)
if len(output_dataproducts) != 1:
raise ValueError("QA subtask %s should have exactly 1 output dataproduct, but it has %s. urls: %s" % (subtask_id,
len(output_dataproducts),
', '.join(dp['url'] for dp in output_dataproducts)))
task_plot_dir_path = output_dataproducts[0]['directory']
base_plot_dir_path = os.path.dirname(task_plot_dir_path)
for plot_options in [['-1', '-acb'], # 'hot' autocor/crosscor, per baseline scaling with distinct polarization scales, in dB
['-1', '-acg'], # 'complex' autocor/crosscor, all baseline scaling with same polarization scales, in dB
['-1', '-acn', '--raw'], # normalized 'hot' autocor/crosscor, raw
['-4']]: # delay-rate
cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (base_plot_dir_path,), '--force', '--cep4'] + plot_options + [hdf5_path]
cmd = ['plot_hdf5_dynamic_spectra', '-o', base_plot_dir_path, '--force', '--cep4'] + plot_options + [hdf5_path]
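# note: '-o' and its value are now separate argv elements; the old '-o %s'
# form glued the option and the path into a single list entry.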
# wrap the command in a cep4 ssh call to docker container
cmd = wrap_command_for_docker(cmd, 'adder', 'latest')
@@ -461,6 +506,9 @@ class QAService:
def _move_plots_to_nfs_dir(self, plot_dir_path):
try:
if not plot_dir_path.endswith('/'):
plot_dir_path += '/'
plot_dir_name = os.path.basename(plot_dir_path)
plot_nfs_base_path = os.path.join(QAService.QA_NFS_BASE_DIR, 'plots')
......
@@ -507,7 +507,6 @@ class TestQAService(unittest.TestCase):
logger.info(' -- test_05_qa_service_for_expected_behaviour_on_tmss_events -- ')
from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
from lofar.sas.tmss.tmss.tmssapp.subtasks import create_observation_to_qafile_subtask, create_qafile_to_qaplots_subtask
tmss_client = self.tmss_test_env.create_tmss_client()
with tmss_client:
@@ -538,8 +537,9 @@ class TestQAService(unittest.TestCase):
#TODO: merge adder branch into trunk so we can use plot_hdf5_dynamic_spectra on the test-h5 file to create plots
if 'plot_hdf5_dynamic_spectra' in cmd:
# replace the plot_hdf5_dynamic_spectra command which runs normally in the docker container
# by a call to bash true, so the 'plot_hdf5_dynamic_spectra' call returns 0 exit code
mocked_cmd = ['true']
# by a call to mkdir, so the 'plot_hdf5_dynamic_spectra' call returns a 0 exit code and we have an output directory to play with.
plot_path = os.path.join(cmd[cmd.index('-o')+1], os.path.basename(cmd[-1]).replace('_QA.h5',''))
mocked_cmd = ['mkdir', '-p', plot_path, ';', 'touch', os.path.join(plot_path, 'foo.bar')]
logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''',
' '.join(mocked_cmd), ' '.join(cmd))
return mocked_cmd
@@ -559,24 +559,37 @@ class TestQAService(unittest.TestCase):
auth=(self.tmss_test_env.client_credentials.dbcreds.user,
self.tmss_test_env.client_credentials.dbcreds.password))
# create an observation output dataproduct, which automatically creates the needed observation subtask and its outputs etc.
uv_dataproduct = tdc.post_data_and_get_response_as_json_object(tdc.Dataproduct(filename="my_uv_dataproduct.MS", directory=self.TEST_DIR), '/dataproduct/')
uvdp_producer = tdc.get_response_as_json_object(uv_dataproduct['producer'])
obs_subtask = tdc.get_response_as_json_object(uvdp_producer['subtask'])
tmss_client.set_subtask_status(obs_subtask['id'], 'finished')
qafile_subtask_template = tmss_client.get_subtask_template(name="QA file conversion")
qafile_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qafile_subtask_template['schema'])
subtask_url = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qafile_subtask_template['url'],
specifications_doc=qafile_subtask_spec_doc),
'/subtask/')
subtask_id = subtask_url.split('/')[-2]
subtask = tdc.post_data_and_get_response_as_json_object(tdc.Subtask(specifications_template_url=qafile_subtask_template['url'],
specifications_doc=qafile_subtask_spec_doc), '/subtask/')
subtask_id = subtask['id']
qaplots_subtask_template = tmss_client.get_subtask_template(name="QA plots")
qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template['schema'])
subtask_url2 = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qaplots_subtask_template['url'],
specifications_doc=qaplots_subtask_spec_doc), '/subtask/')
subtask_id2 = subtask_url2.split('/')[-2]
subtask_input = tdc.post_data_and_get_response_as_json_object(tdc.SubtaskInput(subtask['url'], dataproduct_urls=[uv_dataproduct['url']], subtask_output_url=uvdp_producer['url']), '/subtask_input/')
subtask_output = tdc.post_data_and_get_response_as_json_object(tdc.SubtaskOutput(subtask['url']), '/subtask_output/')
tmss_client.set_subtask_status(subtask_id, 'defined')
# trigger a qa process by setting the tmss subtask to scheduled
# this will result in the QAService actually doing its magic
tmss_client.set_subtask_status(subtask_id, 'scheduled')
# but first, we need to override where the output dataproduct is written,
# so, stop listening, schedule (getting the default output dataproduct), and then override the directory, and start listening again (picking up the scheduled-event).
qaservice.filtering_tmssbuslistener.stop_listening()
tmss_client.schedule_subtask(subtask_id)
dataproduct = tmss_client.get_subtask_output_dataproducts(subtask_id)[0]
tmss_client.session.patch(dataproduct['url'], json={'directory': os.path.join(self.TEST_DIR, 'h5')}, params={'format':'json'})
dataproduct = tmss_client.get_subtask_output_dataproducts(subtask_id)[0]
qaservice.filtering_tmssbuslistener.start_listening()
qaplots_subtask_template = tmss_client.get_subtask_template(name="QA plots")
qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template['schema'])
# start waiting until ConvertedMS2Hdf5 event message received (or timeout)
qa_listener.converted_event.wait(30)
@@ -600,7 +613,24 @@ class TestQAService(unittest.TestCase):
self.assertTrue('subtask_id' in qa_listener.clustered_msg_content)
self.assertTrue('hdf5_file_path' in qa_listener.clustered_msg_content)
tmss_client.set_subtask_status(subtask_id2, 'scheduled')
subtask2 = tdc.post_data_and_get_response_as_json_object(tdc.Subtask(specifications_template_url=qaplots_subtask_template['url'],
specifications_doc=qaplots_subtask_spec_doc), '/subtask/')
tdc.post_data_and_get_response_as_json_object(tdc.SubtaskInput(subtask2['url'], dataproduct_urls=[dataproduct['url']], subtask_output_url=subtask_output['url']), '/subtask_input/')
subtask2_output = tdc.post_data_and_get_response_as_json_object(tdc.SubtaskOutput(subtask2['url']), '/subtask_output/')
subtask2_id = subtask2['id']
tmss_client.set_subtask_status(subtask2_id, 'defined')
# normally the tmss_subtask_scheduling service schedules the next subtask automagically
# But in this test we do that by hand, since the tmss_subtask_scheduling service is not part of this test.
# Again, we need to override where the output dataproduct is written,
# so, stop listening, schedule (getting the default output dataproduct), and then override the directory, and start listening again (picking up the scheduled-event).
qaservice.filtering_tmssbuslistener.stop_listening()
tmss_client.schedule_subtask(subtask2_id)
dataproduct = tmss_client.get_subtask_output_dataproducts(subtask2_id)[0]
tmss_client.session.patch(dataproduct['url'], json={'directory': os.path.join(self.TEST_DIR, 'plots', 'L%s' % (obs_subtask['id'],))}, params={'format':'json'})
dataproduct = tmss_client.get_subtask_output_dataproducts(subtask2_id)[0]
qaservice.filtering_tmssbuslistener.start_listening()
# start waiting until CreatedInspectionPlots event message received (or timeout)
qa_listener.plotted_event.wait(30)
......
@@ -92,7 +92,7 @@ def main_set_subtask_state():
try:
with TMSSsession.create_from_dbcreds_for_ldap() as session:
changed_subtask = session.set_subtask_status(args.subtask_id, args.state)
print("%s now has state %s" % (changed_subtask['url'], changed_subtask['state']))
print("%s now has state %s, see: %s" % (changed_subtask['id'], changed_subtask['state_value'], changed_subtask['url']))
except Exception as e:
print(e)
exit(1)
......
@@ -166,7 +166,7 @@ class TMSSsession(object):
params['format'] ='json'
response = self.session.get(url=full_url, params=params)
logger.info("[%s] %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url)
logger.info("%s %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url)
if response.status_code >= 200 and response.status_code < 300:
result = json.loads(response.content.decode('utf-8'))
@@ -204,6 +204,14 @@ class TMSSsession(object):
return None
return result
def get_subtask_output_dataproducts(self, subtask_id: int) -> list:
'''get the output dataproducts of the subtask with the given subtask_id'''
return self.get_path_as_json_object('subtask/%s/output_dataproducts' % subtask_id)
def get_subtask_input_dataproducts(self, subtask_id: int) -> list:
'''get the input dataproducts of the subtask with the given subtask_id'''
return self.get_path_as_json_object('subtask/%s/input_dataproducts' % subtask_id)
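# Illustrative usage sketch (assumes a reachable TMSS instance, valid LDAP
# credentials, and an imported 'os'; the subtask id is hypothetical):
#
#   with TMSSsession.create_from_dbcreds_for_ldap() as session:
#       for dp in session.get_subtask_input_dataproducts(subtask_id=42):
#           print(os.path.join(dp['directory'], dp['filename']))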
def specify_observation_task(self, task_id: int) -> requests.Response:
"""specify observation for the given draft task by just doing a REST API call """
result = self.session.get(url='%s/api/task/%s/specify_observation' % (self.base_url, task_id))
......
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## Available Scripts
In the project directory, you can run:
### `yarn start`
Runs the app in the development mode.<br />
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
The page will reload if you make edits.<br />
You will also see any lint errors in the console.
### `yarn test`
Launches the test runner in the interactive watch mode.<br />
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
### `yarn build`
Builds the app for production to the `build` folder.<br />
It correctly bundles React in production mode and optimizes the build for the best performance.
The build is minified and the filenames include the hashes.<br />
Your app is ready to be deployed!
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
### `yarn eject`
**Note: this is a one-way operation. Once you `eject`, you can’t go back!**
If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own.
You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it.
## Learn More
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
To learn React, check out the [React documentation](https://reactjs.org/).
### Code Splitting
This section has moved here: https://facebook.github.io/create-react-app/docs/code-splitting
### Analyzing the Bundle Size
This section has moved here: https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size
### Making a Progressive Web App
This section has moved here: https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app
### Advanced Configuration
This section has moved here: https://facebook.github.io/create-react-app/docs/advanced-configuration
### Deployment
This section has moved here: https://facebook.github.io/create-react-app/docs/deployment
### `yarn build` fails to minify
This section has moved here: https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify
{
"name": "tmss_gui",
"version": "0.1.0",
"private": true,
"dependencies": {
"@json-editor/json-editor": "^2.3.0",
"@testing-library/jest-dom": "^4.2.4",
"@testing-library/react": "^9.3.2",
"@testing-library/user-event": "^7.1.2",
"axios": "^0.19.2",
"font-awesome": "^4.7.0",
"history": "^5.0.0",
"node-sass": "^4.12.0",
"primeflex": "^1.3.0",
"primeicons": "^4.0.0",
"primereact": "^4.2.2",
"react": "^16.13.1",
"react-app-polyfill": "^1.0.6",
"react-bootstrap": "^1.0.1",
"react-bootstrap-datetimepicker": "0.0.22",
"react-dom": "^16.13.1",
"react-frame-component": "^4.1.2",
"react-json-view": "^1.19.1",
"react-router-dom": "^5.2.0",
"react-scripts": "3.4.1",
"react-transition-group": "^1.2.1",
"typescript": "^3.9.5",
"yup": "^0.29.1"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
},
"eslintConfig": {
"extends": "react-app"
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Telescope Manager Specification System"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>TMSS</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:
.App {
/* text-align: center; */
}
.App-logo {
height: 40vmin;
pointer-events: none;
}
@media (prefers-reduced-motion: no-preference) {
.App-logo {
animation: App-logo-spin infinite 20s linear;
}
}
.App-header {
background-color: #303d59;
height: 50px;
display: flex;
flex-direction: column;
/* align-items: center; */
justify-content: center;
font-size: calc(10px + 2vmin);
color: white;
padding-left: 10px;
}
.App-link {
color: #61dafb;
}
@keyframes App-logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
import React, {Component} from 'react';
import { BrowserRouter as Router } from 'react-router-dom';
import {AppTopbar} from './layout/components/AppTopbar';
import {AppMenu} from './layout/components/AppMenu';
import { AppFooter } from './layout/components/AppFooter';
import {RoutedContent} from './routes';
// import {Dashboard} from './routes/dashboard/dashboard';
import './layout/layout.scss';
// import './App.css';
class App extends Component {
constructor() {
super();
this.state = {
currentMenu: '',
currentPath: '/'
}
this.onMenuItemClick = this.onMenuItemClick.bind(this)
this.menu = [
{label: 'Dashboard', icon: 'pi pi-fw pi-home', to:'/dashboard'},
{label: 'Scheduling Units', icon: 'pi pi-fw pi-calendar', to:'/scheduling'}
];
// this.menuComponent = {'Dashboard': Dashboard}
}
onMenuItemClick(event) {
console.log(event);
this.setState({currentMenu:event.item.label, currentPath: event.item.path});
}
render() {
return (
<React.Fragment>
<div className="App">
<AppTopbar></AppTopbar>
<Router basename={ this.state.currentPath }>
<AppMenu model={this.menu} onMenuItemClick={this.onMenuItemClick} />
<div className="layout-wrapper layout-static layout-static-sidebar-active">
<div className="layout-main">
<RoutedContent />
</div>
</div>
</Router>
<AppFooter></AppFooter>
</div>
</React.Fragment>
);
}
}
export default App;
import React from 'react';
import { render } from '@testing-library/react';
import App from './App';
test('renders ASTRON in footer', () => {
const { getByText } = render(<App />);
const linkElement = getByText("ASTRON");
expect(linkElement).toBeInTheDocument();
});
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
monospace;
}
import React from 'react';
import ReactDOM from 'react-dom';
import './index.css';
import App from './App';
import * as serviceWorker from './serviceWorker';
ReactDOM.render(
<React.StrictMode>
<App />
</React.StrictMode>,
document.getElementById('root')
);
// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: https://bit.ly/CRA-PWA
serviceWorker.unregister();
// Suggested location to add your overrides so that migration is easy: just update the SASS folder in the future
\ No newline at end of file
/* General */
$fontSize:14px;
$bodyBgColor:#edf0f5;
$textColor:#333333;
$textSecondaryColor:#707070;
$borderRadius:3px;
$dividerColor:#e3e3e3;
$transitionDuration:.2s;
$maskBgColor:#424242;
/* Menu Common */
$menuitemBadgeBgColor:#007be5;
$menuitemBadgeColor:#ffffff;
$submenuFontSize:13px;
$menuitemActiveRouteColor:#1fa1fc;
/* Menu Light */
$menuBgColorFirst:#f3f4f9;
$menuBgColorLast:#d7dbe8;
$menuitemColor:#232428;
$menuitemHoverColor:#0388e5;
$menuitemActiveColor:#0388e5;
$menuitemActiveBgColor:#ffffff;
$menuitemBorderColor:rgba(207, 211, 224, 0.6);
/* Menu Dark */
$menuDarkBgColorFirst:#ffffff;
$menuDarkBgColorLast:#ffffff;
$menuitemDarkColor:#0388e5;
$menuitemDarkHoverColor:#0388e5;
$menuitemDarkActiveColor:#0388e5;
$menuitemDarkActiveBgColor:#ffffff;
$menuitemDarkBorderColor:rgba(246, 248, 252, 0.918);
/* Topbar */
$topbarLeftBgColor:#3d4977;
$topbarRightBgColor:#3d4977;
$topbarItemBadgeBgColor:#ef6262;
$topbarItemBadgeColor:#ffffff;
$topbarItemColor:#ffffff;
$topbarItemHoverColor:#c3e8fb;
$topbarSearchInputBorderBottomColor:#ffffff;
$topbarSearchInputColor:#ffffff;
/* Footer */
$footerBgColor:#ffffff;
\ No newline at end of file
import React, { Component } from 'react';
export class AppFooter extends Component {
render() {
return (
<div className="layout-footer">
<span className="footer-text" style={{'marginRight': '5px'}}><strong>TMSS</strong> by <strong>ASTRON</strong></span>
</div>
);
}
}
export default AppFooter;
\ No newline at end of file
import React from 'react';
import { render } from '@testing-library/react';
import AppFooter from './AppFooter';
test('renders ASTRON in footer', () => {
const { getByText } = render(<AppFooter />);
const linkElement = getByText("ASTRON");
expect(linkElement).toBeInTheDocument();
});
import React, { Component } from 'react';
import {NavLink} from 'react-router-dom'
import PropTypes from 'prop-types';
import classNames from 'classnames';
class AppSubmenu extends Component {
static defaultProps = {
className: null,
items: null,
onMenuItemClick: null,
root: false,
permissions: null
}
static propTypes = {
className: PropTypes.string,
items: PropTypes.array,
onMenuItemClick: PropTypes.func,
root: PropTypes.bool,
permissions: PropTypes.array
}
constructor(props) {
super(props);
this.state = {activeIndex: null};
}
onMenuItemClick(event, item, index) {
//avoid processing disabled items
if(item.disabled) {
event.preventDefault();