diff --git a/src/ska/logging/transactions.py b/src/ska/logging/transactions.py
index 514287133d822d9d1a3ac4398ccd759c72fb6798..a29cdf60b7b7f4ae16e7f1b3b565757bc0fd3445 100644
--- a/src/ska/logging/transactions.py
+++ b/src/ska/logging/transactions.py
@@ -18,8 +18,7 @@ class TransactionIDTagsFilter(logging.Filter):
     """
 
     def __init__(self, *args, **kwargs):
-        """Override logging.Filter.__init__ to keep track of the transaction ID and callstack
-        """
+        """Override logging.Filter.__init__ to keep track of the transaction ID and callstack"""
         self.transaction_id = kwargs.pop("transaction_id", None)
         self.callstack = kwargs.pop("call_stack", None)
         super(TransactionIDTagsFilter, self).__init__(*args, **kwargs)
@@ -182,7 +181,9 @@ class Transaction:
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         if exc_type:
-            self.logger.exception(f"Transaction[{self._transaction_id}]: Exception[{self._name}]")
+            self.logger.exception(
+                f"Transaction[{self._transaction_id}]: Exception[{self._name}] marker[{self._random_marker}]"
+            )
 
         self.logger.info(
             f"Transaction[{self._transaction_id}]: Exit[{self._name}] "
diff --git a/tests/conftest.py b/tests/conftest.py
index 93171c53d2bfe219aa51f832b620d07b556af950..24184a14606025a1dd0861da951bfb0729c38a9d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -131,6 +131,12 @@ def get_all_record_logs(logger):
     return recorder.logs
 
 
+def clear_logger_logs(logger):
+    recorder = get_named_handler(logger, "recorder")
+    recorder.logs = []
+    recorder.records = []
+
+
 def get_named_handler(logger, name):
     """Search up through logger hierarchy to find a handler with the specified name."""
     while logger:
diff --git a/tests/test_transactions_threaded.py b/tests/test_transactions_threaded.py
new file mode 100644
index 0000000000000000000000000000000000000000..62c00ada916c0e601d9f818c82c8b5bad0a7212c
--- /dev/null
+++ b/tests/test_transactions_threaded.py
@@ -0,0 +1,164 @@
+"""Tests for the logging transactions module focusing on threads"""
+import pytest
+
+from threading import Thread
+
+from ska.logging import transaction
+from tests.conftest import get_all_record_logs, clear_logger_logs
+
+
+class ThreadingLogsGenerator:
+    """Generate logs by spawning a number of threads and logging in them.
+    Some use the transaction context and some do not.
+    """
+    def __init__(self, logger=None, pass_logger=False):
+        self.logger = logger
+        self.pass_logger = pass_logger
+
+    def thread_with_transaction_exception(self, thread_index):
+        if self.pass_logger:
+            try:
+                with transaction(f"Transaction thread [{thread_index}]", logger=self.logger):
+                    self.logger.info(
+                        f"Transaction thread in transaction [{thread_index}], in transaction"
+                    )
+                    raise RuntimeError("An exception has occurred")
+            except RuntimeError:
+                pass
+        else:
+            try:
+                with transaction(f"Transaction thread [{thread_index}]"):
+                    self.logger.info(
+                        f"Transaction thread in transaction [{thread_index}], in transaction"
+                    )
+                    raise RuntimeError("An exception has occurred")
+            except RuntimeError:
+                pass
+
+    def thread_with_transaction(self, thread_index):
+        if self.pass_logger:
+            with transaction(f"Transaction thread [{thread_index}]", logger=self.logger):
+                self.logger.info(f"Transaction thread [{thread_index}], in transaction")
+            self.logger.info(f"Thread log [{thread_index}], no transaction")
+        else:
+            with transaction(f"Transaction thread [{thread_index}]"):
+                self.logger.info(f"Transaction thread [{thread_index}], in transaction")
+            self.logger.info(f"Thread log [{thread_index}], no transaction")
+
+    def thread_without_transaction(self, thread_index):
+        self.logger.info(f"Thread log [{thread_index}], no transaction")
+
+    def get_logs(self):
+        clear_logger_logs(self.logger)
+        test_threads = []
+        for thread_index in range(10):
+            thread_in_transaction = Thread(
+                target=self.thread_with_transaction, args=(thread_index,)
+            )
+            thread_no_transaction = Thread(
+                target=self.thread_without_transaction, args=(thread_index,)
+            )
+            thread_exception = Thread(
+                target=self.thread_with_transaction_exception, args=(thread_index,)
+            )
+            test_threads.append(thread_in_transaction)
+            test_threads.append(thread_no_transaction)
+            test_threads.append(thread_exception)
+
+        for t in test_threads:
+            t.start()
+        for t in test_threads:
+            t.join()
+
+        return get_all_record_logs(self.logger)
+
+
+@pytest.fixture
+def threaded_logs_local_logger(ensures_tags_logger):
+    tlg = ThreadingLogsGenerator(logger=ensures_tags_logger, pass_logger=True)
+    return tlg.get_logs()
+
+
+@pytest.fixture
+def threaded_logs_global_logger(ensures_tags_logger):
+    tlg = ThreadingLogsGenerator(logger=ensures_tags_logger, pass_logger=False)
+    return tlg.get_logs()
+
+
+class TestThreadScenarios:
+    @pytest.mark.xfail
+    def test_logs_outside_transaction_has_no_transaction_ids(
+        self, threaded_logs_global_logger, threaded_logs_local_logger
+    ):
+        all_logs = threaded_logs_global_logger + threaded_logs_local_logger
+        outside_transaction_logs = [log for log in all_logs if "no transaction" in log]
+        assert outside_transaction_logs
+        for log in outside_transaction_logs:
+            assert "transaction_id:txn" not in log, f"transaction_id should not be in log {log}"
+
+    @pytest.mark.xfail
+    def test_no_duplicate_transaction_ids(self, threaded_logs_local_logger):
+        all_logs = threaded_logs_local_logger
+        transaction_logs = [log for log in all_logs if "Transaction thread" in log]
+        assert transaction_logs
+        for log in transaction_logs:
+            assert "transaction_id" in log, f"Expected transaction_id in {log}"
+            assert log.count("transaction_id") == 1, f"Only expected one transaction_id in {log}"
+
+    def test_internal_log_has_transaction_id(self, threaded_logs_local_logger):
+        all_logs = threaded_logs_local_logger
+        internal_logs = [log for log in all_logs if "in transaction" in log]
+        assert internal_logs
+        for log in internal_logs:
+            assert "transaction_id" in log
+
+    def test_internal_log_has_no_transaction_id(self, threaded_logs_global_logger):
+        all_logs = threaded_logs_global_logger
+        internal_logs = [log for log in all_logs if "in transaction" in log]
+        assert internal_logs
+        for log in internal_logs:
+            assert "transaction_id" not in log
+
+    def test_enter_exit_exception_matches(
+        self, threaded_logs_global_logger, threaded_logs_local_logger
+    ):
+        all_logs = threaded_logs_global_logger + threaded_logs_local_logger
+        enter_exit_logs = []
+        enter_exit_logs += [log for log in all_logs if "Enter[" in log]
+        enter_exit_logs += [log for log in all_logs if "Exit[" in log]
+        assert enter_exit_logs
+        assert len(enter_exit_logs) % 2 == 0
+
+        # Group enter/exit logs by the random marker at the end of each message
+        markers = {}
+        for log in enter_exit_logs:
+            marker = log[-6:-1]
+            assert marker.isdigit()
+            markers.setdefault(marker, []).append(log)
+        assert markers
+
+        for key, log_pair in markers.items():
+            assert len(log_pair) == 2
+            assert "Enter" in log_pair[0]
+            assert "Exit" in log_pair[1]
+            assert log_pair[0].endswith(f"[{key}]")
+            assert log_pair[1].endswith(f"[{key}]")
+
+        exception_logs = [log for log in all_logs if "RuntimeError" in log]
+        for log in exception_logs:
+            marker_start = log.index("marker[")
+            marker = log[marker_start + 7 : marker_start + 12]
+            assert marker.isdigit()
+            assert marker in markers, "An exception marker has no match with a start/end log"
+            markers[marker].append(log)
+
+        # The number of test transactions with exceptions equals the number without
+        count_exceptions = 0
+        count_no_exceptions = 0
+        for key, logs in markers.items():
+            if len(logs) == 2:
+                count_no_exceptions += 1
+            if len(logs) == 3:
+                count_exceptions += 1
+        assert count_no_exceptions != 0
+        assert count_no_exceptions == count_exceptions
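
For context, a minimal sketch of the usage pattern these tests exercise: entering the `transaction` context manager from several threads and logging both inside and outside it. Only `ska.logging.transaction` and the log text are taken from the diff above; the logger name and the `logging.basicConfig` setup are illustrative assumptions, not part of this change (the tests use the `ensures_tags_logger` fixture and a recording handler instead).

```python
# Illustrative sketch only, not part of the patch.
import logging
from threading import Thread

from ska.logging import transaction

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("ska.example")  # assumed logger name


def worker(index):
    # Logs emitted inside the context are expected to carry a transaction_id tag;
    # the log emitted after the context should not.
    with transaction(f"Transaction thread [{index}]", logger=logger):
        logger.info(f"Transaction thread [{index}], in transaction")
    logger.info(f"Thread log [{index}], no transaction")


threads = [Thread(target=worker, args=(i,)) for i in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```

The two xfail tests above appear to document that these properties do not yet hold when transactions run concurrently across threads.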