From a75be5d8e6b5664e27072cde9d4d994876d7a7b3 Mon Sep 17 00:00:00 2001 From: Joerg Henrichs Date: Thu, 5 Feb 2026 00:26:25 +1100 Subject: [PATCH 1/7] #3318 Only set PSyclone and fparser log levels (not the root log level). --- src/psyclone/generator.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/psyclone/generator.py b/src/psyclone/generator.py index 2865f5d77d..c121a23e29 100644 --- a/src/psyclone/generator.py +++ b/src/psyclone/generator.py @@ -621,13 +621,21 @@ def main(arguments): # Set the logging system up. loglevel = LOG_LEVELS[args.log_level] + logger_psyclone = logging.getLogger("psyclone") + logger_fparser = logging.getLogger("fparser") if args.log_file: - logname = args.log_file - logging.basicConfig(filename=logname, - level=loglevel) + handler = logging.FileHandler(args.log_file, mode="a", + encoding="utf-8") + else: - logging.basicConfig(level=loglevel) - logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + + formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s') + for logger in [logger_fparser, logger_psyclone]: + logger.setLevel(loglevel) + handler.setFormatter(formatter) + logger.addHandler(handler) + logger.debug("Logging system initialised. Level is %s.", args.log_level) # Validate that the given arguments are for the right operation mode From a8e6aada7c53ef4389867f7ed94bc2a72e90ac92 Mon Sep 17 00:00:00 2001 From: Joerg Henrichs Date: Thu, 5 Feb 2026 14:51:13 +1100 Subject: [PATCH 2/7] #3318 Minor code improvements, updated documentation, and fixed all tests. 
--- doc/developer_guide/coding-style.rst | 20 ++++++++- src/psyclone/generator.py | 19 +++++---- src/psyclone/tests/configuration_test.py | 2 +- src/psyclone/tests/conftest.py | 29 ++++++++++++- src/psyclone/tests/generator_test.py | 8 ++-- src/psyclone/tests/gocean1p0_config_test.py | 2 +- src/psyclone/tests/parse/file_info_test.py | 16 +++---- .../tests/parse/module_manager_test.py | 4 +- .../tests/psyad/adjoint_visitor_test.py | 30 ++++++++----- src/psyclone/tests/psyad/tl2ad_test.py | 6 +-- .../tests/psyir/nodes/extract_node_test.py | 3 +- .../tests/psyir/nodes/omp_directives_test.py | 21 ++++++---- .../tests/psyir/symbols/symbol_table_test.py | 9 ++-- .../tests/psyir/tools/call_tree_utils_test.py | 42 ++++++++++++------- .../tests/psyir/tools/read_write_info_test.py | 3 +- .../parallel_loop_trans_test.py | 6 ++- 16 files changed, 153 insertions(+), 67 deletions(-) diff --git a/doc/developer_guide/coding-style.rst b/doc/developer_guide/coding-style.rst index 86da994d5d..6537870583 100644 --- a/doc/developer_guide/coding-style.rst +++ b/doc/developer_guide/coding-style.rst @@ -167,7 +167,8 @@ options or programmatically with: .. code-block:: python import logging - logging.basicConfig(level=logging.LEVEL) + logger = logging.getLogger("psyclone") + logger.setLevel(logging.LEVEL) where ``logging.LEVEL`` is the required level of those defined in the standard logging library. @@ -200,8 +201,23 @@ standard ``warnings`` import, i.e.: warnings.warn(self._deprecation_warning, DeprecationWarning, 2) Logging should also be explicitly tested through the use of the ``caplog`` -pytest fixture. +pytest fixture. It is important that the expected logger is specified +in the test: if `main` (in `generate.py`) is executed, a handler is attached +to the PSyclone top-level logger. This means that any log event is not +propagated to the Python root level logger, and ``caplog`` without +a logger specified will only test the root logger. 
This can result in tests passing when executed on their own, but failing randomly when executed with other tests: these tests might call ``main`` (and therefore prevent the root logger from receiving the message), and ``caplog`` will then fail (if no logger is specified). Here is the proper way of testing log messages in a test: .. code-block:: python + + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.read_write_info"): + ... + assert "your message here" in caplog.text .. _interface_description: diff --git a/src/psyclone/generator.py b/src/psyclone/generator.py index c121a23e29..cfffaf71ce 100644 --- a/src/psyclone/generator.py +++ b/src/psyclone/generator.py @@ -620,21 +620,26 @@ def main(arguments): args = parser.parse_args(arguments) # Set the logging system up. - loglevel = LOG_LEVELS[args.log_level] - logger_psyclone = logging.getLogger("psyclone") - logger_fparser = logging.getLogger("fparser") if args.log_file: handler = logging.FileHandler(args.log_file, mode="a", encoding="utf-8") - else: handler = logging.StreamHandler() - formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s') + # We set both top-level loggers for PSyclone and fparser to + # the same configuration: + logger_psyclone = logging.getLogger('psyclone') + logger_fparser = logging.getLogger('fparser') + formatter = logging.Formatter('%(levelname)s:%(name)s:%(message)s') for logger in [logger_fparser, logger_psyclone]: - logger.setLevel(loglevel) + logger.setLevel(LOG_LEVELS[args.log_level]) handler.setFormatter(formatter) - logger.addHandler(handler) + # Certain tests call main several times, which would add handlers + # over and over. Only attach a handler once: + if len(logger.handlers) == 0: + logger.addHandler(handler) + + logger = logging.getLogger(__name__) logger.debug("Logging system initialised. 
Level is %s.", args.log_level) diff --git a/src/psyclone/tests/configuration_test.py b/src/psyclone/tests/configuration_test.py index 171b006c75..babe095399 100644 --- a/src/psyclone/tests/configuration_test.py +++ b/src/psyclone/tests/configuration_test.py @@ -633,7 +633,7 @@ def test_deprecated_access_mapping(tmpdir, caplog): ("[lfric]\naccess_mapping = gh_read: read, gh_write: write, " "gh_readwrite: readwrite,\n gh_inc: inc, " "gh_sum: sum\n")) - with caplog.at_level(logging.WARN): + with caplog.at_level(logging.WARN, logger="psyclone.configuration"): _ = get_config(config_file, content) assert "Configuration file contains an ACCESS_MAPPING entry" in caplog.text diff --git a/src/psyclone/tests/conftest.py b/src/psyclone/tests/conftest.py index f31d0dcfce..7d40c4afd9 100644 --- a/src/psyclone/tests/conftest.py +++ b/src/psyclone/tests/conftest.py @@ -39,8 +39,11 @@ ''' Module which performs pytest set-up so that we can specify command-line options. Also creates certain test fixtures. ''' -import os import copy +import logging +import os +import sys + import pytest from fparser.two.parser import ParserFactory @@ -94,6 +97,30 @@ def have_graphviz(): # pragma: no-cover return True +@pytest.fixture(scope="function", autouse=True) +def setup_logging(): + """ + This fixture sets up logging the same way `main` does. + Using this ensures that any caplog tests specify the expected + logger. If `main` is executed, a handler is attached to + the psyclone top-level logger, which means the messages are not + propagated to the python root logger, and caplog `at_level(x)` + will fail (since it only tests the root logger, which is not called). + This can result in inconsistent test failures depending on order + of test execution: test will pass if `main` has not been executed + by another test previously (since no handler is then attached), + and will fail if `main` has been executed previously. 
+ The proper way of testing with caplog is: + caplog.at_level(x, logger="psyclone.module.module...") + """ + + logger_psyclone = logging.getLogger('psyclone') + handler = logging.StreamHandler() + logger_psyclone.addHandler(handler) + # Disable the logger, which is the default + logger_psyclone.setLevel(sys.maxsize) + + @pytest.fixture(scope="session", autouse=True) def setup_psyclone_config(): '''This per session fixture defines the environment variable diff --git a/src/psyclone/tests/generator_test.py b/src/psyclone/tests/generator_test.py index 75b08a6836..8066986e63 100644 --- a/src/psyclone/tests/generator_test.py +++ b/src/psyclone/tests/generator_test.py @@ -485,7 +485,7 @@ def _broken(_1, _2): raise ValueError("This is a test") monkeypatch.setattr(FortranReader, "psyir_from_file", _broken) - with caplog.at_level(logging.ERROR): + with caplog.at_level(logging.ERROR, logger="psyclone.generator"): with pytest.raises(SystemExit): _ = generate( os.path.join(BASE_PATH, "gocean1p0", "single_invoke.f90"), @@ -899,7 +899,7 @@ def test_keep_comments_and_keep_directives(capsys, caplog, tmpdir_factory): """ assert output == correct - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, logger="psyclone.generator"): main([filename, "--keep-directives"]) assert ("keep_directives requires keep_comments so " "PSyclone enabled keep_comments." in caplog.text) @@ -1296,7 +1296,7 @@ def test_code_transformation_fixed_form(tmpdir, capsys, caplog): caplog.clear() # Check an unknown file extension gives a log message and fails for a # fixed form input. 
- with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.generator"): inputfile = str(tmpdir.join("fixed_form.1s2")) with open(inputfile, "w", encoding='utf-8') as my_file: my_file.write(code) @@ -1358,7 +1358,7 @@ def test_code_transformation_parse_failure(tmpdir, caplog, capsys): inputfile = str(tmpdir.join("funny_syntax.f90")) with open(inputfile, "w", encoding='utf-8') as my_file: my_file.write(code) - with caplog.at_level(logging.ERROR): + with caplog.at_level(logging.ERROR, logger="psyclone.generator"): with pytest.raises(SystemExit): code_transformation_mode(inputfile, None, None, False, False, False) diff --git a/src/psyclone/tests/gocean1p0_config_test.py b/src/psyclone/tests/gocean1p0_config_test.py index d64b45cc3f..ec0f1f4e4a 100644 --- a/src/psyclone/tests/gocean1p0_config_test.py +++ b/src/psyclone/tests/gocean1p0_config_test.py @@ -88,7 +88,7 @@ def test_command_line(capsys, caplog): GOLoop._bounds_lookup = {} # Check that --config with a parameter is accepted but logs a warning # about the deprecated access_mapping entry. - with caplog.at_level(logging.WARN): + with caplog.at_level(logging.WARN, logger="psyclone.configuration"): main(options+["--config", config_file, f90_file]) assert ("Configuration file contains an ACCESS_MAPPING entry. 
This is " "deprecated" in caplog.text) diff --git a/src/psyclone/tests/parse/file_info_test.py b/src/psyclone/tests/parse/file_info_test.py index 0c53579c90..662a9cf4e3 100644 --- a/src/psyclone/tests/parse/file_info_test.py +++ b/src/psyclone/tests/parse/file_info_test.py @@ -84,7 +84,7 @@ def test_file_info_missing_file(caplog): """ finfo = FileInfo("missing.txt") - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): with pytest.raises(FileNotFoundError) as err: _ = finfo.get_source_code() assert "Source file 'missing.txt': loading source code" in caplog.text @@ -101,7 +101,7 @@ def test_file_info_cached_source_code(tmpdir, caplog): with open(fname, "w", encoding="utf-8") as fout: fout.write(content) finfo = FileInfo(fname, cache_active=True) - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): input1 = finfo.get_source_code() assert input1 == content assert "a_file.txt': loaded OK" in caplog.text @@ -116,14 +116,14 @@ def test_file_info_cached_source_code(tmpdir, caplog): assert finfo._cache_data_save is None # Load fparser tree to start caching - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): finfo.get_fparser_tree() assert finfo._cache_data_save is not None assert "No cache file '" in caplog.text assert "Cache file updated with hashsum " in caplog.text caplog.clear() - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): finfo = FileInfo(fname, cache_active=True) input1 = finfo.get_fparser_tree() assert finfo._cache_data_load is not None @@ -285,7 +285,7 @@ def test_file_info_load_from_cache_corrupted(tmpdir, caplog): assert file_info._cache_data_save is None # Load with damaged cache file - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): psyir_node = 
file_info.get_psyir() assert isinstance(psyir_node, Node) assert "Error while reading cache file - ignoring: " in caplog.text @@ -353,7 +353,7 @@ def test_file_info_source_changed(tmpdir, caplog): file_info: FileInfo = FileInfo(filename, cache_active=True) # Load, but not from cache - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): psyir_node = file_info.get_psyir() assert "Cache hashsum mismatch: source " in caplog.text assert "Cache file updated with hashsum " in caplog.text @@ -415,7 +415,7 @@ def test_file_info_cachefile_not_writable(tmpdir, caplog): # If the psyir, hence, fparser tree is requested, creating # the cache will fail, but the psyir node itself will # still be returned. - with caplog.at_level(logging.WARN): + with caplog.at_level(logging.WARN, logger="psyclone.parse.file_info"): psyir_node = file_info.get_psyir() assert isinstance(psyir_node, Node) assert "Unable to write to cache file: " in caplog.text @@ -438,7 +438,7 @@ def fun_exception(a, b): monkeypatch.setattr("pickle.dump", fun_exception) - with caplog.at_level(logging.WARN): + with caplog.at_level(logging.WARN, logger="psyclone.parse.file_info"): psyir_node = file_info.get_psyir() assert isinstance(psyir_node, Node) assert "Error while storing cache data - ignoring:" in caplog.text diff --git a/src/psyclone/tests/parse/module_manager_test.py b/src/psyclone/tests/parse/module_manager_test.py index a2fdcddbc1..57a1f2d8af 100644 --- a/src/psyclone/tests/parse/module_manager_test.py +++ b/src/psyclone/tests/parse/module_manager_test.py @@ -507,13 +507,13 @@ def test_mod_manager_load_all_module_trigger_error_module_read_twice( mod_man.add_files("d1/a_mod.f90") # Load all module infos - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.module_manager"): mod_man.load_all_module_infos() assert "Loading module information for file 'd1/a_mod.f90'" in caplog.text # Doing this a 2nd time should 
not raise any error - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, logger="psyclone.parse.module_manager"): mod_man.load_all_module_infos() assert "Module 'a_mod' already processed" in caplog.text diff --git a/src/psyclone/tests/psyad/adjoint_visitor_test.py b/src/psyclone/tests/psyad/adjoint_visitor_test.py index 20b70df173..3c41799e6a 100644 --- a/src/psyclone/tests/psyad/adjoint_visitor_test.py +++ b/src/psyclone/tests/psyad/adjoint_visitor_test.py @@ -175,10 +175,12 @@ def test_create_container_logger(caplog): ''' tangent_linear = FileContainer("blah") adj_visitor = AdjointVisitor(["dummy"]) - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor.container_node(tangent_linear) assert caplog.text == "" - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor._visit(tangent_linear) assert "Copying Container" in caplog.text @@ -221,10 +223,12 @@ def test_create_schedule_logger(caplog, fortran_reader): tl_schedule = tl_psyir.children[0] assert isinstance(tl_schedule, Schedule) adj_visitor = AdjointVisitor(["a", "b", "c"]) - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor.schedule_node(tl_schedule) assert caplog.text == "" - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor._visit(tl_schedule) assert "Transforming Schedule" in caplog.text assert "Zero-ing any local active variables" in caplog.text @@ -522,10 +526,12 @@ def test_assignment_node_logger(caplog, fortran_reader): adj_visitor = AdjointVisitor(["a", "b", "c"]) # set up self._active_variables _ = adj_visitor._visit(tl_psyir) - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyad.adjoint_visitor"): _ = 
adj_visitor.assignment_node(assignment) assert caplog.text == "" - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor.assignment_node(assignment) assert "Transforming active assignment" in caplog.text @@ -741,10 +747,12 @@ def test_loop_logger(fortran_reader, caplog): _ = adj_visitor._visit(tl_psyir) # active loop - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor.loop_node(tl_loop) assert caplog.text == "" - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor.loop_node(tl_loop) assert "Transforming active loop" in caplog.text @@ -808,10 +816,12 @@ def test_ifblock_logger(fortran_reader, caplog): tl_psyir = fortran_reader.psyir_from_source(TL_IF_CODE) adj_visitor = AdjointVisitor(["d", "e"]) - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor._visit(tl_psyir) assert "Transforming active ifblock" not in caplog.text - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, + logger="psyclone.psyad.adjoint_visitor"): _ = adj_visitor._visit(tl_psyir) assert "Transforming active ifblock" in caplog.text diff --git a/src/psyclone/tests/psyad/tl2ad_test.py b/src/psyclone/tests/psyad/tl2ad_test.py index 3736c1abb2..38ef516378 100644 --- a/src/psyclone/tests/psyad/tl2ad_test.py +++ b/src/psyclone/tests/psyad/tl2ad_test.py @@ -101,7 +101,7 @@ def test_generate_adjoint_str(caplog, tmpdir): assert expected in result assert test_harness == "" - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, logger="psyclone.psyad.tl2ad"): result, test_harness = generate_adjoint_str(tl_code, ["a", "b"]) assert tl_code in caplog.text @@ -202,7 +202,7 @@ def test_generate_adjoint_str_trans(tmpdir): assert 
Compile(tmpdir).string_compiles(result) -def test_generate_adjoint_str_trans_error(tmpdir): +def test_generate_adjoint_str_trans_error(): '''Test that the generate_adjoint_str() function successfully catches an error from the preprocess_trans() function. @@ -298,7 +298,7 @@ def test_generate_adjoint_str_generate_harness_logging(caplog): with caplog.at_level(logging.INFO): _ = generate_adjoint_str(tl_code, ["field"], create_test=True) assert caplog.text == "" - with caplog.at_level(logging.DEBUG): + with caplog.at_level(logging.DEBUG, logger="psyclone.psyad.tl2ad"): _, harness = generate_adjoint_str(tl_code, ["field"], create_test=True) if not caplog.text: diff --git a/src/psyclone/tests/psyir/nodes/extract_node_test.py b/src/psyclone/tests/psyir/nodes/extract_node_test.py index 47c0442898..ec3f69335c 100644 --- a/src/psyclone/tests/psyir/nodes/extract_node_test.py +++ b/src/psyclone/tests/psyir/nodes/extract_node_test.py @@ -611,7 +611,8 @@ def test_extract_ignore_wrong_vars(fortran_reader: FortranReader, # ---------------------------------------- monkeypatch.setattr(ExtractNode, "get_ignored_variables", lambda self: [("", "xx")]) - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.read_write_info"): code = fortran_writer(psyir) assert 'PreStart("test", "test-r0", 3, 2)' in code diff --git a/src/psyclone/tests/psyir/nodes/omp_directives_test.py b/src/psyclone/tests/psyir/nodes/omp_directives_test.py index be60a40ba7..1070b1baf7 100644 --- a/src/psyclone/tests/psyir/nodes/omp_directives_test.py +++ b/src/psyclone/tests/psyir/nodes/omp_directives_test.py @@ -146,7 +146,8 @@ def test_ompparallel_lowering(fortran_reader, monkeypatch, caplog): a_sym = Symbol("a") monkeypatch.setattr(pdir, "infer_sharing_attributes", lambda: ({}, {}, {a_sym})) - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.nodes.omp_directives"): pdir.lower_to_language_level() assert 
("Lowering 'OMPParallelDirective' detected a possible race " "condition for symbol 'a'. Make sure this is a false WaW " @@ -238,7 +239,8 @@ def test_omp_parallel_do_lowering(fortran_reader, monkeypatch, caplog): # Monkeypatch a case with shared variables that need synchronisation monkeypatch.setattr(pdir, "infer_sharing_attributes", lambda: ({}, {}, {Symbol("a")})) - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.nodes.omp_directives"): pdir.lower_to_language_level() assert ("Lowering 'OMPParallelDoDirective' detected a possible race " "condition for symbol 'a'. Make sure this is a false WaW " @@ -5064,7 +5066,8 @@ def test_multiple_reduction_same_var_diff_op(fortran_reader, fortran_writer, end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyir.tools.reduction_inference"): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'acc' is read first, which indicates a reduction" @@ -5118,7 +5121,8 @@ def test_non_reduction1(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyir.tools.reduction_inference"): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'count' is read first, which indicates a reduction" @@ -5144,7 +5148,8 @@ def test_non_reduction2(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyir.tools.reduction_inference"): with pytest.raises(TransformationError) as err: 
omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'count' is read first, which indicates a reduction" @@ -5170,7 +5175,8 @@ def test_non_reduction3(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyir.tools.reduction_inference"): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'count' is read first, which indicates a reduction" @@ -5263,7 +5269,8 @@ def test_reduction_struct_member(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyir.tools.reduction_inference"): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'struct%acc' is read first, which indicates a reduction" diff --git a/src/psyclone/tests/psyir/symbols/symbol_table_test.py b/src/psyclone/tests/psyir/symbols/symbol_table_test.py index 8870be3a10..2b0f1ffc77 100644 --- a/src/psyclone/tests/psyir/symbols/symbol_table_test.py +++ b/src/psyclone/tests/psyir/symbols/symbol_table_test.py @@ -2945,7 +2945,8 @@ def test_resolve_imports(fortran_reader, tmp_path, monkeypatch, caplog): assert not isinstance(b_1, symbols.DataSymbol) # Resolve only 'not_used3' from wildcard imports - with caplog.at_level(logging.INFO): + with caplog.at_level(logging.INFO, + logger="psyclone.psyir.symbols.symbol_table"): subroutine.symbol_table.resolve_imports( symbol_target=symbols.Symbol('not_used3')) not_used3 = subroutine.symbol_table.lookup('not_used3') @@ -3001,7 +3002,8 @@ def test_resolve_imports(fortran_reader, tmp_path, monkeypatch, caplog): # Now resolve all found containers (this will not fail for the # 
unavailable c_mod, but it will be logged) - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.symbols.symbol_table"): subroutine.symbol_table.resolve_imports() assert "Module 'c_mod' not found" in caplog.text @@ -3042,7 +3044,8 @@ def test_resolve_imports_missing_container(monkeypatch, caplog): monkeypatch.setattr(csym, "find_container_psyir", lambda local_node: None) table.add(csym) # Resolving imports should run without problems, but log a Warning. - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.symbols.symbol_table"): table.resolve_imports() assert "Module 'a_mod' not found" in caplog.text diff --git a/src/psyclone/tests/psyir/tools/call_tree_utils_test.py b/src/psyclone/tests/psyir/tools/call_tree_utils_test.py index b6e2df332f..039904a32c 100644 --- a/src/psyclone/tests/psyir/tools/call_tree_utils_test.py +++ b/src/psyclone/tests/psyir/tools/call_tree_utils_test.py @@ -283,7 +283,8 @@ def test_get_non_local_read_write_info(caplog): # Since the right search path is missing, this will result # in the testkern_import_symbols_mod module not being found: read_write_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): rw_info = ctu.get_non_local_read_write_info(schedule, read_write_info) assert ("Could not find module 'testkern_import_symbols_mod' - ignored." in caplog.text) @@ -304,7 +305,8 @@ def test_get_non_local_read_write_info(caplog): # infrastructure directory has not been added, so constants_mod cannot # be found: rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu.get_non_local_read_write_info(schedule, rw_info) assert "Unknown routine 'unknown_subroutine - ignored." 
in caplog.text assert ("Cannot find module 'constants_mod' - ignoring unknown symbol " @@ -331,7 +333,8 @@ def test_get_non_local_read_write_info(caplog): mod_man.add_ignore_module("constants_mod") rw_info = ReadWriteInfo() caplog.clear() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu.get_non_local_read_write_info(schedule, rw_info) assert "Unknown routine 'unknown_subroutine - ignored." in caplog.text assert "constants_mod" not in caplog.text @@ -360,7 +363,8 @@ def test_get_non_local_read_write_info_errors(caplog): routine.detach() rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu.get_non_local_read_write_info(schedule, rw_info) assert (f"Could not get PSyIR for Routine 'testkern_import_symbols_code' " f"from module '{kernels[0].module_name}' as no possible" @@ -370,14 +374,16 @@ def test_get_non_local_read_write_info_errors(caplog): # representing the routine. cntr.symbol_table.add(RoutineSymbol("testkern_import_symbols_code")) rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu.get_non_local_read_write_info(schedule, rw_info) assert (f"Could not get PSyIR for Routine 'testkern_import_symbols_code' " f"from module '{kernels[0].module_name}' -" in caplog.text) # Remove the module Container from the PSyIR. 
cntr.detach() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu.get_non_local_read_write_info(schedule, rw_info) assert (f"Could not get PSyIR for module '{kernels[0].module_name}'" in caplog.text) @@ -404,7 +410,8 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): None)] ctu = CallTreeUtils() rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns(todo, rw_info) assert "Cannot find module 'unknown_module' - ignored." in caplog.text assert rw_info.read_list == [] @@ -423,7 +430,8 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): # Now try to find a routine that does not exist in an existing module: todo = [('routine', 'module_with_var_mod', Signature("does-not-exist"), None)] - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot resolve routine 'does-not-exist' in module " "'module_with_var_mod' - ignored." 
in caplog.text) @@ -457,7 +465,8 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): cntr.symbol_table.add(RoutineSymbol("module_subroutine")) todo = [('routine', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot find routine 'module_subroutine' in module " "'module_with_var_mod' - ignored" in caplog.text) @@ -468,13 +477,15 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): cntr.symbol_table.remove(rsym) todo = [('unknown', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns(todo, rw_info) assert "Cannot find symbol 'module_subroutine'." in caplog.text todo = [('routine', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot resolve routine 'module_subroutine' in module " "'module_with_var_mod' - ignored" in caplog.text) @@ -483,7 +494,8 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): cntr.detach() todo = [('unknown', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot get PSyIR for module 'module_with_var_mod' - ignoring " "unknown symbol 'module_subroutine'" in caplog.text) @@ -623,7 +635,8 @@ def testcall_tree_utils_non_local_inout_parameters(caplog): # The example does contain an unknown subroutine (by design), and the # infrastructure directory has not been 
added, so constants_mod cannot # be found: - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): rw_info = ctu.get_in_out_parameters(schedule, collect_non_local_symbols=True) assert "Unknown routine 'unknown_subroutine - ignored." in caplog.text @@ -656,7 +669,8 @@ def test_call_tree_error_var_not_found(caplog): read_write_info = ReadWriteInfo() ctu = CallTreeUtils() sva = AccessSequence(Signature("a")) - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.call_tree_utils"): ctu._resolve_calls_and_unknowns([("unknown", "constants_mod", Signature("does_not_exist"), sva)], diff --git a/src/psyclone/tests/psyir/tools/read_write_info_test.py b/src/psyclone/tests/psyir/tools/read_write_info_test.py index ff1d8c5499..04d0888229 100644 --- a/src/psyclone/tests/psyir/tools/read_write_info_test.py +++ b/src/psyclone/tests/psyir/tools/read_write_info_test.py @@ -148,7 +148,8 @@ def test_remove_var(caplog) -> None: # sig_b must have the module name specified, # otherwise it must not be removed, and a warning must be logged: - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.read_write_info"): rwi.remove(sig_b) assert ("Variable 'b' is to be removed from ReadWriteInfo, but it's " "neither in the list of read variables" in caplog.text) diff --git a/src/psyclone/tests/psyir/transformations/parallel_loop_trans_test.py b/src/psyclone/tests/psyir/transformations/parallel_loop_trans_test.py index 48f6593c63..4374ef8dd6 100644 --- a/src/psyclone/tests/psyir/transformations/parallel_loop_trans_test.py +++ b/src/psyclone/tests/psyir/transformations/parallel_loop_trans_test.py @@ -37,9 +37,10 @@ ''' pytest tests for the parallel_loop_trans module. 
''' -import pytest import logging +import pytest + from psyclone.psyir.symbols import INTEGER_TYPE from psyclone.psyir.nodes import ( Assignment, IfBlock, Loop, OMPParallelDoDirective, Routine, WhileLoop, @@ -660,7 +661,8 @@ def test_paralooptrans_with_array_privatisation(fortran_reader, # (and it permits valid loops - in case we have a bulk list of symbols for # all the code, but it will log the symbols not found) caplog.clear() - with caplog.at_level(logging.WARNING): + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.transformations"): trans.apply(loop, {"privatise_arrays": True, "force_private": ["ztmp2", "symbol_not_in_loop"]}) assert ("!$omp parallel do default(shared) private(ji,jj,ztmp) " From a8b6ff83c266c51af3e74faa8e397dfec6092336 Mon Sep 17 00:00:00 2001 From: Joerg Henrichs Date: Thu, 5 Feb 2026 15:25:51 +1100 Subject: [PATCH 3/7] #3318 Updated documentation (remove that psyclone logging options will affect user scripts). --- doc/user_guide/psyclone_command.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/user_guide/psyclone_command.rst b/doc/user_guide/psyclone_command.rst index 7b867b94f9..7f1f5046fd 100644 --- a/doc/user_guide/psyclone_command.rst +++ b/doc/user_guide/psyclone_command.rst @@ -521,8 +521,7 @@ Enabling the Logging Infrastructure ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PSyclone supports logging which can provide additional information -on what is happening inside PSyclone. This logging will also -control the behaviour of any logging calls inside a user script. +on what is happening inside PSyclone. Logging output can be controlled through the ``--log-level`` option. By default, logging is set to ``OFF``, which means From 3b7671724cded4582d051996f156e5b0bab7f84b Mon Sep 17 00:00:00 2001 From: Joerg Henrichs Date: Thu, 5 Feb 2026 15:47:01 +1100 Subject: [PATCH 4/7] #3318 Updated tests to cover all new lines of code. 
--- src/psyclone/generator.py | 3 +- src/psyclone/tests/conftest.py | 5 +++ src/psyclone/tests/generator_test.py | 64 +++++++++++++++++----------- 3 files changed, 46 insertions(+), 26 deletions(-) diff --git a/src/psyclone/generator.py b/src/psyclone/generator.py index cfffaf71ce..75132c3930 100644 --- a/src/psyclone/generator.py +++ b/src/psyclone/generator.py @@ -635,7 +635,8 @@ def main(arguments): logger.setLevel(LOG_LEVELS[args.log_level]) handler.setFormatter(formatter) # Certain tests call main several times, which would add handlers - # over and over. Only attach a handler once: + # over and over (which results in duplicated messages). + # Only attach a handler once: if len(logger.handlers) == 0: logger.addHandler(handler) diff --git a/src/psyclone/tests/conftest.py b/src/psyclone/tests/conftest.py index 7d40c4afd9..67fe127022 100644 --- a/src/psyclone/tests/conftest.py +++ b/src/psyclone/tests/conftest.py @@ -115,6 +115,11 @@ def setup_logging(): """ logger_psyclone = logging.getLogger('psyclone') + # In case that some tests have added handlers - remove them all + while logger_psyclone.handlers: + logger_psyclone.removeHandler(logger_psyclone.handlers[0]) + + # Then add exactly one handler (which is done what happens in main): handler = logging.StreamHandler() logger_psyclone.addHandler(handler) # Disable the logger, which is the default diff --git a/src/psyclone/tests/generator_test.py b/src/psyclone/tests/generator_test.py index 8066986e63..df1e536f43 100644 --- a/src/psyclone/tests/generator_test.py +++ b/src/psyclone/tests/generator_test.py @@ -786,7 +786,45 @@ def test_main_invalid_api(capsys): assert output == expected_output -def test_main_api(capsys, caplog): +def test_main_logger(capsys, caplog): + """ + Test the setup of the logger. + """ + + # The conftest `setup_logging` fixture will add a handler to the + # PSyclone top-level logger - meaning the corresponding line in + # generator.py is not executed. 
Remove the handler here so we + # trigger adding a handler in generator.py + logger = logging.getLogger("psyclone") + logger.removeHandler(logger.handlers[0]) + + filename = os.path.join(NEMO_BASE_PATH, "explicit_do_long_line.f90") + # Give invalid logging level + # Reset capsys + capsys.readouterr() + with pytest.raises(SystemExit): + main([filename, "-api", "lfric", "--log-level", "fail"]) + _, err = capsys.readouterr() + # Error message check truncated as Python 3.13 changes how the + # array is output. + assert ("error: argument --log-level: invalid choice: 'fail'" + in err) + + # Test we get the logging debug correctly with caplog. This + # overrides the file output that PSyclone attempts. + caplog.clear() + # Pytest fully controls the logging level, overriding anything we + # set in generator.main so we can't test for it. + with caplog.at_level(logging.DEBUG): + main([filename, "-api", "dynamo0.3", "--log-level", "DEBUG", + "--log-file", "test.out"]) + assert Config.get().api == "lfric" + assert caplog.records[0].levelname == "DEBUG" + assert ("Logging system initialised. Level is DEBUG." in + caplog.record_tuples[0][2]) + + +def test_main_api(): ''' Test that the API can be set by a command line parameter, also using the API name aliases. ''' @@ -816,30 +854,6 @@ def test_main_api(capsys, caplog): main([filename, "-api", "dynamo0.3"]) assert Config.get().api == "lfric" - # Give invalid logging level - # Reset capsys - capsys.readouterr() - with pytest.raises(SystemExit): - main([filename, "-api", "dynamo0.3", "--log-level", "fail"]) - _, err = capsys.readouterr() - # Error message check truncated as Python 3.13 changes how the - # array is output. - assert ("error: argument --log-level: invalid choice: 'fail'" - in err) - - # Test we get the logging debug correctly with caplog. This - # overrides the file output that PSyclone attempts. 
- caplog.clear() - # Pytest fully controls the logging level, overriding anything we - # set in generator.main so we can't test for it. - with caplog.at_level(logging.DEBUG): - main([filename, "-api", "dynamo0.3", "--log-level", "DEBUG", - "--log-file", "test.out"]) - assert Config.get().api == "lfric" - assert caplog.records[0].levelname == "DEBUG" - assert ("Logging system initialised. Level is DEBUG." in - caplog.record_tuples[0][2]) - def test_keep_comments_and_keep_directives(capsys, caplog, tmpdir_factory): ''' Test the keep comments and keep directives arguments to main. ''' From 2552716f3747d2eee2884b9ad6f3f87dc893d27d Mon Sep 17 00:00:00 2001 From: Joerg Henrichs Date: Mon, 16 Feb 2026 12:58:10 +1100 Subject: [PATCH 5/7] #3318 Addressed issues raised in review. --- doc/developer_guide/coding-style.rst | 21 ++------- doc/developer_guide/working_practises.rst | 23 ++++++++++ doc/user_guide/psyclone_command.rst | 2 +- src/psyclone/generator.py | 11 ++--- src/psyclone/tests/conftest.py | 4 +- src/psyclone/tests/generator_test.py | 20 +++++---- src/psyclone/tests/parse/file_info_test.py | 18 ++++---- .../tests/psyad/adjoint_visitor_test.py | 32 +++++--------- .../tests/psyir/tools/call_tree_utils_test.py | 44 +++++++------------ 9 files changed, 85 insertions(+), 90 deletions(-) diff --git a/doc/developer_guide/coding-style.rst b/doc/developer_guide/coding-style.rst index 6537870583..11bde449fe 100644 --- a/doc/developer_guide/coding-style.rst +++ b/doc/developer_guide/coding-style.rst @@ -161,7 +161,7 @@ Logging PSyclone supports the Python ``logging`` module, to help report useful information throughout the code. By default the logging infrastructure -is switched off, but can be enabled either through the PSyclone command's +is switched off, but can be enabled either through the PSyclone command-line options or programmatically with: .. 
code-block:: python @@ -200,24 +201,9 @@ standard ``warnings`` import, i.e.: warnings.warn(self._deprecation_warning, DeprecationWarning, 2) -Logging should also be explicitly tested through the use of the ``caplog`` -pytest fixture. It is important that the expected logger is specified -in the test: if `main` (in `generate.py`) is executed, a handler is attached -to the PSyclone top-level logger. This means that any log event is not -propagated to the Python root level logger, and ``caplog`` without -a logger specified will only test the root logger. This can result -tests passing when executed on their own, but failing randomly when -executed with other tests: these test might call ``main`` (and -therefore prevent the root logger to receive the message), and -``caplog`` will then fail (if no logger is specified). Here the -proper way of testing log messages in a test: +Care must be taken when testing logging calls added to PSyclone. See +:ref:`logging_testing` for details. -.. code-block:: python - - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.read_write_info"): - ... - assert "your message here" in caplog.text .. _interface_description: diff --git a/doc/developer_guide/working_practises.rst b/doc/developer_guide/working_practises.rst index 8a16fbbe3c..44ddc4e957 100644 --- a/doc/developer_guide/working_practises.rst +++ b/doc/developer_guide/working_practises.rst @@ -175,6 +175,29 @@ provided to the pytest command line. It also ensures that (if compilation testing is enabled) the LFRic-stub and GOcean infrastructure libraries are compiled prior to any tests running. +.. _logging_testing: + +Testing PSyclone's Logging Calls +-------------------------------- +Logging should also be explicitly tested through the use of the ``caplog`` +pytest fixture. It is important that the expected logger is specified +in the test: if `main` (in `generator.py`) is executed, a handler is attached +to the PSyclone top-level logger.
This means that no log events are +propagated to the Python root level logger, and ``caplog`` without +a logger specified will only test the root logger. This can result in +tests passing when executed on their own, but failing randomly when +executed with other tests: these tests might call ``main`` (and +therefore prevent the root logger from receiving the message), and +``caplog`` will then fail (if no logger is specified). Here is the +proper way of testing log messages in a test: + +.. code-block:: python + + with caplog.at_level(logging.WARNING, + logger="psyclone.psyir.tools.read_write_info"): + ... + assert "your message here" in caplog.text + .. _test_coverage: diff --git a/doc/user_guide/psyclone_command.rst b/doc/user_guide/psyclone_command.rst index 7f1f5046fd..449cef89de 100644 --- a/doc/user_guide/psyclone_command.rst +++ b/doc/user_guide/psyclone_command.rst @@ -531,7 +531,7 @@ detailed in the ``psyclone -h`` information. By default the output from the logging goes into stderr. To control the logging output, PSyclone provides the ``--log-file`` option. If this is set, the logging output will instead -be directed to the provided file. +be directed to the specified file. Keeping Comments and Directives ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/psyclone/generator.py b/src/psyclone/generator.py index 75132c3930..b902a962a6 100644 --- a/src/psyclone/generator.py +++ b/src/psyclone/generator.py @@ -632,13 +632,14 @@ def main(arguments): logger_fparser = logging.getLogger('fparser') formatter = logging.Formatter('%(levelname)s:%(name)s:%(message)s') for logger in [logger_fparser, logger_psyclone]: - logger.setLevel(LOG_LEVELS[args.log_level]) - handler.setFormatter(formatter) # Certain tests call main several times, which would add handlers # over and over (which results in duplicated messages).
- # Only attach a handler once: - if len(logger.handlers) == 0: - logger.addHandler(handler) + # So, don't attach a handler if one is already there + if logger.handlers: + continue + logger.setLevel(LOG_LEVELS[args.log_level]) + handler.setFormatter(formatter) + logger.addHandler(handler) logger = logging.getLogger(__name__) diff --git a/src/psyclone/tests/conftest.py b/src/psyclone/tests/conftest.py index 67fe127022..172c6785bf 100644 --- a/src/psyclone/tests/conftest.py +++ b/src/psyclone/tests/conftest.py @@ -101,7 +101,7 @@ def have_graphviz(): # pragma: no-cover def setup_logging(): """ This fixture sets up logging the same way `main` does. - Using this ensures that any caplog tests specify the expected + Using this ensures that any caplog tests must specify the expected logger. If `main` is executed, a handler is attached to the psyclone top-level logger, which means the messages are not propagated to the python root logger, and caplog `at_level(x)` @@ -119,7 +119,7 @@ def setup_logging(): while logger_psyclone.handlers: logger_psyclone.removeHandler(logger_psyclone.handlers[0]) - # Then add exactly one handler (which is done what happens in main): + # Then add exactly one handler (which is what happens in main): handler = logging.StreamHandler() logger_psyclone.addHandler(handler) # Disable the logger, which is the default diff --git a/src/psyclone/tests/generator_test.py b/src/psyclone/tests/generator_test.py index df1e536f43..0b60e0d4de 100644 --- a/src/psyclone/tests/generator_test.py +++ b/src/psyclone/tests/generator_test.py @@ -786,7 +786,7 @@ def test_main_invalid_api(capsys): assert output == expected_output -def test_main_logger(capsys, caplog): +def test_main_logger(capsys, caplog, tmp_path): """ Test the setup of the logger. """ @@ -810,18 +810,22 @@ def test_main_logger(capsys, caplog): assert ("error: argument --log-level: invalid choice: 'fail'" in err) - # Test we get the logging debug correctly with caplog. 
This - # overrides the file output that PSyclone attempts. + # Test we get the logging debug correctly with caplog, including + # redirection into a file: caplog.clear() - # Pytest fully controls the logging level, overriding anything we - # set in generator.main so we can't test for it. + out_file = str(tmp_path / "test.out") with caplog.at_level(logging.DEBUG): main([filename, "-api", "dynamo0.3", "--log-level", "DEBUG", - "--log-file", "test.out"]) + "--log-file", out_file]) assert Config.get().api == "lfric" assert caplog.records[0].levelname == "DEBUG" - assert ("Logging system initialised. Level is DEBUG." in - caplog.record_tuples[0][2]) + assert "Logging system initialised. Level is DEBUG." in caplog.text + # Check that we have a file handler installed as expected + file_handlers = [h for h in logger.handlers + if isinstance(h, logging.FileHandler)] + # There should be exactly one file handler, pointing to out_file: + assert len(file_handlers) == 1 + assert file_handlers[0].baseFilename == out_file def test_main_api(): diff --git a/src/psyclone/tests/parse/file_info_test.py b/src/psyclone/tests/parse/file_info_test.py index 662a9cf4e3..28f77bdd71 100644 --- a/src/psyclone/tests/parse/file_info_test.py +++ b/src/psyclone/tests/parse/file_info_test.py @@ -50,6 +50,8 @@ end program main """ +TEST_LOGGER = "psyclone.parse.file_info" + def test_file_info_constructor(): """ @@ -84,7 +86,7 @@ def test_file_info_missing_file(caplog): """ finfo = FileInfo("missing.txt") - with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): with pytest.raises(FileNotFoundError) as err: _ = finfo.get_source_code() assert "Source file 'missing.txt': loading source code" in caplog.text @@ -101,7 +103,7 @@ def test_file_info_cached_source_code(tmpdir, caplog): with open(fname, "w", encoding="utf-8") as fout: fout.write(content) finfo = FileInfo(fname, cache_active=True) - with caplog.at_level(logging.INFO, 
logger="psyclone.parse.file_info"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): input1 = finfo.get_source_code() assert input1 == content assert "a_file.txt': loaded OK" in caplog.text @@ -116,14 +118,14 @@ def test_file_info_cached_source_code(tmpdir, caplog): assert finfo._cache_data_save is None # Load fparser tree to start caching - with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): finfo.get_fparser_tree() assert finfo._cache_data_save is not None assert "No cache file '" in caplog.text assert "Cache file updated with hashsum " in caplog.text caplog.clear() - with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): finfo = FileInfo(fname, cache_active=True) input1 = finfo.get_fparser_tree() assert finfo._cache_data_load is not None @@ -285,7 +287,7 @@ def test_file_info_load_from_cache_corrupted(tmpdir, caplog): assert file_info._cache_data_save is None # Load with damaged cache file - with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): psyir_node = file_info.get_psyir() assert isinstance(psyir_node, Node) assert "Error while reading cache file - ignoring: " in caplog.text @@ -353,7 +355,7 @@ def test_file_info_source_changed(tmpdir, caplog): file_info: FileInfo = FileInfo(filename, cache_active=True) # Load, but not from cache - with caplog.at_level(logging.INFO, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): psyir_node = file_info.get_psyir() assert "Cache hashsum mismatch: source " in caplog.text assert "Cache file updated with hashsum " in caplog.text @@ -415,7 +417,7 @@ def test_file_info_cachefile_not_writable(tmpdir, caplog): # If the psyir, hence, fparser tree is requested, creating # the cache will fail, but the psyir node itself will # still be returned. 
- with caplog.at_level(logging.WARN, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.WARN, logger=TEST_LOGGER): psyir_node = file_info.get_psyir() assert isinstance(psyir_node, Node) assert "Unable to write to cache file: " in caplog.text @@ -438,7 +440,7 @@ def fun_exception(a, b): monkeypatch.setattr("pickle.dump", fun_exception) - with caplog.at_level(logging.WARN, logger="psyclone.parse.file_info"): + with caplog.at_level(logging.WARN, logger=TEST_LOGGER): psyir_node = file_info.get_psyir() assert isinstance(psyir_node, Node) assert "Error while storing cache data - ignoring:" in caplog.text diff --git a/src/psyclone/tests/psyad/adjoint_visitor_test.py b/src/psyclone/tests/psyad/adjoint_visitor_test.py index 3c41799e6a..ce94806203 100644 --- a/src/psyclone/tests/psyad/adjoint_visitor_test.py +++ b/src/psyclone/tests/psyad/adjoint_visitor_test.py @@ -89,6 +89,8 @@ "end subroutine test\n" ) +TEST_LOGGER = "psyclone.psyad.adjoint_visitor" + def check_adjoint(tl_fortran, active_variable_names, expected_ad_fortran, tmpdir, fortran_writer): @@ -175,12 +177,10 @@ def test_create_container_logger(caplog): ''' tangent_linear = FileContainer("blah") adj_visitor = AdjointVisitor(["dummy"]) - with caplog.at_level(logging.INFO, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): _ = adj_visitor.container_node(tangent_linear) assert caplog.text == "" - with caplog.at_level(logging.DEBUG, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.DEBUG, logger=TEST_LOGGER): _ = adj_visitor._visit(tangent_linear) assert "Copying Container" in caplog.text @@ -223,12 +223,10 @@ def test_create_schedule_logger(caplog, fortran_reader): tl_schedule = tl_psyir.children[0] assert isinstance(tl_schedule, Schedule) adj_visitor = AdjointVisitor(["a", "b", "c"]) - with caplog.at_level(logging.INFO, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): _ = 
adj_visitor.schedule_node(tl_schedule) assert caplog.text == "" - with caplog.at_level(logging.DEBUG, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.DEBUG, logger=TEST_LOGGER): _ = adj_visitor._visit(tl_schedule) assert "Transforming Schedule" in caplog.text assert "Zero-ing any local active variables" in caplog.text @@ -526,12 +524,10 @@ def test_assignment_node_logger(caplog, fortran_reader): adj_visitor = AdjointVisitor(["a", "b", "c"]) # set up self._active_variables _ = adj_visitor._visit(tl_psyir) - with caplog.at_level(logging.INFO, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): _ = adj_visitor.assignment_node(assignment) assert caplog.text == "" - with caplog.at_level(logging.DEBUG, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.DEBUG, logger=TEST_LOGGER): _ = adj_visitor.assignment_node(assignment) assert "Transforming active assignment" in caplog.text @@ -747,12 +743,10 @@ def test_loop_logger(fortran_reader, caplog): _ = adj_visitor._visit(tl_psyir) # active loop - with caplog.at_level(logging.INFO, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): _ = adj_visitor.loop_node(tl_loop) assert caplog.text == "" - with caplog.at_level(logging.DEBUG, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.DEBUG, logger=TEST_LOGGER): _ = adj_visitor.loop_node(tl_loop) assert "Transforming active loop" in caplog.text @@ -816,12 +810,10 @@ def test_ifblock_logger(fortran_reader, caplog): tl_psyir = fortran_reader.psyir_from_source(TL_IF_CODE) adj_visitor = AdjointVisitor(["d", "e"]) - with caplog.at_level(logging.INFO, - logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER): _ = adj_visitor._visit(tl_psyir) assert "Transforming active ifblock" not in caplog.text - with caplog.at_level(logging.DEBUG, - 
logger="psyclone.psyad.adjoint_visitor"): + with caplog.at_level(logging.DEBUG, logger=TEST_LOGGER): _ = adj_visitor._visit(tl_psyir) assert "Transforming active ifblock" in caplog.text diff --git a/src/psyclone/tests/psyir/tools/call_tree_utils_test.py b/src/psyclone/tests/psyir/tools/call_tree_utils_test.py index 039904a32c..03d453e4d9 100644 --- a/src/psyclone/tests/psyir/tools/call_tree_utils_test.py +++ b/src/psyclone/tests/psyir/tools/call_tree_utils_test.py @@ -60,6 +60,8 @@ from psyclone.tests.parse.conftest \ import mod_man_test_setup_directories # noqa: F401 +TEST_LOGGER = "psyclone.psyir.tools.call_tree_utils" + # ----------------------------------------------------------------------------- @pytest.mark.usefixtures("clear_module_manager_instance") @@ -283,8 +285,7 @@ def test_get_non_local_read_write_info(caplog): # Since the right search path is missing, this will result # in the testkern_import_symbols_mod module not being found: read_write_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): rw_info = ctu.get_non_local_read_write_info(schedule, read_write_info) assert ("Could not find module 'testkern_import_symbols_mod' - ignored." in caplog.text) @@ -305,8 +306,7 @@ def test_get_non_local_read_write_info(caplog): # infrastructure directory has not been added, so constants_mod cannot # be found: rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu.get_non_local_read_write_info(schedule, rw_info) assert "Unknown routine 'unknown_subroutine - ignored." 
in caplog.text assert ("Cannot find module 'constants_mod' - ignoring unknown symbol " @@ -333,8 +333,7 @@ def test_get_non_local_read_write_info(caplog): mod_man.add_ignore_module("constants_mod") rw_info = ReadWriteInfo() caplog.clear() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu.get_non_local_read_write_info(schedule, rw_info) assert "Unknown routine 'unknown_subroutine - ignored." in caplog.text assert "constants_mod" not in caplog.text @@ -363,8 +362,7 @@ def test_get_non_local_read_write_info_errors(caplog): routine.detach() rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu.get_non_local_read_write_info(schedule, rw_info) assert (f"Could not get PSyIR for Routine 'testkern_import_symbols_code' " f"from module '{kernels[0].module_name}' as no possible" @@ -374,16 +372,14 @@ def test_get_non_local_read_write_info_errors(caplog): # representing the routine. cntr.symbol_table.add(RoutineSymbol("testkern_import_symbols_code")) rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu.get_non_local_read_write_info(schedule, rw_info) assert (f"Could not get PSyIR for Routine 'testkern_import_symbols_code' " f"from module '{kernels[0].module_name}' -" in caplog.text) # Remove the module Container from the PSyIR. 
cntr.detach() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu.get_non_local_read_write_info(schedule, rw_info) assert (f"Could not get PSyIR for module '{kernels[0].module_name}'" in caplog.text) @@ -410,8 +406,7 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): None)] ctu = CallTreeUtils() rw_info = ReadWriteInfo() - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns(todo, rw_info) assert "Cannot find module 'unknown_module' - ignored." in caplog.text assert rw_info.read_list == [] @@ -430,8 +425,7 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): # Now try to find a routine that does not exist in an existing module: todo = [('routine', 'module_with_var_mod', Signature("does-not-exist"), None)] - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot resolve routine 'does-not-exist' in module " "'module_with_var_mod' - ignored." 
in caplog.text) @@ -465,8 +459,7 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): cntr.symbol_table.add(RoutineSymbol("module_subroutine")) todo = [('routine', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot find routine 'module_subroutine' in module " "'module_with_var_mod' - ignored" in caplog.text) @@ -477,15 +470,13 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): cntr.symbol_table.remove(rsym) todo = [('unknown', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns(todo, rw_info) assert "Cannot find symbol 'module_subroutine'." in caplog.text todo = [('routine', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot resolve routine 'module_subroutine' in module " "'module_with_var_mod' - ignored" in caplog.text) @@ -494,8 +485,7 @@ def test_call_tree_utils_resolve_calls_unknowns(caplog): cntr.detach() todo = [('unknown', 'module_with_var_mod', Signature("module_subroutine"), info)] - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns(todo, rw_info) assert ("Cannot get PSyIR for module 'module_with_var_mod' - ignoring " "unknown symbol 'module_subroutine'" in caplog.text) @@ -635,8 +625,7 @@ def testcall_tree_utils_non_local_inout_parameters(caplog): # The example does contain an 
unknown subroutine (by design), and the # infrastructure directory has not been added, so constants_mod cannot # be found: - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): rw_info = ctu.get_in_out_parameters(schedule, collect_non_local_symbols=True) assert "Unknown routine 'unknown_subroutine - ignored." in caplog.text @@ -669,8 +658,7 @@ def test_call_tree_error_var_not_found(caplog): read_write_info = ReadWriteInfo() ctu = CallTreeUtils() sva = AccessSequence(Signature("a")) - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.tools.call_tree_utils"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER): ctu._resolve_calls_and_unknowns([("unknown", "constants_mod", Signature("does_not_exist"), sva)], From 4b9e9778082e7a36c1fedb968075b8d203110d75 Mon Sep 17 00:00:00 2001 From: Joerg Henrichs Date: Mon, 16 Feb 2026 13:00:21 +1100 Subject: [PATCH 6/7] #3318 Fixed pylint warnings, use get_invoke. --- .../tests/psyir/nodes/omp_directives_test.py | 176 +++++++----------- 1 file changed, 68 insertions(+), 108 deletions(-) diff --git a/src/psyclone/tests/psyir/nodes/omp_directives_test.py b/src/psyclone/tests/psyir/nodes/omp_directives_test.py index 1070b1baf7..c85b9b9be6 100644 --- a/src/psyclone/tests/psyir/nodes/omp_directives_test.py +++ b/src/psyclone/tests/psyir/nodes/omp_directives_test.py @@ -40,13 +40,12 @@ ''' Performs py.test tests on the OpenMP PSyIR Directive nodes. 
''' -import os import re -import pytest import logging + +import pytest + from psyclone.errors import UnresolvedDependencyError -from psyclone.parse.algorithm import parse -from psyclone.psyGen import PSyFactory from psyclone.psyir import nodes from psyclone import psyGen from psyclone.psyir.nodes import ( @@ -75,12 +74,10 @@ LFRicOMPLoopTrans, OMPParallelLoopTrans, LFRicOMPParallelLoopTrans, OMPSingleTrans, OMPMasterTrans, OMPLoopTrans, TransformationError) +from psyclone.tests.utilities import get_invoke -BASE_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname( - os.path.abspath(__file__)))), "test_files", "lfric") -GOCEAN_BASE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), - os.pardir, os.pardir, "test_files", - "gocean1p0") +TEST_LOGGER_OMP = "psyclone.psyir.nodes.omp_directives" +TEST_LOGGER_INF = "psyclone.psyir.tools.reduction_inference" def test_ompparallel_lowering(fortran_reader, monkeypatch, caplog): @@ -146,8 +143,7 @@ def test_ompparallel_lowering(fortran_reader, monkeypatch, caplog): a_sym = Symbol("a") monkeypatch.setattr(pdir, "infer_sharing_attributes", lambda: ({}, {}, {a_sym})) - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.nodes.omp_directives"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER_OMP): pdir.lower_to_language_level() assert ("Lowering 'OMPParallelDirective' detected a possible race " "condition for symbol 'a'. Make sure this is a false WaW " @@ -239,8 +235,7 @@ def test_omp_parallel_do_lowering(fortran_reader, monkeypatch, caplog): # Monkeypatch a case with shared variables that need synchronisation monkeypatch.setattr(pdir, "infer_sharing_attributes", lambda: ({}, {}, {Symbol("a")})) - with caplog.at_level(logging.WARNING, - logger="psyclone.psyir.nodes.omp_directives"): + with caplog.at_level(logging.WARNING, logger=TEST_LOGGER_OMP): pdir.lower_to_language_level() assert ("Lowering 'OMPParallelDoDirective' detected a possible race " "condition for symbol 'a'. 
Make sure this is a false WaW " @@ -277,10 +272,9 @@ def test_omp_teams_distribute_parallel_do_strings( def test_ompdo_constructor(): ''' Check that we can make an OMPDoDirective with and without children ''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") - psy = PSyFactory("lfric", distributed_memory=False).create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", dist_mem=False, + idx=0) + schedule = invoke.schedule ompdo = OMPDoDirective(parent=schedule) # A Directive always has a Schedule assert len(ompdo.children) == 1 @@ -567,11 +561,8 @@ def test_directiveinfer_sharing_attributes_lfric(): discontinuous function spaces. ''' - _, invoke_info = parse( - os.path.join(BASE_PATH, "1_single_invoke_w3.f90"), api="lfric") - psy = PSyFactory("lfric", - distributed_memory=False).create(invoke_info) - invoke = psy.invokes.invoke_list[0] + _, invoke = get_invoke("1_single_invoke_w3.f90", api="lfric", + dist_mem=False, idx=0) schedule = invoke.schedule # We use Transformations to introduce the necessary directives otrans = LFRicOMPLoopTrans() @@ -582,12 +573,7 @@ def test_directiveinfer_sharing_attributes_lfric(): rtrans.apply(schedule.children[0]) directive = schedule.children[0] assert isinstance(directive, OMPParallelDirective) - # TODO #1010 In the LFRic API, the loop bounds are created at code- - # generation time and therefore we cannot generate the list of - # private variables until that is under way. Ultimately this will be - # replaced by a `lower_to_language_level` call. 
- # pylint: disable=pointless-statement - psy.gen + # Now check that infer_sharing_attributes returns what we expect pvars, fpvars, sync = directive.infer_sharing_attributes() assert isinstance(pvars, set) @@ -1152,11 +1138,8 @@ def test_omp_forward_dependence(): '''Test that the forward_dependence method works for Directives, returning the closest dependent Node after the current Node in the schedule or None if none are found. ''' - _, invoke_info = parse( - os.path.join(BASE_PATH, "15.14.1_multi_aX_plus_Y_builtin.f90"), - api="lfric") - psy = PSyFactory("lfric", distributed_memory=True).create(invoke_info) - invoke = psy.invokes.invoke_list[0] + _, invoke = get_invoke("15.14.1_multi_aX_plus_Y_builtin.f90", api="lfric", + dist_mem=False, idx=0) schedule = invoke.schedule otrans = LFRicOMPParallelLoopTrans() for child in schedule.children: @@ -1177,11 +1160,8 @@ def test_omp_forward_dependence(): first_omp = schedule.children[0] assert first_omp.forward_dependence() == writer # 3: directive and globalsum dependencies - _, invoke_info = parse( - os.path.join(BASE_PATH, "15.14.3_sum_setval_field_builtin.f90"), - api="lfric") - psy = PSyFactory("lfric", distributed_memory=True).create(invoke_info) - invoke = psy.invokes.invoke_list[0] + _, invoke = get_invoke("15.14.3_sum_setval_field_builtin.f90", + api="lfric", dist_mem=True, idx=0) schedule = invoke.schedule otrans.apply(schedule.children[0]) otrans.apply(schedule.children[1]) @@ -1209,15 +1189,12 @@ def test_omp_single_nowait(nowait): def test_omp_single_strings(nowait): ''' Test the begin_string and end_string methods of the OMPSingle directive ''' - _, invoke_info = parse(os.path.join(GOCEAN_BASE_PATH, "single_invoke.f90"), - api="gocean") + _, invoke = get_invoke("single_invoke.f90", api="gocean", idx=0, + dist_mem=False) single = OMPSingleTrans() - psy = PSyFactory("gocean", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule - single.apply(schedule[0], {"nowait": 
nowait}) - omp_single = schedule[0] + single.apply(invoke.schedule[0], {"nowait": nowait}) + omp_single = invoke.schedule[0] assert omp_single.begin_string() == "omp single" assert omp_single.end_string() == "omp end single" @@ -1238,16 +1215,14 @@ def test_omp_single_validate_child(): def test_omp_single_validate_global_constraints(): ''' Test the validate_global_constraints method of the OMPSingle directive ''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") - single = OMPSingleTrans() - psy = PSyFactory("lfric", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule - single.apply(schedule.children[0]) + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", + dist_mem=False, idx=0) + + single = OMPSingleTrans() + single.apply(invoke.schedule.children[0]) with pytest.raises(GenerationError) as excinfo: - schedule.children[0].validate_global_constraints() + invoke.schedule.children[0].validate_global_constraints() assert ("OMPSingleDirective must be inside an OMP parallel region but " + "could not find an ancestor OMPParallelDirective node") in \ str(excinfo.value) @@ -1256,17 +1231,16 @@ def test_omp_single_validate_global_constraints(): def test_omp_single_nested_validate_global_constraints(monkeypatch): ''' Test the validate_global_constraints method of the OMPSingle directive fails when nested OMPSingles happen''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") - single = OMPSingleTrans() + # Alternative excluded node types for monkeypatch excluded_node_types = (nodes.CodeBlock, nodes.Return, nodes.ACCDirective, psyGen.HaloExchange, nodes.OMPParallelDirective) + single = OMPSingleTrans() monkeypatch.setattr(single, "excluded_node_types", excluded_node_types) + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", dist_mem=False, + idx=0) + schedule = invoke.schedule parallel = OMPParallelTrans() - psy = PSyFactory("lfric", 
distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule single.apply(schedule.children[0]) single_omp = schedule.children[0] @@ -1290,16 +1264,14 @@ def test_omp_master_strings(): def test_omp_master_validate_global_constraints(): ''' Test the validate_global_constraints method of the OMPMaster directive ''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", + dist_mem=False, idx=0) + master = OMPMasterTrans() - psy = PSyFactory("lfric", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule - master.apply(schedule.children[0]) + master.apply(invoke.schedule.children[0]) with pytest.raises(GenerationError) as excinfo: - schedule.children[0].validate_global_constraints() + invoke.schedule.children[0].validate_global_constraints() assert ("OMPMasterDirective must be inside an OMP parallel region but " + "could not find an ancestor OMPParallelDirective node") in \ str(excinfo.value) @@ -1308,17 +1280,15 @@ def test_omp_master_validate_global_constraints(): def test_omp_master_nested_validate_global_constraints(monkeypatch): ''' Test the validate_global_constraints method of the OMPMaster directive fails when nested OMPSingles happen''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") - master = OMPMasterTrans() # Alternative excluded node types for monkeypatch excluded_node_types = (nodes.CodeBlock, nodes.Return, nodes.ACCDirective, psyGen.HaloExchange, nodes.OMPParallelDirective) + master = OMPMasterTrans() monkeypatch.setattr(master, "excluded_node_types", excluded_node_types) + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", + dist_mem=False, idx=0) parallel = OMPParallelTrans() - psy = PSyFactory("lfric", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule + schedule = invoke.schedule 
master.apply(schedule.children[0]) master_omp = schedule.children[0] @@ -1340,11 +1310,9 @@ def test_omp_barrier_strings(): def test_omp_barrier_validate_global_constraints(): ''' Test the validate_global_constraints method of the OMPBarrier directive ''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") - psy = PSyFactory("lfric", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", + dist_mem=False, idx=0) + schedule = invoke.schedule barrier = OMPBarrierDirective() schedule.addchild(barrier, 0) with pytest.raises(GenerationError) as excinfo: @@ -1355,13 +1323,11 @@ def test_omp_barrier_validate_global_constraints(): # Valid case. barrier = OMPBarrierDirective() parallel = OMPParallelDirective() - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"), - api="lfric") - psy = PSyFactory("lfric", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule + _, invoke = get_invoke("1_single_invoke.f90", api="lfric", + dist_mem=False, idx=0) + parallel.children[0].addchild(barrier) - schedule.addchild(parallel) + invoke.schedule.addchild(parallel) barrier.validate_global_constraints() @@ -1438,13 +1404,11 @@ def test_omp_taskloop_validate_child(): def test_omp_taskloop_validate_global_constraints(): ''' Test the validate_global_constraints method of the OMPTaskloop directive ''' - _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke_w3.f90"), - api="lfric") - taskloop = OMPTaskloopTrans() - psy = PSyFactory("lfric", distributed_memory=False).\ - create(invoke_info) - schedule = psy.invokes.invoke_list[0].schedule + _, invoke = get_invoke("1_single_invoke_w3.f90", api="lfric", + dist_mem=False, idx=0) + schedule = invoke.schedule + taskloop = OMPTaskloopTrans() taskloop.apply(schedule.children[0]) with pytest.raises(GenerationError) as excinfo: 
schedule.children[0].validate_global_constraints() @@ -1495,7 +1459,7 @@ def test_omp_target_nowait_getter_setter(): # Test OMPDeclareTargetDirective -def test_omp_declare_target_directive_constructor_and_strings(monkeypatch): +def test_omp_declare_target_directive_constructor_and_strings(): ''' Test the OMPDeclareTargetDirective constructor and its output strings.''' target = OMPDeclareTargetDirective() @@ -4853,7 +4817,7 @@ def test_add_reduction_clause_loop(fortran_reader, fortran_writer): assert "reduction(+: acc)" in output -def test_reduction_clause_eq(fortran_reader, fortran_writer): +def test_reduction_clause_eq(): ''' Test OMPParallelDoDirective equality with reduction clauses ''' do_directive1 = OMPParallelDoDirective() @@ -4877,7 +4841,7 @@ def test_reduction_clause_eq(fortran_reader, fortran_writer): assert do_directive1 == do_directive3 -def test_add_reduction_clause_validation(fortran_reader, fortran_writer): +def test_add_reduction_clause_validation(): ''' Check that adding a reduction clause with a non-reduction clause argument raises an error. ''' @@ -5046,8 +5010,7 @@ def test_multiple_reduction_same_var(fortran_reader, fortran_writer): assert "reduction(+: acc)" in output -def test_multiple_reduction_same_var_diff_op(fortran_reader, fortran_writer, - caplog): +def test_multiple_reduction_same_var_diff_op(fortran_reader, caplog): ''' Test that a loop containing multiple reductions of the same variable, but involve different operators, is not parallelised. 
''' @@ -5066,8 +5029,7 @@ def test_multiple_reduction_same_var_diff_op(fortran_reader, fortran_writer, end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO, - logger="psyclone.psyir.tools.reduction_inference"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER_INF): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'acc' is read first, which indicates a reduction" @@ -5101,7 +5063,7 @@ def test_nested_reductions(fortran_reader, fortran_writer): assert "collapse(2)" in output -def test_non_reduction1(fortran_reader, fortran_writer, caplog): +def test_non_reduction1(fortran_reader, caplog): ''' Test that a loop that looks like it contains a reduction (but doesn't), is not parallelised. ''' @@ -5121,8 +5083,7 @@ def test_non_reduction1(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO, - logger="psyclone.psyir.tools.reduction_inference"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER_INF): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'count' is read first, which indicates a reduction" @@ -5133,7 +5094,7 @@ def test_non_reduction1(fortran_reader, fortran_writer, caplog): "supported for reductions" in caplog.text) -def test_non_reduction2(fortran_reader, fortran_writer, caplog): +def test_non_reduction2(fortran_reader, caplog): ''' Test that x = x + x does not lead to a reduction clause. 
''' psyir = fortran_reader.psyir_from_source(''' @@ -5148,8 +5109,7 @@ def test_non_reduction2(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO, - logger="psyclone.psyir.tools.reduction_inference"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER_INF): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'count' is read first, which indicates a reduction" @@ -5160,7 +5120,7 @@ def test_non_reduction2(fortran_reader, fortran_writer, caplog): "supported for reductions" in caplog.text) -def test_non_reduction3(fortran_reader, fortran_writer, caplog): +def test_non_reduction3(fortran_reader, caplog): ''' Test that x = x / 2 does not lead to a reduction clause. ''' psyir = fortran_reader.psyir_from_source(''' @@ -5175,8 +5135,7 @@ def test_non_reduction3(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO, - logger="psyclone.psyir.tools.reduction_inference"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER_INF): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'count' is read first, which indicates a reduction" @@ -5251,7 +5210,7 @@ def test_reduction_omp_parallel_loop_trans(fortran_reader, fortran_writer): assert "reduction(+: acc)" in output -def test_reduction_struct_member(fortran_reader, fortran_writer, caplog): +def test_reduction_struct_member(fortran_reader, caplog): ''' Test that reduction loops involving struct members are not parallelised. This is not yet supported by OpenMP. 
''' @@ -5269,8 +5228,7 @@ def test_reduction_struct_member(fortran_reader, fortran_writer, caplog): end function''') omplooptrans = OMPLoopTrans(omp_directive="paralleldo") loop = psyir.walk(Loop)[0] - with caplog.at_level(logging.INFO, - logger="psyclone.psyir.tools.reduction_inference"): + with caplog.at_level(logging.INFO, logger=TEST_LOGGER_INF): with pytest.raises(TransformationError) as err: omplooptrans.apply(loop, enable_reductions=True) assert ("Variable 'struct%acc' is read first, which indicates a reduction" @@ -5388,6 +5346,8 @@ def test_firstprivate_with_uninitialised(fortran_reader, fortran_writer): def test_critical_begin_and_end_string(): + """Tests critical begin and end directives. + """ directive = OMPCriticalDirective() assert directive.begin_string() == "omp critical" assert directive.end_string() == "omp end critical" From 05877aba08ff38b071b2968175cb2cad571be438 Mon Sep 17 00:00:00 2001 From: Andrew Porter Date: Tue, 17 Feb 2026 09:19:33 +0000 Subject: [PATCH 7/7] #3319 update changelog --- changelog | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog b/changelog index 1a1ef01ce5..b13c5ed0f8 100644 --- a/changelog +++ b/changelog @@ -1,3 +1,6 @@ + 46) PR #3319 for #3318. Alters the logging set-up performed by PSyclone + so that it does not affect the root Python logger. + 45) PR #3125 towards #3060. IntrinsicCall reference_accesses now returns appropriate access_types (instead of a generic READWRITE).