Commit

Merge branch 'linker_wrapper_new' into fix_two_phase_compilation_errors
hiker committed Jan 9, 2025
2 parents bd9605d + 8e67176 commit 84056f1
Showing 8 changed files with 50 additions and 42 deletions.
7 changes: 6 additions & 1 deletion Documentation/source/site-specific-config.rst
@@ -20,7 +20,12 @@ contains all the tools that will be used during the build process, but
 it can only store one tool per category. If a certain tool is not
 defined in the toolbox, the default from the `ToolRepository` will
 be used. This is useful for many standard tools like `git`, `rsync`
-etc that de-facto will never be changed.
+etc. that de facto will never change. Fab will check that a tool
+is actually available on the system before adding it to a ToolBox.
+This is typically done by trying to run the tool with some test
+parameters, for example requesting its version number. If this fails,
+the tool is considered unavailable and will not be used (unless
+the user explicitly puts the tool into the ToolBox).

 .. note:: If you need to use, for example, different compilers for
           different files, you would implement this as a `meta-compiler`:
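The added paragraph describes the availability check and the explicit-override escape hatch. As a rough illustration of that behaviour, here is a hedged sketch of a user configuration that pre-selects a tool; it assumes `fab.tools` exposes `ToolBox`, `ToolRepository`, `Category` and a `get_tool` lookup as used elsewhere in this commit, and the compiler name is only an example:

```python
# A minimal sketch, not the documented API surface: ToolBox.add_tool and
# ToolRepository.get_tool are assumed from the code in this commit.
from fab.tools import Category, ToolBox, ToolRepository

tool_box = ToolBox()

# Explicitly put a tool into the ToolBox; per the paragraph above, this
# bypasses the availability check that guards the defaults.
repo = ToolRepository()
gfortran = repo.get_tool(Category.FORTRAN_COMPILER, "gfortran")
tool_box.add_tool(gfortran)

# Any category not set here falls back to the ToolRepository default:
# the first tool of that category that proves it is available, e.g. by
# answering a version query.
```
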
42 changes: 24 additions & 18 deletions source/fab/parse/c.py
@@ -11,44 +11,48 @@
 from pathlib import Path
 from typing import List, Optional, Union, Tuple

-from fab.dep_tree import AnalysedDependent
-
 try:
     import clang  # type: ignore
     import clang.cindex  # type: ignore
 except ImportError:
     clang = None

+from fab.build_config import BuildConfig
+from fab.dep_tree import AnalysedDependent
 from fab.util import log_or_dot, file_checksum

 logger = logging.getLogger(__name__)


 class AnalysedC(AnalysedDependent):
     """
-    An analysis result for a single C file, containing symbol definitions and dependencies.
+    An analysis result for a single C file, containing symbol definitions and
+    dependencies.

-    Note: We don't need to worry about compile order with pure C projects; we can compile all in one go.
-    However, with a *Fortran -> C -> Fortran* dependency chain, we do need to ensure that one Fortran file
-    is compiled before another, so this class must be part of the dependency tree analysis.
+    Note: We don't need to worry about compile order with pure C projects; we
+          can compile all in one go. However, with a *Fortran -> C -> Fortran*
+          dependency chain, we do need to ensure that one Fortran file is
+          compiled before another, so this class must be part of the
+          dependency tree analysis.
     """
-    # Note: This subclass adds nothing to it's parent, which provides everything it needs.
-    # We'd normally remove an irrelevant class like this but we want to keep the door open
-    # for filtering analysis results by type, rather than suffix.
-    pass
+    # Note: This subclass adds nothing to its parent, which provides
+    #       everything it needs. We'd normally remove an irrelevant class
+    #       like this but we want to keep the door open for filtering
+    #       analysis results by type, rather than suffix.


-class CAnalyser(object):
+class CAnalyser:
     """
     Identify symbol definitions and dependencies in a C file.
     """

-    def __init__(self):
+    def __init__(self, config: BuildConfig):

         # runtime
-        self._config = None
+        self._config = config
         self._include_region: List[Tuple[int, str]] = []

     # todo: simplify by passing in the file path instead of the analysed tokens?
     def _locate_include_regions(self, trans_unit) -> None:
@@ -100,8 +104,7 @@ def _check_for_include(self, lineno) -> Optional[str]:
                 include_stack.pop()
         if include_stack:
             return include_stack[-1]
-        else:
-            return None
+        return None

     def run(self, fpath: Path) \
             -> Union[Tuple[AnalysedC, Path], Tuple[Exception, None]]:
@@ -149,9 +152,11 @@ def run(self, fpath: Path) \
                     continue
                 logger.debug('Considering node: %s', node.spelling)

-                if node.kind in {clang.cindex.CursorKind.FUNCTION_DECL, clang.cindex.CursorKind.VAR_DECL}:
+                if node.kind in {clang.cindex.CursorKind.FUNCTION_DECL,
+                                 clang.cindex.CursorKind.VAR_DECL}:
                     self._process_symbol_declaration(analysed_file, node, usr_symbols)
-                elif node.kind in {clang.cindex.CursorKind.CALL_EXPR, clang.cindex.CursorKind.DECL_REF_EXPR}:
+                elif node.kind in {clang.cindex.CursorKind.CALL_EXPR,
+                                   clang.cindex.CursorKind.DECL_REF_EXPR}:
                     self._process_symbol_dependency(analysed_file, node, usr_symbols)
         except Exception as err:
             logger.exception(f'error walking parsed nodes {fpath}')
@@ -166,7 +171,8 @@ def _process_symbol_declaration(self, analysed_file, node, usr_symbols):
         if node.is_definition():
             # only global symbols can be used by other files, not static symbols
             if node.linkage == clang.cindex.LinkageKind.EXTERNAL:
-                # This should catch function definitions which are exposed to the rest of the application
+                # This should catch function definitions which are exposed to
+                # the rest of the application
                 logger.debug(' * Is defined in this file')
                 # todo: ignore if inside user pragmas?
                 analysed_file.add_symbol_def(node.spelling)
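For readers unfamiliar with libclang, the traversal that `run` performs over the parsed translation unit follows the pattern sketched below. This is a generic `clang.cindex` illustration, not Fab's implementation (which additionally filters nodes by file and include region); `example.c` is a placeholder:

```python
# Generic libclang traversal sketch mirroring the node kinds used above.
import clang.cindex

index = clang.cindex.Index.create()
trans_unit = index.parse("example.c")  # placeholder source file

for node in trans_unit.cursor.walk_preorder():
    if node.kind in {clang.cindex.CursorKind.FUNCTION_DECL,
                     clang.cindex.CursorKind.VAR_DECL}:
        # a symbol this file may define (cf. _process_symbol_declaration)
        print("declaration:", node.spelling)
    elif node.kind in {clang.cindex.CursorKind.CALL_EXPR,
                       clang.cindex.CursorKind.DECL_REF_EXPR}:
        # a symbol this file uses (cf. _process_symbol_dependency)
        print("dependency:", node.spelling)
```
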
6 changes: 1 addition & 5 deletions source/fab/steps/analyse.py
@@ -133,7 +133,7 @@ def analyse(
     fortran_analyser = FortranAnalyser(config=config,
                                        std=std,
                                        ignore_mod_deps=ignore_mod_deps)
-    c_analyser = CAnalyser()
+    c_analyser = CAnalyser(config=config)

     # Creates the *build_trees* artefact from the files in `self.source_getter`.

@@ -146,10 +146,6 @@ def analyse(
     # - At this point we have a source tree for the entire source.
     # - (Optionally) Extract a sub tree for every root symbol, if provided. For building executables.

-    # todo: code smell - refactor (in another PR to keep things small)
-    fortran_analyser._config = config
-    c_analyser._config = config
-
     # parse
     files: List[Path] = source_getter(config.artefact_store)
     analysed_files = _parse_files(config, files=files, fortran_analyser=fortran_analyser, c_analyser=c_analyser)
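With the `_config` assignments gone, the config reaches the analysers only through their constructors, which the `analyse` step now handles. Here is a hedged sketch of the calling side, reusing the `BuildConfig` signature from the tests in this commit; the context-manager usage and `root_symbol` argument follow Fab's example configs, and `'my_program'` is a placeholder:

```python
# Sketch of a user build script invoking the analyse step; assumptions
# are noted in the lead-in above.
from fab.build_config import BuildConfig
from fab.steps.analyse import analyse
from fab.tools import ToolBox

with BuildConfig('proj', ToolBox(), mpi=False, openmp=False) as config:
    # ... source-gathering and preprocessing steps would run here ...
    analyse(config, root_symbol='my_program')  # placeholder root symbol
```
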
4 changes: 1 addition & 3 deletions source/fab/steps/psyclone.py
@@ -193,7 +193,6 @@ def _analyse_x90s(config, x90s: Set[Path]) -> Dict[Path, AnalysedX90]:

     # parse
     x90_analyser = X90Analyser(config=config)
-    x90_analyser._config = config
     with TimerLogger(f"analysing {len(parsable_x90s)} parsable x90 files"):
         x90_results = run_mp(config, items=parsable_x90s, func=x90_analyser.run)
     log_or_dot_finish(logger)
@@ -209,7 +208,7 @@ def _analyse_x90s(config, x90s: Set[Path]) -> Dict[Path, AnalysedX90]:
     analysed_x90 = {result.fpath.with_suffix('.x90'): result for result in analysed_x90}

     # make the hashes from the original x90s, not the parsable versions which have invoke names removed.
-    for p, _ in analysed_x90.items():
+    for p in analysed_x90:
         analysed_x90[p]._file_hash = file_checksum(p).file_hash

     return analysed_x90
@@ -250,7 +249,6 @@ def _analyse_kernels(config, kernel_roots) -> Dict[str, int]:
     # todo: We'd like to separate that from the general fortran analyser at some point, to reduce coupling.
     # The Analyse step also uses the same fortran analyser. It stores its results so they won't be analysed twice.
     fortran_analyser = FortranAnalyser(config=config)
-    fortran_analyser._config = config
     with TimerLogger(f"analysing {len(kernel_files)} potential psyclone kernel files"):
         fortran_results = run_mp(config, items=kernel_files, func=fortran_analyser.run)
     log_or_dot_finish(logger)
4 changes: 3 additions & 1 deletion source/fab/tools/tool_repository.py
@@ -75,7 +75,7 @@ def __init__(self):
         # Add the common shells. While Fab itself does not need this,
         # it is a very convenient tool for user configuration (e.g. to
         # query nc-config etc)
-        for shell_name in ["bash", "sh", "ksh", "dash"]:
+        for shell_name in ["sh", "bash", "ksh", "dash"]:
             self.add_tool(Shell(shell_name))

         # Now create the potential mpif90 and Cray ftn wrapper
@@ -193,6 +193,8 @@ def get_default(self, category: Category,
             OpenMP.
         :raises KeyError: if the category does not exist.
+        :raises RuntimeError: if no tool in the requested category is
+            available on the system.
         :raises RuntimeError: if no compiler/linker is found with the
             requested level of MPI support (yes or no).
         '''
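The two `:raises RuntimeError:` clauses describe the behaviour sketched below. The sketch assumes `get_default` accepts `mpi`/`openmp` keyword arguments for compiler and linker categories, which the surrounding docstring implies but this hunk does not show in full:

```python
# Hedged sketch of ToolRepository.get_default behaviour.
from fab.tools import Category, ToolRepository

repo = ToolRepository()

# Picks the first *available* shell ('sh' first, per this commit), or
# raises RuntimeError if none responds to its availability check.
shell = repo.get_default(Category.SHELL)

try:
    fc = repo.get_default(Category.FORTRAN_COMPILER,
                          mpi=True, openmp=False)  # assumed keywords
except RuntimeError as err:
    print(err)  # e.g. no available MPI-capable Fortran compiler
```
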
19 changes: 10 additions & 9 deletions tests/unit_tests/parse/c/test_c_analyser.py
@@ -17,9 +17,9 @@


 def test_simple_result(tmp_path):
-    c_analyser = CAnalyser()
-    c_analyser._config = BuildConfig('proj', ToolBox(), mpi=False,
-                                     openmp=False, fab_workspace=tmp_path)
+    config = BuildConfig('proj', ToolBox(), mpi=False, openmp=False,
+                         fab_workspace=tmp_path)
+    c_analyser = CAnalyser(config)

     with mock.patch('fab.parse.AnalysedFile.save'):
         fpath = Path(__file__).parent / "test_c_analyser.c"
@@ -72,7 +72,7 @@ def __init__(self, spelling, line):
     mock_trans_unit = Mock()
     mock_trans_unit.cursor.get_tokens.return_value = tokens

-    analyser = CAnalyser()
+    analyser = CAnalyser(config=None)
     analyser._locate_include_regions(mock_trans_unit)

     assert analyser._include_region == expect
@@ -81,7 +81,7 @@
 class Test__check_for_include:

     def test_vanilla(self):
-        analyser = CAnalyser()
+        analyser = CAnalyser(config=None)
         analyser._include_region = [
             (10, "sys_include_start"),
             (20, "sys_include_end"),
@@ -113,7 +113,7 @@ def _definition(self, spelling, linkage):
         node.linkage = linkage
         node.spelling = spelling

-        analyser = CAnalyser()
+        analyser = CAnalyser(config=None)
         analysed_file = Mock()

         analyser._process_symbol_declaration(analysed_file=analysed_file, node=node, usr_symbols=None)
@@ -134,7 +134,7 @@ def _declaration(self, spelling, include_type):
         node.is_definition.return_value = False
         node.spelling = spelling

-        analyser = CAnalyser()
+        analyser = CAnalyser(config=None)
         analyser._check_for_include = Mock(return_value=include_type)

         usr_symbols = []
@@ -155,7 +155,7 @@ def test_not_usr_symbol(self):
         analysed_file.add_symbol_dep.assert_not_called()

     def _dependency(self, spelling, usr_symbols):
-        analyser = CAnalyser()
+        analyser = CAnalyser(config=None)
         analysed_file = Mock()
         node = Mock(spelling=spelling)

@@ -168,7 +168,8 @@ def test_clang_disable():

     with mock.patch('fab.parse.c.clang', None):
         with mock.patch('fab.parse.c.file_checksum') as mock_file_checksum:
-            result = CAnalyser().run(Path(__file__).parent / "test_c_analyser.c")
+            c_analyser = CAnalyser(config=None)
+            result = c_analyser.run(Path(__file__).parent / "test_c_analyser.c")

     assert isinstance(result[0], ImportWarning)
     mock_file_checksum.assert_not_called()
8 changes: 4 additions & 4 deletions tests/unit_tests/tools/test_shell.py
@@ -39,23 +39,23 @@ def test_shell_check_available():

 def test_shell_exec_single_arg():
     '''Test running a shell script without additional parameters.'''
-    bash = Shell("ksh")
+    ksh = Shell("ksh")
     mock_result = mock.Mock(returncode=0)
     with mock.patch('fab.tools.tool.subprocess.run',
                     return_value=mock_result) as tool_run:
-        bash.exec("echo")
+        ksh.exec("echo")
     tool_run.assert_called_with(['ksh', '-c', 'echo'],
                                 capture_output=True, env=None, cwd=None,
                                 check=False)


 def test_shell_exec_multiple_args():
     '''Test running a shell script with parameters.'''
-    bash = Shell("ksh")
+    ksh = Shell("ksh")
     mock_result = mock.Mock(returncode=0)
     with mock.patch('fab.tools.tool.subprocess.run',
                     return_value=mock_result) as tool_run:
-        bash.exec(["some", "shell", "function"])
+        ksh.exec(["some", "shell", "function"])
     tool_run.assert_called_with(['ksh', '-c', 'some', 'shell', 'function'],
                                 capture_output=True, env=None, cwd=None,
                                 check=False)
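Renaming `bash` to `ksh` makes the variable match the shell under test. For context, a small hedged sketch of direct `Shell` use; the `is_available` property is assumed from the repository test below, and `exec` wraps its argument in `<shell> -c ...` exactly as the assertions above check:

```python
# Hedged usage sketch of the Shell tool.
from fab.tools import Shell

sh = Shell("sh")
if sh.is_available:        # assumed property; e.g. runs a version query
    sh.exec("echo hello")  # executes: sh -c 'echo hello'
```
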
2 changes: 1 addition & 1 deletion tests/unit_tests/tools/test_tool_repository.py
@@ -182,5 +182,5 @@ def test_tool_repository_no_tool_available():
     is_available.return_value = False
     with pytest.raises(RuntimeError) as err:
         tr.get_default(Category.SHELL)
-    assert ("Can't find available 'SHELL' tool. Tools are 'bash,sh,ksh,"
+    assert ("Can't find available 'SHELL' tool. Tools are 'sh,bash,ksh,"
             "dash'" in str(err.value))
