diff --git a/.github/workflows/build_docs.yml b/.github/workflows/build_docs.yml deleted file mode 100644 index 8716de74..00000000 --- a/.github/workflows/build_docs.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Build Docs - -on: workflow_dispatch - -jobs: - build-docs: - - runs-on: ubuntu-22.04 - - steps: - - name: set git user - run: | - git config --global user.email "CoreCapabilityDevelopmentTeam@metoffice.gov.uk" - git config --global user.name "SciFab Developers" - - - name: Checkout Fab project files - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.x' - cache: pip - - - name: Install Python libraries - run: | - python -m pip install --upgrade pip - pip install -e .[docs] - - - name: build docs - run: | - cd docs - rm -rf build - sphinx-apidoc --separate --module-first -d 5 -f -o source/apidoc ../source/fab - make html - - - name: move built docs to docs root - run: | - mv docs/build/html/* docs/ - - - name: git add built docs - run: | - git add docs/* - - - name: commit - run: | - git commit -m "docs build" - - - name: push to gh_pages branch - run: | - echo "pushing from $GITHUB_REF_NAME to gh_pages" - git push --force origin $GITHUB_REF_NAME:gh_pages diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml new file mode 100644 index 00000000..b3e6c7c5 --- /dev/null +++ b/.github/workflows/documentation.yml @@ -0,0 +1,56 @@ +name: Build Documentation + +on: + push: + branches: + - 'master' + workflow_dispatch: + +jobs: + sphinx-build: + + runs-on: ubuntu-24.04 + + steps: + - name: Check out Fab source + uses: actions/checkout@v4 + + - name: Cache Python packages + uses: actions/cache@v4 + with: + path: ~/.pip/cache + key: ${{ runner.os }}-docs-${{ hashFiles('**/pyproject.toml') }} + + - name: Install Python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Install documentation tools + run: pip install .[docs] + + -
name: Generate documentation + run: | + cd Documentation + sphinx-apidoc --separate --module-first -d 5 -f -o source/apidoc ../source/fab + make html + + - name: Prepare and upload asset + uses: actions/upload-pages-artifact@v3 + with: + path: Documentation/build/html + + + deploy-documentation: + permissions: + pages: write + id-token: write + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-24.04 + needs: sphinx-build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/docs/Makefile b/Documentation/Makefile similarity index 100% rename from docs/Makefile rename to Documentation/Makefile diff --git a/docs/make.bat b/Documentation/make.bat similarity index 95% rename from docs/make.bat rename to Documentation/make.bat index 6fcf05b4..061f32f9 100644 --- a/docs/make.bat +++ b/Documentation/make.bat @@ -1,35 +1,35 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=source -set BUILDDIR=build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo.
- echo.If you don't have Sphinx installed, grab it from - echo.https://www.sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_templates/crown-copyright.html b/Documentation/source/_templates/crown-copyright.html similarity index 100% rename from docs/source/_templates/crown-copyright.html rename to Documentation/source/_templates/crown-copyright.html diff --git a/docs/source/advanced_config.rst b/Documentation/source/advanced_config.rst similarity index 99% rename from docs/source/advanced_config.rst rename to Documentation/source/advanced_config.rst index 0babf822..faaf0b25 100644 --- a/docs/source/advanced_config.rst +++ b/Documentation/source/advanced_config.rst @@ -254,7 +254,7 @@ in particular where it creates files within it. *.o (compiled object files) *.mod (mod files) metrics/ - my_program.exe + my_program log.txt The *project workspace* folder takes its name from the project label passed in to the build configuration. 
diff --git a/docs/source/api.rst b/Documentation/source/api.rst similarity index 100% rename from docs/source/api.rst rename to Documentation/source/api.rst diff --git a/docs/source/conf.py b/Documentation/source/conf.py similarity index 98% rename from docs/source/conf.py rename to Documentation/source/conf.py index d036262f..cb6a185b 100644 --- a/docs/source/conf.py +++ b/Documentation/source/conf.py @@ -12,6 +12,7 @@ # import os import sys +from fab import __version__ as fab_version sys.path.insert(0, os.path.abspath('../../source')) @@ -22,8 +23,7 @@ author = 'Fab Team' # The full version, including alpha/beta/rc tags -import fab -release = fab.__version__ +release = fab_version # The version up to the minor patch, for distinguishing multi-version docs version = '.'.join(release.split('.')[:2]) diff --git a/docs/source/config_intro.rst b/Documentation/source/config_intro.rst similarity index 100% rename from docs/source/config_intro.rst rename to Documentation/source/config_intro.rst diff --git a/docs/source/development.rst b/Documentation/source/development.rst similarity index 100% rename from docs/source/development.rst rename to Documentation/source/development.rst diff --git a/docs/source/environment.rst b/Documentation/source/environment.rst similarity index 100% rename from docs/source/environment.rst rename to Documentation/source/environment.rst diff --git a/docs/source/features.rst b/Documentation/source/features.rst similarity index 100% rename from docs/source/features.rst rename to Documentation/source/features.rst diff --git a/docs/source/genindex.rst b/Documentation/source/genindex.rst similarity index 100% rename from docs/source/genindex.rst rename to Documentation/source/genindex.rst diff --git a/docs/source/glossary.rst b/Documentation/source/glossary.rst similarity index 96% rename from docs/source/glossary.rst rename to Documentation/source/glossary.rst index d0cedfe0..b9ff75cb 100644 --- a/docs/source/glossary.rst +++ 
b/Documentation/source/glossary.rst @@ -29,7 +29,7 @@ Glossary Fab's built-in steps come with sensible defaults so the user doesn't have to write unnecessary config. As an example, the Fortran preprocessor has a default artefact getter which reads *".F90"* files - from the :term:`Artefact Collection` called ``"all_source"``. + from the :term:`Artefact Collection` called ``"INITIAL_SOURCE"``. Artefact getters are derived from :class:`~fab.artefacts.ArtefactsGetter`. @@ -65,7 +65,7 @@ Glossary A folder inside the :term:`Fab Workspace`, containing all source and output from a build config. Root Symbol - The name of a Fortran PROGRAM, or ``"main"`` for C code. Fab builds an exe for every root symbol it's given. + The name of a Fortran PROGRAM, or ``"main"`` for C code. Fab builds an executable for every root symbol it's given. Source Tree The :class:`~fab.steps.analyse.analyse` step produces a dependency tree of the entire project source. diff --git a/docs/source/img/analysis_results_hierarchy.svg b/Documentation/source/img/analysis_results_hierarchy.svg similarity index 100% rename from docs/source/img/analysis_results_hierarchy.svg rename to Documentation/source/img/analysis_results_hierarchy.svg diff --git a/docs/source/img/busby.png b/Documentation/source/img/busby.png similarity index 100% rename from docs/source/img/busby.png rename to Documentation/source/img/busby.png diff --git a/docs/source/img/hist.png b/Documentation/source/img/hist.png similarity index 100% rename from docs/source/img/hist.png rename to Documentation/source/img/hist.png diff --git a/docs/source/img/pie.png b/Documentation/source/img/pie.png similarity index 100% rename from docs/source/img/pie.png rename to Documentation/source/img/pie.png diff --git a/docs/source/img/steps_and_artefacts.svg b/Documentation/source/img/steps_and_artefacts.svg similarity index 100% rename from docs/source/img/steps_and_artefacts.svg rename to Documentation/source/img/steps_and_artefacts.svg diff --git 
a/docs/source/index.rst b/Documentation/source/index.rst similarity index 100% rename from docs/source/index.rst rename to Documentation/source/index.rst diff --git a/docs/source/install.rst b/Documentation/source/install.rst similarity index 100% rename from docs/source/install.rst rename to Documentation/source/install.rst diff --git a/docs/source/metoffice.rst b/Documentation/source/metoffice.rst similarity index 100% rename from docs/source/metoffice.rst rename to Documentation/source/metoffice.rst diff --git a/docs/source/py-modindex.rst b/Documentation/source/py-modindex.rst similarity index 100% rename from docs/source/py-modindex.rst rename to Documentation/source/py-modindex.rst diff --git a/docs/source/site-specific-config.rst b/Documentation/source/site-specific-config.rst similarity index 100% rename from docs/source/site-specific-config.rst rename to Documentation/source/site-specific-config.rst diff --git a/docs/source/writing_config.rst b/Documentation/source/writing_config.rst similarity index 94% rename from docs/source/writing_config.rst rename to Documentation/source/writing_config.rst index be45ad69..6ad35a09 100644 --- a/docs/source/writing_config.rst +++ b/Documentation/source/writing_config.rst @@ -79,7 +79,7 @@ Please see the documentation for :func:`~fab.steps.find_source_files.find_source including how to exclude certain source code from the build. More grab steps can be found in the :mod:`~fab.steps.grab` module. -After the find_source_files step, there will be a collection called ``"ALL_SOURCE"``, in the artefact store. +After the find_source_files step, there will be a collection called ``"INITIAL_SOURCE"``, in the artefact store. .. [1] See :func:`~fab.steps.c_pragma_injector.c_pragma_injector` for an example of a step which creates artefacts in the source folder. @@ -94,7 +94,7 @@ which must happen before we analyse it. Steps generally create and find artefacts in the :term:`Artefact Store`, arranged into named collections. 
The :func:`~fab.steps.preprocess.preprocess_fortran` -automatically looks for Fortran source code in a collection named `'ALL_SOURCE'`, +automatically looks for Fortran source code in a collection named `'INITIAL_SOURCE'`, which is the default output from the preceding :funcfind_source_files step. It filters just the (uppercase) ``.F90`` files. @@ -293,8 +293,8 @@ it is the user's responsibility to maintain the default artefact sets My apologies for the LONG lines, they were the only way I could find to have properly indented paragraphs :( -1. :func:`~fab.steps.find_source_files.find_source_files` will add all source files it finds to ``ALL_SOURCE`` (by default, can be overwritten by the user). Any ``.F90`` and ``.f90`` file will also be added to ``FORTRAN_BUILD_FILES``, any ``.c`` file to ``C_BUILD_FILES``, and any ``.x90`` or ``.X90`` file to ``X90_BUILD_FILES``. It can be called several times if files from different root directories need to be added, and it will automatically update the ``*_BUILD_FILES`` sets. -2. Any user script that creates new files can add files to ``ALL_SOURCE`` if required, but also to the corresponding ``*_BUILD_FILES``. This will happen automatically if :func:`~fab.steps.find_source_files.find_source_files` is called to add these newly created files. +1. :func:`~fab.steps.find_source_files.find_source_files` will add all source files it finds to ``INITIAL_SOURCE`` (by default, can be overwritten by the user). Any ``.F90`` and ``.f90`` file will also be added to ``FORTRAN_BUILD_FILES``, any ``.c`` file to ``C_BUILD_FILES``, and any ``.x90`` or ``.X90`` file to ``X90_BUILD_FILES``. It can be called several times if files from different root directories need to be added, and it will automatically update the ``*_BUILD_FILES`` sets. +2. Any user script that creates new files can add files to ``INITIAL_SOURCE`` if required, but also to the corresponding ``*_BUILD_FILES``. 
This will happen automatically if :func:`~fab.steps.find_source_files.find_source_files` is called to add these newly created files. 3. If :func:`~fab.steps.c_pragma_injector.c_pragma_injector` is being called, it will handle all files in ``C_BUILD_FILES``, and will replace all the original C files with the newly created ones. For backward compatibility it will also store the new objects in the ``PRAGMAD_C`` set. 4. If :func:`~fab.steps.preprocess.preprocess_c` is called, it will preprocess all files in ``C_BUILD_FILES`` (at this stage typically preprocess the files in the original source folder, writing the output files to the build folder), and update that artefact set accordingly. For backward compatibility it will also store the preprocessed files in ``PREPROCESSED_C``. 5. If :func:`~fab.steps.preprocess.preprocess_fortran` is called, it will preprocess all files in ``FORTRAN_BUILD_FILES`` that end on ``.F90``, creating new ``.f90`` files in the build folder. These files will be added to ``PREPROCESSED_FORTRAN``. Then the original ``.F90`` are removed from ``FORTRAN_BUILD_FILES``, and the new preprocessed files (which are in ``PREPROCESSED_FORTRAN``) will be added. Then any ``.f90`` files that are not already in the build folder (an example of this are files created by a user script) are copied from the original source folder into the build folder, and ``FORTRAN_BUILD_FILES`` is updated to use the files in the new location. 
diff --git a/docs/.nojekyll b/docs/.nojekyll deleted file mode 100644 index e69de29b..00000000 diff --git a/docs/readme b/docs/readme deleted file mode 100644 index 8ad71401..00000000 --- a/docs/readme +++ /dev/null @@ -1,15 +0,0 @@ - - -# these commands build the docs -rm -rf build -rm -rf source/apidoc -sphinx-apidoc --separate --module-first -d 5 -f -o source/apidoc ../source/fab -make html - - -# all in one -rm -rf build && rm -rf source/apidoc && sphinx-apidoc --separate --module-first -d 5 -f -o source/apidoc ../source/fab && make html -firefox build/html/index.html - - -# the -d 5 controls the toc depth on the main api reference page, letting us see the individual step modules \ No newline at end of file diff --git a/run_configs/lfric/atm.py b/run_configs/lfric/atm.py index 8543d9ee..f1c31017 100755 --- a/run_configs/lfric/atm.py +++ b/run_configs/lfric/atm.py @@ -20,7 +20,7 @@ from fab.tools import ToolBox from grab_lfric import lfric_source_config, gpl_utils_source_config -from lfric_common import (configurator, fparser_workaround_stop_concatenation, +from lfric_common import (API, configurator, fparser_workaround_stop_concatenation, get_transformation_script) logger = logging.getLogger('fab') @@ -247,7 +247,7 @@ def file_filtering(config): kernel_roots=[state.build_output / 'lfric' / 'kernel'], transformation_script=get_transformation_script, cli_args=[], - api="dynamo0.3", + api=API, ) # todo: do we need this one in here? 
diff --git a/run_configs/lfric/gungho.py b/run_configs/lfric/gungho.py index b7b54378..caf59216 100755 --- a/run_configs/lfric/gungho.py +++ b/run_configs/lfric/gungho.py @@ -22,7 +22,7 @@ from fab.tools import ToolBox from grab_lfric import lfric_source_config, gpl_utils_source_config -from lfric_common import (configurator, fparser_workaround_stop_concatenation, +from lfric_common import (API, configurator, fparser_workaround_stop_concatenation, get_transformation_script) logger = logging.getLogger('fab') @@ -72,7 +72,7 @@ kernel_roots=[state.build_output], transformation_script=get_transformation_script, cli_args=[], - api="dynamo0.3", + api=API, ) fparser_workaround_stop_concatenation(state) diff --git a/run_configs/lfric/lfric_common.py b/run_configs/lfric/lfric_common.py index 147702d9..fe8eae03 100644 --- a/run_configs/lfric/lfric_common.py +++ b/run_configs/lfric/lfric_common.py @@ -1,15 +1,19 @@ import logging import os import shutil +from typing import Optional from pathlib import Path from fab.artefacts import ArtefactSet +from fab.build_config import BuildConfig from fab.steps import step from fab.steps.find_source_files import find_source_files from fab.tools import Category, Tool logger = logging.getLogger('fab') +API = "dynamo0.3" + class Script(Tool): '''A simple wrapper that runs a shell script. @@ -111,11 +115,11 @@ def fparser_workaround_stop_concatenation(config): # ============================================================================ -def get_transformation_script(fpath, config): +def get_transformation_script(fpath: Path, + config: BuildConfig) -> Optional[Path]: ''':returns: the transformation script to be used by PSyclone. 
- :rtype: Path - ''' + optimisation_path = config.source_root / 'optimisation' / 'meto-spice' relative_path = None for base_path in [config.source_root, config.build_output]: @@ -132,4 +136,4 @@ def get_transformation_script(fpath, config): global_transformation_script = optimisation_path / 'global.py' if global_transformation_script.exists(): return global_transformation_script - return "" + return None diff --git a/run_configs/lfric/mesh_tools.py b/run_configs/lfric/mesh_tools.py index 271bc7ad..f49aa43b 100755 --- a/run_configs/lfric/mesh_tools.py +++ b/run_configs/lfric/mesh_tools.py @@ -13,7 +13,7 @@ from fab.steps.psyclone import psyclone, preprocess_x90 from fab.tools import ToolBox -from lfric_common import configurator, fparser_workaround_stop_concatenation +from lfric_common import API, configurator, fparser_workaround_stop_concatenation from grab_lfric import lfric_source_config, gpl_utils_source_config @@ -57,7 +57,7 @@ kernel_roots=[state.build_output], cli_args=['--config', Path(__file__).parent / 'psyclone.cfg'], overrides_folder=state.source_root / 'mesh_tools_overrides', - api="dynamo0.3", + api=API, ) fparser_workaround_stop_concatenation(state) diff --git a/source/fab/artefacts.py b/source/fab/artefacts.py index f7221243..727aa2be 100644 --- a/source/fab/artefacts.py +++ b/source/fab/artefacts.py @@ -27,7 +27,7 @@ class ArtefactSet(Enum): '''A simple enum with the artefact types used internally in Fab. ''' - ALL_SOURCE = auto() + INITIAL_SOURCE = auto() PREPROCESSED_FORTRAN = auto() PREPROCESSED_C = auto() FORTRAN_BUILD_FILES = auto() @@ -42,8 +42,8 @@ class ArtefactSet(Enum): class ArtefactStore(dict): - '''This object stores set of artefacts (which can be of any type). Each artefact - is indexed by a string. + '''This object stores sets of artefacts (which can be of any type). + Each artefact is indexed by either an ArtefactSet enum, or a string. 
''' def __init__(self): @@ -89,7 +89,8 @@ def update_dict(self, collection: Union[str, ArtefactSet], :param key: the key in the dictionary to update. :param values: the values to update with. ''' - self[collection][key].update([values] if isinstance(values, str) else values) + self[collection][key].update([values] if isinstance(values, str) + else values) def copy_artefacts(self, source: Union[str, ArtefactSet], dest: Union[str, ArtefactSet], @@ -149,7 +150,8 @@ def __call__(self, artefact_store): class CollectionGetter(ArtefactsGetter): """ - A simple artefact getter which returns one :term:`Artefact Collection` from the artefact_store. + A simple artefact getter which returns one :term:`Artefact Collection` + from the artefact_store. Example:: @@ -170,18 +172,21 @@ def __call__(self, artefact_store): class CollectionConcat(ArtefactsGetter): """ - Returns a concatenated list from multiple :term:`Artefact Collections ` - (each expected to be an iterable). + Returns a concatenated list from multiple + :term:`Artefact Collections ` (each expected to be + an iterable). - An :class:`~fab.artefacts.ArtefactsGetter` can be provided instead of a collection_name. + An :class:`~fab.artefacts.ArtefactsGetter` can be provided instead of a + collection_name. Example:: - # The default source code getter for the Analyse step might look like this. + # The default source code getter for the Analyse step might look + # like this. DEFAULT_SOURCE_GETTER = CollectionConcat([ 'preprocessed_c', 'preprocessed_fortran', - SuffixFilter(ArtefactSet.ALL_SOURCE, '.f90'), + SuffixFilter(ArtefactSet.INITIAL_SOURCE, '.f90'), ]) """ @@ -189,14 +194,16 @@ def __init__(self, collections: Iterable[Union[ArtefactSet, str, ArtefactsGetter]]): """ :param collections: - An iterable containing collection names (strings) or other ArtefactsGetters. + An iterable containing collection names (strings) or + other ArtefactsGetters. 
""" self.collections = collections # todo: ensure the labelled values are iterables def __call__(self, artefact_store: ArtefactStore): - # todo: this should be a set, in case a file appears in multiple collections + # todo: this should be a set, in case a file appears in + # multiple collections result = [] for collection in self.collections: if isinstance(collection, (str, ArtefactSet)): @@ -208,13 +215,13 @@ def __call__(self, artefact_store: ArtefactStore): class SuffixFilter(ArtefactsGetter): """ - Returns the file paths in a :term:`Artefact Collection` (expected to be an iterable), - filtered by suffix. + Returns the file paths in a :term:`Artefact Collection` (expected to be + an iterable), filtered by suffix. Example:: # The default source getter for the FortranPreProcessor step. - DEFAULT_SOURCE = SuffixFilter(ArtefactSet.ALL_SOURCE, '.F90') + DEFAULT_SOURCE = SuffixFilter(ArtefactSet.INITIAL_SOURCE, '.F90') """ def __init__(self, @@ -264,6 +271,7 @@ def __call__(self, artefact_store: ArtefactStore): build_lists: Dict[str, List[AnalysedDependent]] = {} for root, tree in build_trees.items(): - build_lists[root] = filter_source_tree(source_tree=tree, suffixes=self.suffixes) + build_lists[root] = filter_source_tree(source_tree=tree, + suffixes=self.suffixes) return build_lists diff --git a/source/fab/parse/fortran.py b/source/fab/parse/fortran.py index 48a4c94e..c5ea7f60 100644 --- a/source/fab/parse/fortran.py +++ b/source/fab/parse/fortran.py @@ -298,8 +298,9 @@ def _process_comment(self, analysed_file, obj): if comment[:2] == "!$": # Check if it is a use statement with an OpenMP sentinel: # Use fparser's string reader to discard potential comment - # TODO #13: once fparser supports reading the sentinels, + # TODO #327: once fparser supports reading the sentinels, # this can be removed. 
+ # fparser issue: https://github.com/stfc/fparser/issues/443 reader = FortranStringReader(comment[2:]) try: line = reader.next() diff --git a/source/fab/steps/analyse.py b/source/fab/steps/analyse.py index 40df5aa4..1b739e56 100644 --- a/source/fab/steps/analyse.py +++ b/source/fab/steps/analyse.py @@ -312,7 +312,8 @@ def _gen_symbol_table(analysed_files: Iterable[AnalysedDependent]) -> Dict[str, symbols[symbol_def] = analysed_file.fpath if duplicates: - # we don't break the build because these symbols might not be required to build the exe + # we don't break the build because these symbols might not be + # required to build the executable. # todo: put a big warning at the end of the build? err_msg = "\n".join(map(str, duplicates)) warnings.warn(f"Duplicates found while generating symbol table:\n{err_msg}") diff --git a/source/fab/steps/archive_objects.py b/source/fab/steps/archive_objects.py index a931be9b..1ebbcdcf 100644 --- a/source/fab/steps/archive_objects.py +++ b/source/fab/steps/archive_objects.py @@ -24,10 +24,12 @@ DEFAULT_SOURCE_GETTER = CollectionGetter(ArtefactSet.OBJECT_FILES) -# todo: two diagrams showing the flow of artefacts in the exe and library use cases -# show how the library has a single build target with None as the name. +# todo: two diagrams showing the flow of artefacts in the executables and +# library use cases show how the library has a single build target with None +# as the name. -# todo: all this documentation for such a simple step - should we split it up somehow? +# todo: all this documentation for such a simple step - should we split it +# up somehow? @step def archive_objects(config: BuildConfig, @@ -73,7 +75,7 @@ def archive_objects(config: BuildConfig, targets, each with a name. This typically happens when configuring the :class:`~fab.steps.analyser.Analyser` step *with* a root symbol(s). We can assume each list of object files is sufficient to build each - *.exe*. + ** executable. 
In this case you cannot specify an *output_fpath* path because they are automatically created from the target name. @@ -110,14 +112,15 @@ def archive_objects(config: BuildConfig, target_objects = source_getter(config.artefact_store) assert target_objects.keys() if output_fpath and list(target_objects.keys()) != [None]: - raise ValueError("You must not specify an output path (library) when there are root symbols (exes)") + raise ValueError("You must not specify an output path (library) when " + "there are root symbols (executables)") if not output_fpath and list(target_objects.keys()) == [None]: raise ValueError("You must specify an output path when building a library.") for root, objects in target_objects.items(): if root: - # we're building an object archive for an exe + # we're building an object archive for an executable output_fpath = str(config.build_output / f'{root}.a') else: # we're building a single object archive with a given filename diff --git a/source/fab/steps/c_pragma_injector.py b/source/fab/steps/c_pragma_injector.py index dfbf64eb..196d0867 100644 --- a/source/fab/steps/c_pragma_injector.py +++ b/source/fab/steps/c_pragma_injector.py @@ -20,23 +20,30 @@ # todo: test @step -def c_pragma_injector(config, source: Optional[ArtefactsGetter] = None, output_name=None): +def c_pragma_injector(config, source: Optional[ArtefactsGetter] = None, + output_name=None): """ - A build step to inject custom pragmas to mark blocks of user and system include statements. + A build step to inject custom pragmas to mark blocks of user and system + include statements. - By default, reads .c files from the *all_source* artefact and creates the *pragmad_c* artefact. + By default, reads .c files from the *INITIAL_SOURCE* artefact and creates + the *pragmad_c* artefact. - This step does not write to the build output folder, it creates the pragmad c in the same folder as the c file. 
- This is because a subsequent preprocessing step needs to look in the source folder for header files, + This step does not write to the build output folder, it creates the + pragmad c in the same folder as the c file. This is because a subsequent + preprocessing step needs to look in the source folder for header files, including in paths relative to the c file. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can + read settings such as the project workspace folder or the + multiprocessing flag. :param source: - An :class:`~fab.artefacts.ArtefactsGetter` which give us our c files to process. + An :class:`~fab.artefacts.ArtefactsGetter` which give us our c files + to process. :param output_name: - The name of the artefact collection to create in the artefact store, with a sensible default + The name of the artefact collection to create in the artefact store, + with a sensible default """ source_getter = source or DEFAULT_SOURCE_GETTER diff --git a/source/fab/steps/find_source_files.py b/source/fab/steps/find_source_files.py index 0d5a8c90..52b03e0a 100644 --- a/source/fab/steps/find_source_files.py +++ b/source/fab/steps/find_source_files.py @@ -40,7 +40,8 @@ def check(self, path): class Include(_PathFilter): """ - A path filter which includes matching paths, this convenience class improves config readability. + A path filter which includes matching paths, this convenience class + improves config readability. """ def __init__(self, *filter_strings): @@ -57,7 +58,8 @@ def __str__(self): class Exclude(_PathFilter): """ - A path filter which excludes matching paths, this convenience class improves config readability. + A path filter which excludes matching paths, this convenience class + improves config readability. 
""" @@ -75,7 +77,7 @@ def __str__(self): @step def find_source_files(config, source_root=None, - output_collection=ArtefactSet.ALL_SOURCE, + output_collection=ArtefactSet.INITIAL_SOURCE, path_filters: Optional[Iterable[_PathFilter]] = None): """ Find the files in the source folder, with filtering. @@ -83,9 +85,10 @@ def find_source_files(config, source_root=None, Files can be included or excluded with simple pattern matching. Every file is included by default, unless the filters say otherwise. - Path filters are expected to be provided by the user in an *ordered* collection. - The two convenience subclasses, :class:`~fab.steps.walk_source.Include` and :class:`~fab.steps.walk_source.Exclude`, - improve readability. + Path filters are expected to be provided by the user in an *ordered* + collection. The two convenience subclasses, + :class:`~fab.steps.walk_source.Include` and + :class:`~fab.steps.walk_source.Exclude`, improve readability. Order matters. For example:: @@ -94,14 +97,17 @@ def find_source_files(config, source_root=None, Include('my_folder/my_file.F90'), ] - In the above example, swapping the order would stop the file being included in the build. + In the above example, swapping the order would stop the file being + included in the build. A path matches a filter string simply if it *contains* it, - so the path *my_folder/my_file.F90* would match filters "my_folder", "my_file" and "er/my". + so the path *my_folder/my_file.F90* would match filters + "my_folder", "my_file" and "er/my". :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param source_root: Optional path to source folder, with a sensible default. 
:param output_collection: @@ -120,8 +126,10 @@ def find_source_files(config, source_root=None, # file filtering filtered_fpaths = set() - # todo: we shouldn't need to ignore the prebuild folder here, it's not underneath the source root. - for fpath in file_walk(source_root, ignore_folders=[config.prebuild_folder]): + # todo: we shouldn't need to ignore the prebuild folder here, it's not + # underneath the source root. + for fpath in file_walk(source_root, + ignore_folders=[config.prebuild_folder]): wanted = True for path_filter in path_filters: diff --git a/source/fab/steps/preprocess.py b/source/fab/steps/preprocess.py index 981dbb3c..325ada70 100644 --- a/source/fab/steps/preprocess.py +++ b/source/fab/steps/preprocess.py @@ -207,7 +207,7 @@ class DefaultCPreprocessorSource(ArtefactsGetter): """ def __call__(self, artefact_store): return CollectionGetter(ArtefactSet.PRAGMAD_C)(artefact_store) \ - or SuffixFilter(ArtefactSet.ALL_SOURCE, '.c')(artefact_store) + or SuffixFilter(ArtefactSet.INITIAL_SOURCE, '.c')(artefact_store) # todo: rename preprocess_c diff --git a/source/fab/steps/root_inc_files.py b/source/fab/steps/root_inc_files.py index 5c2a0ca1..5ab3a58b 100644 --- a/source/fab/steps/root_inc_files.py +++ b/source/fab/steps/root_inc_files.py @@ -4,10 +4,11 @@ # which you should have received as part of this distribution ############################################################################## """ -A helper step to copy .inc files to the root of the build source folder, for easy include by the preprocessor. +A helper step to copy .inc files to the root of the build source folder, +for easy include by the preprocessor. -Currently only used for building JULES, .inc files are due to be removed from dev practices, -at which point this step should be deprecated. +Currently only used for building JULES, .inc files are due to be removed +from dev practices, at which point this step should be deprecated. 
""" import logging @@ -36,8 +37,9 @@ def root_inc_files(config: BuildConfig): Artefacts created by previous Steps. This is where we find the artefacts to process. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can + read settings such as the project workspace folder or the + multiprocessing flag. """ @@ -45,14 +47,18 @@ def root_inc_files(config: BuildConfig): build_output: Path = config.build_output build_output.mkdir(parents=True, exist_ok=True) - warnings.warn("RootIncFiles is deprecated as .inc files are due to be removed.", DeprecationWarning) + warnings.warn("RootIncFiles is deprecated as .inc files are due to " + "be removed.", DeprecationWarning) - # inc files all go in the root - they're going to be removed altogether, soon + # inc files all go in the root - they're going to be + # removed altogether, soon inc_copied = set() - for fpath in suffix_filter(config.artefact_store[ArtefactSet.ALL_SOURCE], [".inc"]): + initial_source = config.artefact_store[ArtefactSet.INITIAL_SOURCE] + for fpath in suffix_filter(initial_source, [".inc"]): # don't copy from the output root to the output root! - # this is currently unlikely to happen but did in the past, and caused problems. + # this is currently unlikely to happen but did in the past, + # and caused problems. 
if fpath.parent == build_output: continue diff --git a/tests/conftest.py b/tests/conftest.py index a0adbc26..55d948fd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -58,3 +58,10 @@ def fixture_tool_box(mock_c_compiler, mock_fortran_compiler, mock_linker): tool_box.add_tool(mock_fortran_compiler) tool_box.add_tool(mock_linker) return tool_box + + +@pytest.fixture(name="psyclone_lfric_api") +def fixture_psyclone_lfric_api(): + '''A simple fixture to provide the name of the LFRic API for + PSyclone.''' + return "dynamo0.3" diff --git a/tests/system_tests/psyclone/test_psyclone_system_test.py b/tests/system_tests/psyclone/test_psyclone_system_test.py index 00616165..cf3c80d0 100644 --- a/tests/system_tests/psyclone/test_psyclone_system_test.py +++ b/tests/system_tests/psyclone/test_psyclone_system_test.py @@ -131,7 +131,7 @@ def config(self, tmp_path): multiprocessing=False) return config - def steps(self, config): + def steps(self, config, psyclone_lfric_api): here = Path(__file__).parent grab_folder(config, here / 'skeleton') find_source_files(config) @@ -145,9 +145,9 @@ def steps(self, config): config.build_output / 'kernel', # this second folder is just to test the multiple folders code, which was bugged. There's no kernels there. 
Path(__file__).parent / 'skeleton/algorithm', - ], api="dynamo0.3") + ], api=psyclone_lfric_api) - def test_run(self, config): + def test_run(self, config, psyclone_lfric_api): # if these files exist after the run then we know: # a) the expected files were created # b) the prebuilds were protected from automatic cleanup @@ -171,20 +171,20 @@ def test_run(self, config): assert all(list(config.prebuild_folder.glob(f)) == [] for f in expect_prebuild_files) assert all(list(config.build_output.glob(f)) == [] for f in expect_build_files) with config, pytest.warns(UserWarning, match="no transformation script specified"): - self.steps(config) + self.steps(config, psyclone_lfric_api) assert all(list(config.prebuild_folder.glob(f)) != [] for f in expect_prebuild_files) assert all(list(config.build_output.glob(f)) != [] for f in expect_build_files) - def test_prebuild(self, tmp_path, config): + def test_prebuild(self, tmp_path, config, psyclone_lfric_api): with config, pytest.warns(UserWarning, match="no transformation script specified"): - self.steps(config) + self.steps(config, psyclone_lfric_api) # make sure no work gets done the second time round with mock.patch('fab.parse.x90.X90Analyser.walk_nodes') as mock_x90_walk, \ mock.patch('fab.parse.fortran.FortranAnalyser.walk_nodes') as mock_fortran_walk, \ mock.patch('fab.tools.psyclone.Psyclone.process') as mock_run, \ config, pytest.warns(UserWarning, match="no transformation script specified"): - self.steps(config) + self.steps(config, psyclone_lfric_api) mock_x90_walk.assert_not_called() mock_fortran_walk.assert_not_called() @@ -197,12 +197,12 @@ class TestTransformationScript: and whether transformation script is passed to psyclone after '-s'. 
""" - def test_transformation_script(self): + def test_transformation_script(self, psyclone_lfric_api): psyclone_tool = Psyclone() mock_transformation_script = mock.Mock(return_value=__file__) with mock.patch('fab.tools.psyclone.Psyclone.run') as mock_run_command: mock_transformation_script.return_value = Path(__file__) - psyclone_tool.process(api="dynamo0.3", + psyclone_tool.process(api=psyclone_lfric_api, x90_file=Path(__file__), psy_file=Path(__file__), alg_file=Path(__file__), @@ -216,7 +216,7 @@ def test_transformation_script(self): mock_transformation_script.assert_called_once_with(Path(__file__), None) # check transformation_script is passed to psyclone command with '-s' mock_run_command.assert_called_with( - additional_parameters=['-api', 'dynamo0.3', '-l', 'all', + additional_parameters=['-api', psyclone_lfric_api, '-l', 'all', '-opsy', Path(__file__), '-oalg', Path(__file__), '-s', Path(__file__), diff --git a/tests/unit_tests/parse/fortran/test_fortran_analyser.f90 b/tests/unit_tests/parse/fortran/test_fortran_analyser.f90 index b4f250ff..508ba56b 100644 --- a/tests/unit_tests/parse/fortran/test_fortran_analyser.f90 +++ b/tests/unit_tests/parse/fortran/test_fortran_analyser.f90 @@ -20,6 +20,10 @@ END SUBROUTINE internal_sub SUBROUTINE openmp_sentinel !$ USE compute_chunk_size_mod, ONLY: compute_chunk_size ! Note OpenMP sentinel !$ USE test that is not a sentinel with a use statement inside +!GCC$ unroll 6 +!DIR$ assume (mod(p, 6) == 0) +!$omp do +!$acc parallel copyin (array, scalar). END SUBROUTINE openmp_sentinel INTEGER FUNCTION internal_func() diff --git a/tests/unit_tests/parse/fortran/test_fortran_analyser.py b/tests/unit_tests/parse/fortran/test_fortran_analyser.py index 97d39c77..75621020 100644 --- a/tests/unit_tests/parse/fortran/test_fortran_analyser.py +++ b/tests/unit_tests/parse/fortran/test_fortran_analyser.py @@ -18,20 +18,25 @@ from fab.parse.fortran_common import iter_content from fab.tools import ToolBox +'''Tests the Fortran analyser. 
+''' # todo: test function binding @pytest.fixture -def module_fpath(): +def module_fpath() -> Path: + '''Simple fixture that sets the name of the module test file.''' return Path(__file__).parent / "test_fortran_analyser.f90" @pytest.fixture -def module_expected(module_fpath): +def module_expected(module_fpath) -> AnalysedFortran: + '''Returns the expected AnalysedFortran instance for the Fortran + test module.''' return AnalysedFortran( fpath=module_fpath, - file_hash=1344519263, + file_hash=1757501304, module_defs={'foo_mod'}, symbol_defs={'external_sub', 'external_func', 'foo_mod'}, module_deps={'bar_mod', 'compute_chunk_size_mod'}, @@ -41,7 +46,7 @@ def module_expected(module_fpath): ) -class Test_Analyser(object): +class TestAnalyser: @pytest.fixture def fortran_analyser(self, tmp_path): @@ -53,29 +58,37 @@ def fortran_analyser(self, tmp_path): def test_empty_file(self, fortran_analyser): # make sure we get back an EmptySourceFile with mock.patch('fab.parse.AnalysedFile.save'): - analysis, artefact = fortran_analyser.run(fpath=Path(Path(__file__).parent / "empty.f90")) - assert type(analysis) is EmptySourceFile + analysis, artefact = fortran_analyser.run( + fpath=Path(Path(__file__).parent / "empty.f90")) + assert isinstance(analysis, EmptySourceFile) assert artefact is None - def test_module_file(self, fortran_analyser, module_fpath, module_expected): + def test_module_file(self, fortran_analyser, module_fpath, + module_expected): with mock.patch('fab.parse.AnalysedFile.save'): analysis, artefact = fortran_analyser.run(fpath=module_fpath) assert analysis == module_expected - assert artefact == fortran_analyser._config.prebuild_folder / f'test_fortran_analyser.{analysis.file_hash}.an' + assert artefact == (fortran_analyser._config.prebuild_folder / + f'test_fortran_analyser.{analysis.file_hash}.an') - def test_program_file(self, fortran_analyser, module_fpath, module_expected): + def test_program_file(self, fortran_analyser, module_fpath, + module_expected): 
# same as test_module_file() but replacing MODULE with PROGRAM with NamedTemporaryFile(mode='w+t', suffix='.f90') as tmp_file: - tmp_file.write(module_fpath.open().read().replace("MODULE", "PROGRAM")) + tmp_file.write(module_fpath.open().read().replace("MODULE", + "PROGRAM")) tmp_file.flush() with mock.patch('fab.parse.AnalysedFile.save'): - analysis, artefact = fortran_analyser.run(fpath=Path(tmp_file.name)) + analysis, artefact = fortran_analyser.run( + fpath=Path(tmp_file.name)) module_expected.fpath = Path(tmp_file.name) - module_expected._file_hash = 731743441 + module_expected._file_hash = 3388519280 module_expected.program_defs = {'foo_mod'} module_expected.module_defs = set() - module_expected.symbol_defs.update({'internal_sub', 'openmp_sentinel', 'internal_func'}) + module_expected.symbol_defs.update({'internal_func', + 'internal_sub', + 'openmp_sentinel'}) assert analysis == module_expected assert artefact == fortran_analyser._config.prebuild_folder \ @@ -84,11 +97,13 @@ def test_program_file(self, fortran_analyser, module_fpath, module_expected): # todo: test more methods! 
-class Test_process_variable_binding(object): +class TestProcessVariableBinding: + '''This test class tests the variable binding.''' # todo: define and depend, with and without bind name def test_define_without_bind_name(self, tmp_path): + '''Test usage of bind''' fpath = tmp_path / 'temp.f90' open(fpath, 'wt').write(""" @@ -112,13 +127,15 @@ def test_define_without_bind_name(self, tmp_path): tree = f2008_parser(reader) # find the tree node representing the variable binding - var_decl = next(obj for obj in iter_content(tree) if isinstance(obj, Type_Declaration_Stmt)) + var_decl = next(obj for obj in iter_content(tree) + if isinstance(obj, Type_Declaration_Stmt)) # run our handler fpath = Path('foo') analysed_file = AnalysedFortran(fpath=fpath, file_hash=0) analyser = FortranAnalyser() - analyser._process_variable_binding(analysed_file=analysed_file, obj=var_decl) + analyser._process_variable_binding(analysed_file=analysed_file, + obj=var_decl) assert analysed_file.symbol_defs == {'helloworld', } diff --git a/tests/unit_tests/steps/test_psyclone_unit_test.py b/tests/unit_tests/steps/test_psyclone_unit_test.py index 6bebb6a1..7790bc7d 100644 --- a/tests/unit_tests/steps/test_psyclone_unit_test.py +++ b/tests/unit_tests/steps/test_psyclone_unit_test.py @@ -20,7 +20,8 @@ class TestGenPrebuildHash: """ @pytest.fixture - def data(self, tmp_path) -> Tuple[MpCommonArgs, Path, int]: + def data(self, tmp_path, psyclone_lfric_api) -> Tuple[MpCommonArgs, + Path, int]: x90_file = Path('foo.x90') analysed_x90 = { @@ -46,7 +47,7 @@ def data(self, tmp_path) -> Tuple[MpCommonArgs, Path, int]: config=None, # type: ignore[arg-type] kernel_roots=[], transformation_script=mock_transformation_script, - api="dynamo0.3", + api=psyclone_lfric_api, overrides_folder=None, override_files=None, # type: ignore[arg-type] ) diff --git a/tests/unit_tests/steps/test_root_inc_files.py b/tests/unit_tests/steps/test_root_inc_files.py index a2e46101..6c0e94b9 100644 --- 
a/tests/unit_tests/steps/test_root_inc_files.py +++ b/tests/unit_tests/steps/test_root_inc_files.py @@ -16,38 +16,44 @@ def test_vanilla(self): inc_files = [Path('/foo/source/bar.inc')] config = BuildConfig('proj', ToolBox()) - config.artefact_store[ArtefactSet.ALL_SOURCE] = inc_files + config.artefact_store[ArtefactSet.INITIAL_SOURCE] = inc_files with mock.patch('fab.steps.root_inc_files.shutil') as mock_shutil: with mock.patch('fab.steps.root_inc_files.Path.mkdir'), \ - pytest.warns(UserWarning, match="_metric_send_conn not set, cannot send metrics"): + pytest.warns(UserWarning, match="_metric_send_conn not set, " + "cannot send metrics"): root_inc_files(config) - mock_shutil.copy.assert_called_once_with(inc_files[0], config.build_output) + mock_shutil.copy.assert_called_once_with(inc_files[0], + config.build_output) def test_skip_output_folder(self): # ensure it doesn't try to copy a file in the build output config = BuildConfig('proj', ToolBox()) - inc_files = [Path('/foo/source/bar.inc'), config.build_output / 'fab.inc'] - config.artefact_store[ArtefactSet.ALL_SOURCE] = inc_files + inc_files = [Path('/foo/source/bar.inc'), + config.build_output / 'fab.inc'] + config.artefact_store[ArtefactSet.INITIAL_SOURCE] = inc_files with mock.patch('fab.steps.root_inc_files.shutil') as mock_shutil: with mock.patch('fab.steps.root_inc_files.Path.mkdir'), \ - pytest.warns(UserWarning, match="_metric_send_conn not set, cannot send metrics"): + pytest.warns(UserWarning, match="_metric_send_conn not set, " + "cannot send metrics"): root_inc_files(config) - mock_shutil.copy.assert_called_once_with(inc_files[0], config.build_output) + mock_shutil.copy.assert_called_once_with(inc_files[0], + config.build_output) def test_name_clash(self): # ensure raises an exception if there is a name clash inc_files = [Path('/foo/source/bar.inc'), Path('/foo/sauce/bar.inc')] config = BuildConfig('proj', ToolBox()) - config.artefact_store[ArtefactSet.ALL_SOURCE] = inc_files + 
config.artefact_store[ArtefactSet.INITIAL_SOURCE] = inc_files with pytest.raises(FileExistsError): with mock.patch('fab.steps.root_inc_files.shutil'): with mock.patch('fab.steps.root_inc_files.Path.mkdir'), \ pytest.warns(DeprecationWarning, - match="RootIncFiles is deprecated as .inc files are due to be removed."): + match="RootIncFiles is deprecated as .inc " + "files are due to be removed."): root_inc_files(config) diff --git a/tests/unit_tests/test_artefacts.py b/tests/unit_tests/test_artefacts.py index 5b76a04a..06f6037b 100644 --- a/tests/unit_tests/test_artefacts.py +++ b/tests/unit_tests/test_artefacts.py @@ -16,7 +16,6 @@ def test_artefact_store() -> None: artefact_store = ArtefactStore() assert len(artefact_store) == len(ArtefactSet) assert isinstance(artefact_store, dict) - assert ArtefactSet.CURRENT_PREBUILDS in artefact_store for artefact in ArtefactSet: if artefact in [ArtefactSet.OBJECT_FILES, ArtefactSet.OBJECT_ARCHIVES]: @@ -35,29 +34,29 @@ def test_artefact_store_copy() -> None: d = Path("d.F90.nocopy") e = Path("e.f90.donotcopyeither") # Try adding a single path, a set and a list: - artefact_store.add(ArtefactSet.ALL_SOURCE, a) - artefact_store.copy_artefacts(ArtefactSet.ALL_SOURCE, + artefact_store.add(ArtefactSet.INITIAL_SOURCE, a) + artefact_store.copy_artefacts(ArtefactSet.INITIAL_SOURCE, ArtefactSet.CURRENT_PREBUILDS) assert artefact_store[ArtefactSet.CURRENT_PREBUILDS] == set([a]) - artefact_store.add(ArtefactSet.ALL_SOURCE, [b, c]) - artefact_store.add(ArtefactSet.ALL_SOURCE, set([d, e])) - assert (artefact_store[ArtefactSet.ALL_SOURCE] == + artefact_store.add(ArtefactSet.INITIAL_SOURCE, [b, c]) + artefact_store.add(ArtefactSet.INITIAL_SOURCE, set([d, e])) + assert (artefact_store[ArtefactSet.INITIAL_SOURCE] == set([a, b, c, d, e])) # Make sure that the previous copy did not get modified: assert artefact_store[ArtefactSet.CURRENT_PREBUILDS] == set([a]) - artefact_store.copy_artefacts(ArtefactSet.ALL_SOURCE, + 
artefact_store.copy_artefacts(ArtefactSet.INITIAL_SOURCE, ArtefactSet.CURRENT_PREBUILDS) assert (artefact_store[ArtefactSet.CURRENT_PREBUILDS] == set([a, b, c, d, e])) # Now copy with suffix filtering: - artefact_store.copy_artefacts(ArtefactSet.ALL_SOURCE, + artefact_store.copy_artefacts(ArtefactSet.INITIAL_SOURCE, ArtefactSet.FORTRAN_BUILD_FILES, suffixes=[".F90", ".f90"]) assert artefact_store[ArtefactSet.FORTRAN_BUILD_FILES] == set([a, b, c]) # Make sure filtering is case sensitive - artefact_store.copy_artefacts(ArtefactSet.ALL_SOURCE, + artefact_store.copy_artefacts(ArtefactSet.INITIAL_SOURCE, ArtefactSet.C_BUILD_FILES, suffixes=[".f90"]) assert artefact_store[ArtefactSet.C_BUILD_FILES] == set([a, c]) @@ -77,13 +76,13 @@ def test_artefact_store_update_dict() -> None: def test_artefact_store_replace() -> None: '''Tests the replace function.''' artefact_store = ArtefactStore() - artefact_store.add(ArtefactSet.ALL_SOURCE, [Path("a"), Path("b"), - Path("c")]) - artefact_store.replace(ArtefactSet.ALL_SOURCE, + artefact_store.add(ArtefactSet.INITIAL_SOURCE, [Path("a"), Path("b"), + Path("c")]) + artefact_store.replace(ArtefactSet.INITIAL_SOURCE, remove_files=[Path("a"), Path("b")], add_files=[Path("B")]) - assert artefact_store[ArtefactSet.ALL_SOURCE] == set([Path("B"), - Path("c")]) + assert artefact_store[ArtefactSet.INITIAL_SOURCE] == set([Path("B"), + Path("c")]) # Test the behaviour for dictionaries with pytest.raises(RuntimeError) as err: @@ -181,9 +180,9 @@ def test_multiple_suffixes(self, artefact_store) -> None: def test_collection_getter() -> None: '''Test CollectionGetter.''' artefact_store = ArtefactStore() - artefact_store.add(ArtefactSet.ALL_SOURCE, ["a", "b", "c"]) - cg = CollectionGetter(ArtefactSet.ALL_SOURCE) - assert artefact_store[ArtefactSet.ALL_SOURCE] == cg(artefact_store) + artefact_store.add(ArtefactSet.INITIAL_SOURCE, ["a", "b", "c"]) + cg = CollectionGetter(ArtefactSet.INITIAL_SOURCE) + assert artefact_store[ArtefactSet.INITIAL_SOURCE] == 
cg(artefact_store) def test_collection_concat(): diff --git a/tests/unit_tests/tools/test_psyclone.py b/tests/unit_tests/tools/test_psyclone.py index 2c1cf09e..ad7817c2 100644 --- a/tests/unit_tests/tools/test_psyclone.py +++ b/tests/unit_tests/tools/test_psyclone.py @@ -46,7 +46,7 @@ def test_psyclone_check_available(): assert not psyclone.check_available() -def test_psyclone_process(): +def test_psyclone_process(psyclone_lfric_api): '''Test running PSyclone.''' psyclone = Psyclone() mock_result = mock.Mock(returncode=0) @@ -57,7 +57,7 @@ def test_psyclone_process(): with mock.patch('fab.tools.tool.subprocess.run', return_value=mock_result) as tool_run: psyclone.process(config=config, - api="dynamo0.3", + api=psyclone_lfric_api, x90_file="x90_file", psy_file="psy_file", alg_file="alg_file", @@ -65,8 +65,8 @@ def test_psyclone_process(): kernel_roots=["root1", "root2"], additional_parameters=["-c", "psyclone.cfg"]) tool_run.assert_called_with( - ['psyclone', '-api', 'dynamo0.3', '-l', 'all', '-opsy', 'psy_file', - '-oalg', 'alg_file', '-s', 'script_called', '-c', + ['psyclone', '-api', psyclone_lfric_api, '-l', 'all', '-opsy', + 'psy_file', '-oalg', 'alg_file', '-s', 'script_called', '-c', 'psyclone.cfg', '-d', 'root1', '-d', 'root2', 'x90_file'], capture_output=True, env=None, cwd=None, check=False) @@ -112,12 +112,12 @@ def test_psyclone_process(): x90_file="x90_file", psy_file="psy_file", alg_file="alg_file", - api="dynamo0.3", + api=psyclone_lfric_api, transformation_script=transformation_function, kernel_roots=["root1", "root2"], additional_parameters=["-c", "psyclone.cfg"]) tool_run.assert_called_with( - ['psyclone', '-api', 'dynamo0.3', '-l', 'all', '-opsy', 'psy_file', - '-oalg', 'alg_file', '-s', 'script_called', '-c', + ['psyclone', '-api', psyclone_lfric_api, '-l', 'all', '-opsy', + 'psy_file', '-oalg', 'alg_file', '-s', 'script_called', '-c', 'psyclone.cfg', '-d', 'root1', '-d', 'root2', 'x90_file'], capture_output=True, env=None, cwd=None, check=False)