diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0ff21fcf..2e288542 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,6 +1,6 @@ { "name": "Python dev environment", - "image": "ghcr.io/opencyphal/toxic:tx22.4.2", + "image": "ghcr.io/opencyphal/toxic:tx22.4.3", "workspaceFolder": "/workspace", "workspaceMount": "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=delegated", "mounts": [ diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 89f200dc..21e226fb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,7 @@ jobs: release: if: ${{ github.event_name == 'release' && !github.event.release.prerelease }} runs-on: ubuntu-latest - container: ghcr.io/opencyphal/toxic:tx22.4.2 + container: ghcr.io/opencyphal/toxic:tx22.4.3 steps: - uses: actions/checkout@v4 with: @@ -25,7 +25,7 @@ jobs: - name: test-nnvg run: tox -e py311-nnvg - name: test-doctest - run: tox -e py311-doctest,py310-rstdoctest + run: tox -e py311-doctest,py311-rstdoctest - name: test-pytest run: tox -e py311-test - name: package diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8bb85242..599c8a25 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,7 +13,7 @@ on: jobs: test: runs-on: ubuntu-latest - container: ghcr.io/opencyphal/toxic:tx22.4.2 + container: ghcr.io/opencyphal/toxic:tx22.4.3 steps: - uses: actions/checkout@v4 with: @@ -21,11 +21,11 @@ jobs: - name: lint run: tox -e lint - name: test-nnvg - run: tox -e py310-nnvg + run: tox -e py311-nnvg - name: test-doctest - run: tox -e py310-doctest,py310-rstdoctest + run: tox -e py311-doctest,py311-rstdoctest - name: test-pytest - run: tox -e py310-test + run: tox -e py311-test - name: test-report run: tox -e report - name: package @@ -39,7 +39,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: xunit-results - path: 
.tox/py310-test/tmp/xunit-result.xml + path: .tox/py311-test/tmp/xunit-result.xml - name: upload-package uses: actions/upload-artifact@v4 with: @@ -54,7 +54,7 @@ jobs: with: # Disabling shallow clone is recommended for improving relevancy of reporting fetch-depth: 0 - - name: download-converage-reports + - name: download-coverage-reports uses: actions/download-artifact@v4 with: name: coverage-reports @@ -63,7 +63,7 @@ jobs: uses: actions/download-artifact@v4 with: name: xunit-results - path: .tox/py310-test/tmp/ + path: .tox/py311-test/tmp/ - name: set-environment run: | echo NUNAVUT_MAJOR_MINOR_VERSION=$(./.github/verify.py --major-minor-version-only) >> $GITHUB_ENV @@ -83,7 +83,7 @@ jobs: -Dsonar.projectVersion=${{ env.NUNAVUT_MAJOR_MINOR_VERSION }} -Dsonar.python.version=python3.11 -Dsonar.python.coverage.reportPaths=.tox/report/tmp/coverage.xml - -Dsonar.python.xunit.reportPath=.tox/py310-test/tmp/xunit-result.xml + -Dsonar.python.xunit.reportPath=.tox/py311-test/tmp/xunit-result.xml - name: report-pr if: ${{ github.event_name == 'pull_request' }} uses: sonarsource/sonarcloud-github-action@master @@ -97,12 +97,12 @@ jobs: -Dsonar.projectVersion=${{ env.NUNAVUT_MAJOR_MINOR_VERSION }} -Dsonar.python.version=python3.11 -Dsonar.python.coverage.reportPaths=.tox/report/tmp/coverage.xml - -Dsonar.python.xunit.reportPath=.tox/py310-test/tmp/xunit-result.xml + -Dsonar.python.xunit.reportPath=.tox/py311-test/tmp/xunit-result.xml compat-test-python3-mac: strategy: matrix: - python3-version: ['11','12'] + python3-version: ['11','12', '13'] python3-platform: ['macos-latest'] runs-on: ${{ matrix.python3-platform }} needs: test @@ -122,9 +122,9 @@ jobs: compat-test-python3-ubuntu: strategy: matrix: - python3-version: ['8', '9', '10', '11', '12'] + python3-version: ['8', '9', '10', '11', '12', '13'] runs-on: ubuntu-latest - container: ghcr.io/opencyphal/toxic:tx22.4.2 + container: ghcr.io/opencyphal/toxic:tx22.4.3 needs: test steps: - uses: actions/checkout@v4 @@ -262,7 +262,7 
@@ jobs: language-verification-python: runs-on: ubuntu-latest needs: test - container: ghcr.io/opencyphal/toxic:tx22.4.2 + container: ghcr.io/opencyphal/toxic:tx22.4.3 steps: - uses: actions/checkout@v4 with: diff --git a/.readthedocs.yaml b/.readthedocs.yaml index b47700b5..1d3f0364 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,8 +1,8 @@ version: 2 build: - os: ubuntu-22.04 + os: ubuntu-lts-latest tools: - python: "3.11" + python: "latest" sphinx: configuration: conf.py python: diff --git a/.vscode/launch.json b/.vscode/launch.json index 4525ad19..bb65b638 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -22,7 +22,6 @@ "request": "launch", "module": "pytest", "args": [ - "--no-cov", "--keep-generated", "--rootdir=${workspaceFolder}", "${file}" diff --git a/.vscode/nunavut-words.txt b/.vscode/nunavut-words.txt index 1cd27e7d..a8e0f82c 100644 --- a/.vscode/nunavut-words.txt +++ b/.vscode/nunavut-words.txt @@ -40,6 +40,7 @@ postprocessor postprocessors roadmap rtype +scsv Sriram tobytes transcompilation diff --git a/.vscode/settings.json b/.vscode/settings.json index 2809d108..a867a261 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -155,6 +155,7 @@ "files.insertFinalNewline": true, "files.trimFinalNewlines": true, "files.trimTrailingWhitespace": true, + "reflow.preferredLineLength": 120, "testMate.cpp.test.advancedExecutables": [ { "pattern": "verification/build_vscode/**/suite/{test,Test,TEST}_*", diff --git a/.vscode/tasks.json b/.vscode/tasks.json index bf13e5f3..69b4f37b 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -24,6 +24,19 @@ "isDefault": true } }, + { + "label": "doc build (tox)", + "type": "shell", + "command": "tox", + "args": [ + "-e", + "docs" + ], + "group": { + "kind": "build", + }, + "problemMatcher": [] + }, { "label": "verify c native32", "type": "shell", diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index c844998e..df50898d 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ 
-72,8 +72,8 @@ To run the full suite of `tox`_ tests locally you'll need docker. Once you have and running do:: git submodule update --init --recursive - docker pull ghcr.io/opencyphal/toxic:tx22.4.2 - docker run --rm -v $PWD:/repo ghcr.io/opencyphal/toxic:tx22.4.2 tox + docker pull ghcr.io/opencyphal/toxic:tx22.4.3 + docker run --rm -v $PWD:/repo ghcr.io/opencyphal/toxic:tx22.4.3 tox To run a limited suite using only locally available interpreters directly on your host machine, skip the docker invocations and use ``tox run -s``. @@ -198,7 +198,7 @@ Building The Docs We rely on `read the docs`_ to build our documentation from github but we also verify this build as part of our tox build. This means you can view a local copy after completing a full, successful test run (See `Running The Tests`_) or do -:code:`docker run --rm -t -v $PWD:/repo ghcr.io/opencyphal/toxic:tx22.4.2 /bin/sh -c "tox run -e docs"` to build +:code:`docker run --rm -t -v $PWD:/repo ghcr.io/opencyphal/toxic:tx22.4.3 /bin/sh -c "tox run -e docs"` to build the docs target. 
You can open the index.html under ``.tox_{host platform}/docs/tmp/index.html`` or run a local web-server:: diff --git a/NunavutConfig.cmake b/NunavutConfig.cmake index bb489665..9a8d4485 100644 --- a/NunavutConfig.cmake +++ b/NunavutConfig.cmake @@ -6,33 +6,30 @@ cmake_minimum_required(VERSION 3.27.0 FATAL_ERROR) - set(NUNAVUT_VERSION "3.0") - -####### Created using @PACKAGE_INIT@ by configure_package_config_file() ####### - +# ###### Created using @PACKAGE_INIT@ by configure_package_config_file() ####### get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/" ABSOLUTE) macro(set_and_check _var _file) - set(${_var} "${_file}") - if(NOT EXISTS "${_file}") - message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !") - endif() -endmacro() + set(${_var} "${_file}") -macro(check_required_components _NAME) - foreach(comp ${${_NAME}_FIND_COMPONENTS}) - if(NOT ${_NAME}_${comp}_FOUND) - if(${_NAME}_FIND_REQUIRED_${comp}) - set(${_NAME}_FOUND FALSE) - endif() + if(NOT EXISTS "${_file}") + message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !") endif() - endforeach() endmacro() -#################################################################################### +macro(check_required_components _NAME) + foreach(comp ${${_NAME}_FIND_COMPONENTS}) + if(NOT ${_NAME}_${comp}_FOUND) + if(${_NAME}_FIND_REQUIRED_${comp}) + set(${_NAME}_FOUND FALSE) + endif() + endif() + endforeach() +endmacro() +# ################################################################################### find_package(Python3 3.9 REQUIRED) set_and_check(NUNAVUT_SOURCE_DIR "${PACKAGE_PREFIX_DIR}/src") @@ -40,17 +37,17 @@ set_and_check(NUNAVUT_SOURCE_DIR "${PACKAGE_PREFIX_DIR}/src") check_required_components(Nunavut Python3) execute_process( - COMMAND ${Python3_EXECUTABLE} ${PACKAGE_PREFIX_DIR}/.github/verify.py --major-minor-version-only - OUTPUT_VARIABLE NUNAVUT_VERSION_MAJOR_MINOR - 
OUTPUT_STRIP_TRAILING_WHITESPACE - WORKING_DIRECTORY "${PACKAGE_PREFIX_DIR}" + COMMAND ${Python3_EXECUTABLE} ${PACKAGE_PREFIX_DIR}/.github/verify.py --major-minor-version-only + OUTPUT_VARIABLE NUNAVUT_VERSION_MAJOR_MINOR + OUTPUT_STRIP_TRAILING_WHITESPACE + WORKING_DIRECTORY "${PACKAGE_PREFIX_DIR}" ) execute_process( - COMMAND ${Python3_EXECUTABLE} ${PACKAGE_PREFIX_DIR}/.github/verify.py --version-only - OUTPUT_VARIABLE NUNAVUT_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE - WORKING_DIRECTORY "${PACKAGE_PREFIX_DIR}" + COMMAND ${Python3_EXECUTABLE} ${PACKAGE_PREFIX_DIR}/.github/verify.py --version-only + OUTPUT_VARIABLE NUNAVUT_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE + WORKING_DIRECTORY "${PACKAGE_PREFIX_DIR}" ) message(STATUS "Nunavut version: ${NUNAVUT_VERSION}") @@ -58,157 +55,166 @@ message(STATUS "Nunavut version: ${NUNAVUT_VERSION}") # Taken from https://stackoverflow.com/questions/32585927/proper-way-to-use-platform-specific-separators-in-cmake as # this issue (https://gitlab.kitware.com/cmake/cmake/-/issues/17946) is still open. if("${CMAKE_HOST_SYSTEM}" MATCHES ".*Windows.*") - set(NUNAVUT_PATH_LIST_SEP "\\;") + set(NUNAVUT_PATH_LIST_SEP "\\;") else() # e.g. Linux - set(NUNAVUT_PATH_LIST_SEP ":") + set(NUNAVUT_PATH_LIST_SEP ":") endif() -# function: add_cyphal_library -# Create a library built from code generated by the Nunavut tool from dsdl files. This version -# of the function always defines an interface library since c and c++ types are generated as header-only. -# -# param: NAME str - A name for the library. If EXACT_NAME is set then this is the -# exact name of the target. Otherwise, the target name will be -# derived from this name for uniqueness. Use OUT_TARGET to capture -# the generated name of the library target. -# param: LANGUAGE str - The language to generate code for. Supported types are 'c' and 'cpp'. -# param: DSDL_FILES list[path] - A list of DSDL files to generate code for. 
-# param: DSDL_NAMESPACES optional list[path] - A list of namespaces to search for dependencies in. While optional, -# it's rare that this would be omitted. -# param: LANGUAGE_STANDARD optional str - The language standard to use. -# param: OUTPUT_DIR optional path - The directory to write generated code to. If omitted then -# ${CMAKE_CURRENT_BINARY_DIR}/generated is used. -# param: CONFIGURATION optional list[path] - A list of configurations files to pass into nnvg. See the -# nunavut documentation for more information about configuration -# files. -# param: WORKING_DIRECTORY optional path - The working directory to use when invoking the Nunavut tool. If -# omitted then ${CMAKE_CURRENT_SOURCE_DIR} is used. -# param: PYDSDL_PATH optional path - The path to the PyDSDL tool. If omitted then it must be available -# to python when invoked. -# param: FILE_EXTENSION optional str - The file extension to use for generated files. If omitted then -# the default for the language is used. -# option: ALLOW_EXPERIMENTAL_LANGUAGES - If set then unsupported languages will be allowed. -# option: CONSOLE_DEBUG - If set then verbose output will be enabled. -# option: SUPPORT_ONLY - If set then the library created will contain only support code needed -# to use the code generated for DSDL_FILES. This allows different -# cyphal libraries to share a single set of support headers and -# avoids duplicate target rules. -# option: EXACT_NAME - If set then the target name will be exactly as specified in NAME. -# Otherwise, the target name will be prefixed with an internal default. -# param: OUT_LIBRARY_TARGET optional variable - If set, this method write a variable named ${OUT_LIBRARY_TARGET} with -# the interface library target name defined for the library in the -# calling scope. -# param: OUT_CODEGEN_TARGET optional variable - If set, this method write a variable named ${OUT_CODEGEN_TARGET} with -# the custom target name defined for invoking the code generator. 
-function (add_cyphal_library) - #+-[input]----------------------------------------------------------------+ - set(options ALLOW_EXPERIMENTAL_LANGUAGES CONSOLE_DEBUG SUPPORT_ONLY EXACT_NAME) - set(singleValueArgs +# ################################################################################### +# HELPER MACROS FOR INTERNAL FUNCTIONS. YOU CAN IGNORE THESE. + +# transform a JSON array into a CMAKE list +macro(nunavut_json_array_to_list _json_array _list) + string(JSON _json_array_type ERROR_VARIABLE _json_error TYPE ${${_json_array}}) + + if(_json_error) + message(FATAL_ERROR "nunavut_json_array_to_list: Failed to parse JSON array: ${_json_error}") + endif() + + if(NOT ${_json_array_type} STREQUAL "ARRAY") + message(FATAL_ERROR "nunavut_json_array_to_list: Expected JSON array but got ${_json_array_type}.") + endif() + + string(JSON _json_array_length ERROR_VARIABLE _json_error LENGTH ${${_json_array}}) + + if(_json_error) + message(FATAL_ERROR "nunavut_json_array_to_list: Failed to get length of JSON array: ${_json_error}") + endif() + + set(_local_list "") + + foreach(_index RANGE 0 ${_json_array_length} - 1) + string(JSON _item ERROR_VARIABLE _json_error GET ${${_json_array}} ${_index}) + + if(_json_error) + message(FATAL_ERROR "nunavut_json_array_to_list: Failed to get item from JSON array: ${_json_error}") + endif() + + list(APPEND _local_list "${_item}") + endforeach() + + set(${_list} ${_local_list}) +endmacro() + +# used internally to unify argument handling for standards nnvg arguments across all cmake functions +# Note: all options are repeated as "LOCAL_ARG_[option name]" to support forwarding. 
+macro(nunavut_config_args has_name options singleValueArgs multiValueArgs usageLines) + list(APPEND ${options} ALLOW_EXPERIMENTAL_LANGUAGES CONSOLE_DEBUG SUPPORT_ONLY NO_SUPPORT) + list(APPEND ${singleValueArgs} NAME - OUT_LIBRARY_TARGET - OUT_CODEGEN_TARGET - LANGUAGE - OUTPUT_DIR - LANGUAGE_STANDARD - PYDSDL_PATH - WORKING_DIRECTORY - FILE_EXTENSION + LANGUAGE + OUTPUT_DIR + LANGUAGE_STANDARD + PYDSDL_PATH + WORKING_DIRECTORY + FILE_EXTENSION ) - set(multiValueArgs DSDL_FILES DSDL_NAMESPACES) - cmake_parse_arguments(PARSE_ARGV 0 ARG "${options}" "${singleValueArgs}" "${multiValueArgs}") + list(APPEND ${multiValueArgs} CONFIGURATION DSDL_FILES DSDL_NAMESPACES) + list(INSERT ${usageLines} 0 + "USAGE:" + " ${CMAKE_CURRENT_FUNCTION}") - if (NOT ARG_NAME AND EXACT_NAME) - message(FATAL_ERROR "add_cyphal_library: NAME is required if EXACT_NAME is set.") + if(${has_name}) + list(INSERT ${usageLines} 2 " NAME LANGUAGE DSDL_FILES [DSDL_NAMESPACES ]") + else() + list(INSERT ${usageLines} 2 " LANGUAGE DSDL_FILES [DSDL_NAMESPACES ]") endif() - if (NOT ARG_LANGUAGE) - message(FATAL_ERROR "add_cyphal_library: LANGUAGE is required.") + list(INSERT ${usageLines} 3 + " [LANGUAGE_STANDARD ] [OUTPUT_DIR ] [CONFIGURATION ]" + " [WORKING_DIRECTORY ] [PYDSDL_PATH ] [FILE_EXTENSION ]" + " [ALLOW_EXPERIMENTAL_LANGUAGES] [CONSOLE_DEBUG] [SUPPORT_ONLY|NO_SUPPORT]" + ) + + cmake_parse_arguments(PARSE_ARGV 0 ARG "${${options}}" "${${singleValueArgs}}" "${${multiValueArgs}}") + + if(${has_name} AND NOT ARG_NAME) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: NAME is required.") endif() - if (${ARG_LANGUAGE} STREQUAL "cpp") - if (NOT ARG_ALLOW_EXPERIMENTAL_LANGUAGES) - message(FATAL_ERROR "add_cyphal_library: C++ support is experimental and must be enabled by setting the ALLOW_EXPERIMENTAL_LANGUAGES option.") + if(NOT ARG_LANGUAGE) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: LANGUAGE is required.") + endif() + + if(${ARG_LANGUAGE} STREQUAL "cpp") + if(NOT 
ARG_ALLOW_EXPERIMENTAL_LANGUAGES) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: C++ support is experimental and must be enabled by setting the ALLOW_EXPERIMENTAL_LANGUAGES option.") endif() - elseif (NOT ${ARG_LANGUAGE} STREQUAL "c") - message(FATAL_ERROR "add_cyphal_library: LANGUAGE must be 'c' or 'cpp'.") + elseif(NOT ${ARG_LANGUAGE} STREQUAL "c") + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: LANGUAGE must be 'c' or 'cpp'.") endif() - if (NOT ARG_DSDL_FILES) - message(FATAL_ERROR "add_cyphal_library: DSDL_FILES is required.") + if(NOT ARG_DSDL_FILES) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: DSDL_FILES is required.") endif() - if (NOT ARG_OUTPUT_DIR) + if(NOT ARG_OUTPUT_DIR) set(ARG_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/generated") endif() - if (NOT ARG_WORKING_DIRECTORY) + if(NOT ARG_WORKING_DIRECTORY) set(ARG_WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}") endif() - if (ARG_UNPARSED_ARGUMENTS) - message(FATAL_ERROR "add_cyphal_library: Unknown arguments found: ${ARG_UNPARSED_ARGUMENTS}\n" - "USAGE: \n" - " add_cyphal_library(\n" - " NAME LANGUAGE DSDL_FILES [DSDL_NAMESPACES ]\n" - " [LANGUAGE_STANDARD ] [OUTPUT_DIR ] [CONFIGURATION ]\n" - " [WORKING_DIRECTORY ] [PYDSDL_PATH ] [FILE_EXTENSION ]\n" - " [ALLOW_EXPERIMENTAL_LANGUAGES] [CONSOLE_DEBUG] [SUPPORT_ONLY] [EXACT_NAME]\n" - " [OUT_LIBRARY_TARGET ] [OUT_CODEGEN_TARGET ]\n" - " )\n" - ) + if(ARG_SUPPORT_ONLY AND ARG_NO_SUPPORT) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: SUPPORT_ONLY and NO_SUPPORT are mutually exclusive.") endif() - #+-[body]-----------------------------------------------------------------+ - - if (ARG_EXACT_NAME) - set(LOCAL_TARGET_NAME "${ARG_NAME}") - else() - if (NOT ARG_NAME) - set(ARG_NAME "") - else() - set(ARG_NAME "-${ARG_NAME}") - endif() - if (ARG_SUPPORT_ONLY) - set(LOCAL_TARGET_NAME "cyphal-support${ARG_NAME}") - else() - set(LOCAL_TARGET_NAME "cyphal-types${ARG_NAME}") - endif() + if(ARG_UNPARSED_ARGUMENTS) + list(INSERT ${usageLines} 0 "Unknown 
arguments found: ${ARG_UNPARSED_ARGUMENTS}") + string(JOIN "\n" LOCAL_USAGE_MESSAGE ${${usageLines}}) + message(FATAL_ERROR "${LOCAL_USAGE_MESSAGE}\n") endif() +endmacro() +macro(nunavut_local_args) # handle forming arguments for the nunavut tool based on arguments passed into this function. set(LOCAL_DYNAMIC_ARGS "") - if (ARG_DSDL_NAMESPACES) + + if(ARG_DSDL_NAMESPACES) foreach(LOCAL_DSDL_NAMESPACE IN LISTS ARG_DSDL_NAMESPACES) list(APPEND LOCAL_DYNAMIC_ARGS "--lookup-dir" "${LOCAL_DSDL_NAMESPACE}") endforeach() endif() - if (ARG_LANGUAGE_STANDARD) + if(ARG_LANGUAGE_STANDARD) list(APPEND LOCAL_DYNAMIC_ARGS "--language-standard" "${ARG_LANGUAGE_STANDARD}") endif() - if (ARG_CONFIGURATIONS) - foreach(LOCAL_CONFIGURATION IN LISTS ARG_CONFIGURATIONS) + if(ARG_CONFIGURATION) + foreach(LOCAL_CONFIGURATION IN LISTS ARG_CONFIGURATION) list(APPEND LOCAL_DYNAMIC_ARGS "--configuration" "${LOCAL_CONFIGURATION}") endforeach() endif() - if (ARG_ALLOW_EXPERIMENTAL_LANGUAGES) + if(ARG_ALLOW_EXPERIMENTAL_LANGUAGES) + set(LOCAL_ARG_ALLOW_EXPERIMENTAL_LANGUAGES "ALLOW_EXPERIMENTAL_LANGUAGES") list(APPEND LOCAL_DYNAMIC_ARGS "--include-experimental-languages") + else() + set(LOCAL_ARG_ALLOW_EXPERIMENTAL_LANGUAGES) endif() - if (ARG_SUPPORT_ONLY) + if(ARG_SUPPORT_ONLY) + set(LOCAL_ARG_SUPPORT_ONLY "SUPPORT_ONLY") + set(LOCAL_ARG_NO_SUPPORT "") list(APPEND LOCAL_DYNAMIC_ARGS "--generate-support" "only") + elseif(ARG_NO_SUPPORT) + set(LOCAL_ARG_SUPPORT_ONLY "") + set(LOCAL_ARG_NO_SUPPORT "NO_SUPPORT") + list(APPEND LOCAL_DYNAMIC_ARGS "--generate-support" "never") + else() + set(LOCAL_ARG_SUPPORT_ONLY "") + set(LOCAL_ARG_NO_SUPPORT "") endif() - if (ARG_FILE_EXTENSION) + if(ARG_FILE_EXTENSION) list(APPEND LOCAL_DYNAMIC_ARGS "--output-extension" "${ARG_FILE_EXTENSION}") endif() # Setup running nunavut and pydsdl from source set(LOCAL_PYTHON_PATH "${NUNAVUT_SOURCE_DIR}") - if (ARG_PYDSDL_PATH) + if(ARG_PYDSDL_PATH) set(LOCAL_PYTHON_PATH 
"${LOCAL_PYTHON_PATH}${NUNAVUT_PATH_LIST_SEP}${ARG_PYDSDL_PATH}") endif() @@ -216,96 +222,497 @@ function (add_cyphal_library) # Setup additional debug options if requested. set(LOCAL_DEBUG_COMMAND_OPTIONS "") - if (ARG_CONSOLE_DEBUG) + + if(ARG_CONSOLE_DEBUG) + set(LOCAL_ARG_CONSOLE_DEBUG "CONSOLE_DEBUG") list(APPEND LOCAL_DEBUG_COMMAND_OPTIONS "COMMAND_ECHO" "STDOUT" "ECHO_OUTPUT_VARIABLE") + else() + set(LOCAL_ARG_CONSOLE_DEBUG "") + endif() + + if(ARG_EXPORT_MANIFEST) + set(LOCAL_JSON_FORMAT "json-pretty") + set(LOCAL_LIST_CONFIGURATION "--list-configuration") + else() + set(LOCAL_JSON_FORMAT "json") + set(LOCAL_LIST_CONFIGURATION "") endif() +endmacro() + +# ################################################################################### + +#[==[.rst: + + .. cmake:command:: export_nunavut_manifest + + Generate a json file listing the inputs to a code gen rule and the outputs generated by the rule. This is + useful for complex builds where discovering the inputs and outputs is time consuming. By generating this file + and checking it into source control, the build can use the manifest to avoid dynamic discovery for each new + configuration step. + + - **param** ``LANGUAGE`` **str**: + + The language to generate code for. Supported types are ``c`` and ``cpp``. + + - **param** ``DSDL_FILES`` **list[path]**: + + A list of DSDL files to generate code for. + + - **param** ``DSDL_NAMESPACES`` **optional list[path]**: + + A list of namespaces to search for dependencies in. While optional, it's rare that this would be omitted. + + - **param** ``LANGUAGE_STANDARD`` **optional str**: + + The language standard to use. + + - **param** ``OUTPUT_DIR`` **optional path**: + + The directory to write generated code to. If omitted then ``${CMAKE_CURRENT_BINARY_DIR}/generated`` is used. + + - **param** ``CONFIGURATION`` **optional list[path]**: + + A list of configuration files to pass into nnvg. See the nunavut documentation for more information about + configuration files. 
+ + - **param** ``WORKING_DIRECTORY`` **optional path**: + + The working directory to use when invoking the Nunavut tool. If omitted then ``${CMAKE_CURRENT_SOURCE_DIR}`` + is used. + + - **param** ``PYDSDL_PATH`` **optional path**: + + The path to the PyDSDL tool. If omitted then it must be available to python when invoked. + + - **param** ``FILE_EXTENSION`` **optional str**: + + The file extension to use for generated files. If omitted then the default for the language is used. + + - **option** ``ALLOW_EXPERIMENTAL_LANGUAGES``: + + If set then unsupported languages will be allowed. + + - **option** ``CONSOLE_DEBUG``: + + If set then verbose output will be enabled. + + - **option** ``SUPPORT_ONLY``: + + If set then the library created will contain only support code needed to use the code generated for + ``DSDL_FILES``. This allows different cyphal libraries to share a single set of support headers and avoids + duplicate target rules. This option is mutually exclusive with ``NO_SUPPORT``. + + - **option** ``NO_SUPPORT``: + + If set then the library created will not contain support code needed to use the code generated for + ``DSDL_FILES``. This is a mutually exclusive option with ``SUPPORT_ONLY``. + + - **param** ``OUT_MANIFEST_PATH``: + + If set then this method write a variable named ``${OUT_MANIFEST_PATH}`` with the path to the manifest file + in the calling scope. + +#]==] +function(export_nunavut_manifest) + # +-[input]----------------------------------------------------------------+ + set(options) + set(singleValueArgs OUT_MANIFEST_PATH) + set(multiValueArgs) + set(usageLines " [OUT_MANIFEST_PATH ]") + nunavut_config_args(ON options singleValueArgs multiValueArgs usageLines) + + # +-[body]-----------------------------------------------------------------+ + nunavut_local_args() # List all inputs to use as the dependencies for the custom command. 
execute_process( COMMAND - ${Python3_EXECUTABLE} -m nunavut - --target-language ${ARG_LANGUAGE} - --list-inputs - --dry-run - ${LOCAL_DYNAMIC_ARGS} - ${ARG_DSDL_FILES} + ${Python3_EXECUTABLE} -m nunavut + --target-language ${ARG_LANGUAGE} + --list-inputs + --list-outputs + ${LOCAL_LIST_CONFIGURATION} + --list-format ${LOCAL_JSON_FORMAT} + --dry-run + ${LOCAL_DYNAMIC_ARGS} + ${ARG_DSDL_FILES} ${LOCAL_DEBUG_COMMAND_OPTIONS} WORKING_DIRECTORY ${ARG_WORKING_DIRECTORY} - OUTPUT_VARIABLE LOCAL_LIB_INPUTS + OUTPUT_VARIABLE LOCAL_LIB_INPUTS_AND_OUTPUTS OUTPUT_STRIP_TRAILING_WHITESPACE ENCODING UTF8 ) - list(LENGTH LOCAL_LIB_INPUTS LOCAL_LIB_INPUTS_LENGTH) - if (${LOCAL_LIB_INPUTS_LENGTH} EQUAL 0) - message(FATAL_ERROR "add_cyphal_library: No input files found for ${LOCAL_TARGET_NAME} (${LOCAL_LIB_INPUTS}).") - endif() + set(LOCAL_MANIFEST_FILE "${ARG_OUTPUT_DIR}/${ARG_NAME}.json") + file(WRITE ${LOCAL_MANIFEST_FILE} ${LOCAL_LIB_INPUTS_AND_OUTPUTS}) - if (ARG_CONSOLE_DEBUG) - message(STATUS "\nadd_cyphal_library: Found input files: ${LOCAL_LIB_INPUTS}") + # +-[output]---------------------------------------------------------------+ + if(ARG_OUT_MANIFEST_PATH) + set(${ARG_OUT_MANIFEST_PATH} ${LOCAL_MANIFEST_FILE} PARENT_SCOPE) endif() +endfunction() + +#[==[.rst: + + .. cmake:command:: discover_inputs_and_outputs + + Invoke nnvg to discover all dsdl inputs for a given set of namespaces and the outputs that these would generate + from a codegen build step. + + .. note:: + + The :cmake:command:`add_cyphal_library` function uses this method internally so it is not necessary to use + this method if defining a library using that function. + + - **param** ``LANGUAGE`` **str**: + + The language to generate code for. Supported types are ``c`` and ``cpp``. + + - **param** ``DSDL_FILES`` **list[path]**: + + A list of DSDL files to generate code for. + + - **param** ``DSDL_NAMESPACES`` **optional list[path]**: + + A list of namespaces to search for dependencies in. 
While optional, it's rare that this would be omitted. + + - **param** ``LANGUAGE_STANDARD`` **optional str**: + + The language standard to use. + + - **param** ``OUTPUT_DIR`` **optional path**: + + The directory to write generated code to. If omitted then ``${CMAKE_CURRENT_BINARY_DIR}/generated`` is used. + + - **param** ``CONFIGURATION`` **optional list[path]**: + + A list of configuration files to pass into nnvg. See the nunavut documentation for more information about + configuration files. + + - **param** ``WORKING_DIRECTORY`` **optional path**: - # List all outputs to use as the outputs for the custom command. + The working directory to use when invoking the Nunavut tool. If omitted then ``${CMAKE_CURRENT_SOURCE_DIR}`` + is used. + + - **param** ``PYDSDL_PATH`` **optional path**: + + The path to the PyDSDL tool. If omitted then it must be available to python when invoked. + + - **param** ``FILE_EXTENSION`` **optional str**: + + The file extension to use for generated files. If omitted then the default for the language is used. + + - **option** ``ALLOW_EXPERIMENTAL_LANGUAGES``: + + If set then unsupported languages will be allowed. + + - **option** ``CONSOLE_DEBUG``: + + If set then verbose output will be enabled. + + - **option** ``SUPPORT_ONLY``: + + If set then the library created will contain only support code needed to use the code generated for + ``DSDL_FILES``. This allows different cyphal libraries to share a single set of support headers and avoids + duplicate target rules. This option is mutually exclusive with ``NO_SUPPORT``. + + - **option** ``NO_SUPPORT``: + + If set then the library created will not contain support code needed to use the code generated for + ``DSDL_FILES``. This is a mutually exclusive option with ``SUPPORT_ONLY``. 
+ + - **param** ``OUT_MANIFEST_DATA`` **optional variable:** + + If set, this method writes a variable named ``${OUT_MANIFEST_DATA}`` with the json string containing the + entire manifest read in from the nnvg invocation. + + - **param** ``OUT_INPUTS_LIST`` **optional variable:** + + If set, this method writes a variable named ``${OUT_LIBRARY_TARGET}`` with the interface library target name + defined for the library in the calling scope. + + - **param** ``OUT_OUTPUTS_LIST`` **optional variable:** + + If set, this method writes a variable named ``${OUT_CODEGEN_TARGET}`` with the custom target name defined + for invoking the code generator. + +#]==] +function(discover_inputs_and_outputs) + # +-[input]----------------------------------------------------------------+ + set(options) + set(singleValueArgs + OUT_MANIFEST_DATA + OUT_INPUTS_LIST + OUT_OUTPUTS_LIST + ) + set(multiValueArgs) + list(APPEND usageLines + " [OUT_INPUTS_LIST ] [OUT_OUTPUTS_LIST ] [OUT_MANIFEST_DATA ]" + ) + nunavut_config_args(OFF options singleValueArgs multiValueArgs usageLines) + + # +-[body]-----------------------------------------------------------------+ + nunavut_local_args() + + # List all inputs to use as the dependencies for the custom command. 
execute_process( COMMAND - ${Python3_EXECUTABLE} -m nunavut - --target-language ${ARG_LANGUAGE} - --list-outputs - --dry-run - --outdir ${ARG_OUTPUT_DIR} - ${LOCAL_DYNAMIC_ARGS} - ${ARG_DSDL_FILES} + ${Python3_EXECUTABLE} -m nunavut + --target-language ${ARG_LANGUAGE} + --list-inputs + --list-outputs + ${LOCAL_LIST_CONFIGURATION} + --list-format ${LOCAL_JSON_FORMAT} + --dry-run + ${LOCAL_DYNAMIC_ARGS} + ${ARG_DSDL_FILES} ${LOCAL_DEBUG_COMMAND_OPTIONS} WORKING_DIRECTORY ${ARG_WORKING_DIRECTORY} - OUTPUT_VARIABLE LOCAL_LIB_OUTPUTS + OUTPUT_VARIABLE LOCAL_LIB_INPUTS_AND_OUTPUTS OUTPUT_STRIP_TRAILING_WHITESPACE ENCODING UTF8 ) - list(LENGTH LOCAL_LIB_OUTPUTS LOCAL_LIB_OUTPUTS_LENGTH) - if (${LOCAL_LIB_OUTPUTS_LENGTH} EQUAL 0) - message(FATAL_ERROR "add_cyphal_library: No output files found for ${LOCAL_TARGET_NAME}.") + string(JSON LOCAL_LIB_INPUTS ERROR_VARIABLE LOCAL_LIB_READ_INPUTS_ERROR GET ${LOCAL_LIB_INPUTS_AND_OUTPUTS} "inputs") + + if(LOCAL_LIB_READ_INPUTS_ERROR) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Failed to read inputs from nunavut: ${LOCAL_LIB_READ_INPUTS_ERROR}") endif() - if (ARG_CONSOLE_DEBUG) - message(STATUS "\nadd_cyphal_library: Found output files: ${LOCAL_LIB_OUTPUTS}") + nunavut_json_array_to_list(LOCAL_LIB_INPUTS LOCAL_LIB_INPUTS_LIST) + list(LENGTH LOCAL_LIB_INPUTS_LIST LOCAL_LIB_INPUTS_LENGTH) + + if(${LOCAL_LIB_INPUTS_LENGTH} EQUAL 0) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: No input files found for ${LOCAL_TARGET_NAME} (${LOCAL_LIB_INPUTS_LIST}).") endif() + if(ARG_CONSOLE_DEBUG) + message(STATUS "\n${CMAKE_CURRENT_FUNCTION}: Found input files: ${LOCAL_LIB_INPUTS_LIST}") + endif() + + string(JSON LOCAL_LIB_OUTPUTS ERROR_VARIABLE LOCAL_LIB_READ_OUTPUTS_ERROR GET ${LOCAL_LIB_INPUTS_AND_OUTPUTS} "outputs") + + if(LOCAL_LIB_READ_OUTPUTS_ERROR) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Failed to read outputs from nunavut: ${LOCAL_LIB_READ_OUTPUTS_ERROR}") + endif() + + nunavut_json_array_to_list(LOCAL_LIB_OUTPUTS 
LOCAL_LIB_OUTPUTS_LIST) + list(LENGTH LOCAL_LIB_OUTPUTS_LIST LOCAL_LIB_OUTPUTS_LENGTH) + + if(${LOCAL_LIB_OUTPUTS_LENGTH} EQUAL 0) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: No output files found for ${LOCAL_TARGET_NAME}.") + endif() + + if(ARG_CONSOLE_DEBUG) + message(STATUS "\n${CMAKE_CURRENT_FUNCTION}: Found output files: ${LOCAL_LIB_OUTPUTS_LIST}") + endif() + + # +-[output]---------------------------------------------------------------+ + if(ARG_OUT_MANIFEST_DATA) + set(${ARG_OUT_MANIFEST_DATA} ${LOCAL_LIB_INPUTS_AND_OUTPUTS} PARENT_SCOPE) + endif() + + if(ARG_OUT_INPUTS_LIST) + set(${ARG_OUT_INPUTS_LIST} ${LOCAL_LIB_INPUTS_LIST} PARENT_SCOPE) + endif() + + if(ARG_OUT_OUTPUTS_LIST) + set(${ARG_OUT_OUTPUTS_LIST} ${LOCAL_LIB_OUTPUTS_LIST} PARENT_SCOPE) + endif() +endfunction() + +#[==[.rst: + .. cmake:command:: add_cyphal_library + + Create a library built from code generated by the Nunavut tool from dsdl files. This version + of the function always defines an interface library since c and c++ types are generated as header-only. + + .. note:: + + See the :ref:`fetch_content` for more guidance on using this function. + + - **param** ``NAME`` **str**: + + A name for the library. If ``EXACT_NAME`` is set then this is the exact name of the target. Otherwise, the + target name will be derived from this name for uniqueness. Use ``OUT_LIBRARY_TARGET`` to capture the + generated name of the library target. + + - **param** ``LANGUAGE`` **str**: + + The language to generate code for. Supported types are ``c`` and ``cpp``. + + - **param** ``DSDL_FILES`` **list[path]**: + + A list of DSDL files to generate code for. + + - **param** ``DSDL_NAMESPACES`` **optional list[path]**: + + A list of namespaces to search for dependencies in. While optional, it's rare that this would be omitted. + + - **param** ``LANGUAGE_STANDARD`` **optional str**: + + The language standard to use. + + - **param** ``OUTPUT_DIR`` **optional path**: + + The directory to write generated code to. 
If omitted then ``${CMAKE_CURRENT_BINARY_DIR}/generated`` is used. + + - **param** ``CONFIGURATION`` **optional list[path]**: + + A list of configuration files to pass into nnvg. See the nunavut documentation for more information about + configuration files. + + - **param** ``WORKING_DIRECTORY`` **optional path**: + + The working directory to use when invoking the Nunavut tool. If omitted then ``${CMAKE_CURRENT_SOURCE_DIR}`` + is used. + + - **param** ``PYDSDL_PATH`` **optional path**: + + The path to the PyDSDL tool. If omitted then it must be available to python when invoked. + + - **param** ``FILE_EXTENSION`` **optional str**: + + The file extension to use for generated files. If omitted then the default for the language is used. + + - **option** ``ALLOW_EXPERIMENTAL_LANGUAGES``: + + If set then unsupported languages will be allowed. + + - **option** ``CONSOLE_DEBUG``: + + If set then verbose output will be enabled. + + - **option** ``SUPPORT_ONLY``: + + If set then the library created will contain only support code needed to use the code generated for + ``DSDL_FILES``. This allows different cyphal libraries to share a single set of support headers and avoids + duplicate target rules. This option is mutually exclusive with ``NO_SUPPORT``. + + - **option** ``NO_SUPPORT``: + + If set then the library created will not contain support code needed to use the code generated for + ``DSDL_FILES``. This is a mutually exclusive option with ``SUPPORT_ONLY``. + + - **option** ``EXACT_NAME``: + + If set then the target name will be exactly as specified in ``NAME``. Otherwise, the target name will be + prefixed with an internal default. + + - **option** ``EXPORT_MANIFEST``: + + If set then a JSON file containing a list of all the inputs, outputs, and other information about the + custom command will be written to ``${CMAKE_CURRENT_BINARY_DIR}/${OUT_CODEGEN_TARGET}.json``. 
+ + - **param** ``OUT_LIBRARY_TARGET`` **optional variable**: + + If set, this method writes a variable named ``${OUT_LIBRARY_TARGET}`` with the interface library target name + defined for the library in the calling scope. + + - **param** ``OUT_CODEGEN_TARGET`` **optional variable**: + + If set, this method writes a variable named ``${OUT_CODEGEN_TARGET}`` with the custom target name defined for + invoking the code generator. + +#]==] +function(add_cyphal_library) + # +-[input]----------------------------------------------------------------+ + set(options EXPORT_MANIFEST EXACT_NAME) + set(singleValueArgs + OUT_LIBRARY_TARGET + OUT_CODEGEN_TARGET + ) + set(multiValueArgs) + list(APPEND usageLines + " [EXPORT_MANIFEST] [EXACT_NAME]" + " [OUT_LIBRARY_TARGET ] [OUT_CODEGEN_TARGET ]" + ) + nunavut_config_args(ON options singleValueArgs multiValueArgs usageLines) + + if(NOT ARG_NAME AND EXACT_NAME) + message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: NAME is required if EXACT_NAME is set.") + endif() + + # +-[body]-----------------------------------------------------------------+ + nunavut_local_args() + + if(ARG_EXACT_NAME) + set(LOCAL_TARGET_NAME "${ARG_NAME}") + else() + if(NOT ARG_NAME) + set(ARG_NAME "") + else() + set(ARG_NAME "-${ARG_NAME}") + endif() + + if(ARG_SUPPORT_ONLY) + set(LOCAL_TARGET_NAME "cyphal-support${ARG_NAME}") + elseif(ARG_NO_SUPPORT) + set(LOCAL_TARGET_NAME "cyphal-types${ARG_NAME}") + else() + set(LOCAL_TARGET_NAME "cyphal-types-and-support${ARG_NAME}") + endif() + endif() + + discover_inputs_and_outputs( + LANGUAGE ${ARG_LANGUAGE} + DSDL_FILES ${ARG_DSDL_FILES} + DSDL_NAMESPACES ${ARG_DSDL_NAMESPACES} + LANGUAGE_STANDARD ${ARG_LANGUAGE_STANDARD} + OUTPUT_DIR ${ARG_OUTPUT_DIR} + CONFIGURATION ${ARG_CONFIGURATION} + WORKING_DIRECTORY ${ARG_WORKING_DIRECTORY} + PYDSDL_PATH ${ARG_PYDSDL_PATH} + FILE_EXTENSION ${ARG_FILE_EXTENSION} + ${LOCAL_ARG_ALLOW_EXPERIMENTAL_LANGUAGES} + ${LOCAL_ARG_CONSOLE_DEBUG} + ${LOCAL_ARG_SUPPORT_ONLY} +
${LOCAL_ARG_NO_SUPPORT} + OUT_MANIFEST_DATA LOCAL_MANIFEST_DATA + OUT_INPUTS_LIST LOCAL_LIB_INPUTS_LIST + OUT_OUTPUTS_LIST LOCAL_LIB_OUTPUTS_LIST + ) + # Create the custom command to generate source files. add_custom_command( - OUTPUT ${LOCAL_LIB_OUTPUTS} + OUTPUT ${LOCAL_LIB_OUTPUTS_LIST} COMMAND - export PYTHONPATH=${LOCAL_PYTHON_PATH} && ${Python3_EXECUTABLE} -m nunavut - --target-language ${ARG_LANGUAGE} - --outdir ${ARG_OUTPUT_DIR} - ${LOCAL_DYNAMIC_ARGS} - ${ARG_DSDL_FILES} + export PYTHONPATH=${LOCAL_PYTHON_PATH} && ${Python3_EXECUTABLE} -m nunavut + --target-language ${ARG_LANGUAGE} + --outdir ${ARG_OUTPUT_DIR} + ${LOCAL_DYNAMIC_ARGS} + ${ARG_DSDL_FILES} WORKING_DIRECTORY ${ARG_WORKING_DIRECTORY} - DEPENDS ${LOCAL_LIB_INPUTS} + DEPENDS ${LOCAL_LIB_INPUTS_LIST} ) set(LOCAL_CODEGEN_TARGET "${LOCAL_TARGET_NAME}-generate") add_custom_target(${LOCAL_CODEGEN_TARGET} - DEPENDS ${LOCAL_LIB_OUTPUTS} + DEPENDS ${LOCAL_LIB_OUTPUTS_LIST} ) # finally, define the interface library for the generated headers. 
- add_library(${LOCAL_TARGET_NAME} INTERFACE ${LOCAL_LIB_OUTPUTS}) + add_library(${LOCAL_TARGET_NAME} INTERFACE ${LOCAL_LIB_OUTPUTS_LIST}) target_include_directories(${LOCAL_TARGET_NAME} INTERFACE ${ARG_OUTPUT_DIR}) add_dependencies(${LOCAL_TARGET_NAME} ${LOCAL_CODEGEN_TARGET}) - if (ARG_CONSOLE_DEBUG) - message(STATUS "add_cyphal_library: Done adding library ${LOCAL_TARGET_NAME}.") + if(ARG_EXPORT_MANIFEST) + set(LOCAL_MANIFEST_FILE "${CMAKE_CURRENT_BINARY_DIR}/${LOCAL_CODEGEN_TARGET}.json") + file(WRITE ${LOCAL_MANIFEST_FILE} ${LOCAL_MANIFEST_DATA}) + endif() + + if(ARG_CONSOLE_DEBUG) + message(STATUS "${CMAKE_CURRENT_FUNCTION}: Done adding library ${LOCAL_TARGET_NAME}.") endif() - #+-[output]---------------------------------------------------------------+ - if (ARG_OUT_LIBRARY_TARGET) + # +-[output]---------------------------------------------------------------+ + if(ARG_OUT_LIBRARY_TARGET) set(${ARG_OUT_LIBRARY_TARGET} ${LOCAL_TARGET_NAME} PARENT_SCOPE) endif() - if (ARG_OUT_CODEGEN_TARGET) + if(ARG_OUT_CODEGEN_TARGET) set(${ARG_OUT_CODEGEN_TARGET} ${LOCAL_CODEGEN_TARGET} PARENT_SCOPE) endif() endfunction() diff --git a/conf.py b/conf.py index 72aeef11..541efec4 100644 --- a/conf.py +++ b/conf.py @@ -14,6 +14,8 @@ # from nunavut._version import __version__ as nunavut_version from nunavut._version import __copyright__ as nunavut_copyright +import os +import subprocess # -- Project information ----------------------------------------------------- @@ -36,6 +38,12 @@ if len(stripped) > 0 and not stripped.startswith("#"): exclude_patterns.append(stripped) +rtd_version = os.environ.get('READTHEDOCS_VERSION') +if rtd_version is not None: + git_hash = subprocess.check_output(["git", "rev-parse", rtd_version]).decode().strip() +else: + git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode().strip() + # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. 
@@ -52,10 +60,12 @@ "sphinx.ext.imgmath", "sphinx.ext.viewcode", "sphinx.ext.githubpages", + "sphinx.ext.extlinks", "sphinxarg.ext", "sphinx.ext.intersphinx", "sphinxemoji.sphinxemoji", - "sphinx_rtd_theme" + "sphinx_rtd_theme", + "sphinxcontrib.moderncmakedomain", ] # Add any paths that contain templates here, relative to this directory. @@ -108,6 +118,8 @@ "conf_py_path": "", } +extlinks = {"github_link": (f"https://github.com/OpenCyphal/nunavut/blob/{git_hash}/%s", "%s")} + # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". diff --git a/docs/cli.rst b/docs/cli.rst index f2913876..836f8967 100644 --- a/docs/cli.rst +++ b/docs/cli.rst @@ -1,3 +1,5 @@ +.. _nnvg: + ################################################ nnvg ################################################ diff --git a/docs/cmake/.gitignore b/docs/cmake/.gitignore new file mode 100644 index 00000000..033ba531 --- /dev/null +++ b/docs/cmake/.gitignore @@ -0,0 +1,5 @@ +external +build +out +nunavut_out +.vscode diff --git a/docs/cmake/CMakeLists.txt b/docs/cmake/CMakeLists.txt new file mode 100644 index 00000000..e4d9a4ba --- /dev/null +++ b/docs/cmake/CMakeLists.txt @@ -0,0 +1,168 @@ +# +# Copyright (C) OpenCyphal Development Team +# Copyright Amazon.com Inc. or its affiliates. +# SPDX-License-Identifier: MIT +# + +cmake_minimum_required(VERSION 3.27.0 FATAL_ERROR) + +project("Nunavut Cmake Example" + VERSION 1.0 + LANGUAGES C + HOMEPAGE_URL https://github.com/OpenCyphal/nunavut + DESCRIPTION "Demonstration of running Nunavut from source using the CMAKE FetchContent module." +) + +# This example demonstrates how to integrate Nunavut into a CMake project using the FetchContent module +# where only Cmake itself and Python are needed to run the code generation step. 
This is useful for +# projects that do not have a managed Python environment; allowing a default Python installation as +# might be found in a modern Linux distribution to be used without pip or setup-tools and without +# modifying the global Python environment. + +# +---------------------------------------------------------------------------+ +# | External Dependencies +# +---------------------------------------------------------------------------+ +include(FetchContent) + +# See CMakePresets.json for a convenient way to control online/offline mode. +# Documentation for this feature can be found here: +# https://cmake.org/cmake/help/latest/module/FetchContent.html#variable:FETCHCONTENT_FULLY_DISCONNECTED +if (${FETCHCONTENT_FULLY_DISCONNECTED}) + message(STATUS "☑️ FetchContent OFFLINE") +else() + message(STATUS "✅ FetchContent ONLINE") +endif() + +# We'll use this by convention to refer to a folder under which all FetchContent projects will be stored. +# You can add this to .gitignore to avoid checking in the external source or you can check it in based +# on your project's needs. If checked in then the build will not need network access nor will it fail if +# github is unavailable. Because of this we highly recommend such a configuration. +# See the provided CMakePresets.json in this folder for a convenient way to switch between online and +# offline builds. This file defines presets such that: +# +# cmake -B build -G Ninja --preset Connected +# +# will configure the build and go online to synchronize resources where: +# +# cmake -B build -G Ninja --preset Disconnected +# +# will configure the build to stay offline and use whatever is available under the external folder. +set(LOCAL_EXTERNAL_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/external) + +# Add Nunavut as a CMake module path. It contains the NunavutConfig.cmake file. 
+list(APPEND CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/external/nunavut") + +# For our git pulls; first we'll declare the public dsdl types we need. +FetchContent_Declare( + cyphal_public_types + GIT_REPOSITORY "https://github.com/OpenCyphal/public_regulated_data_types" + GIT_TAG "HEAD" # <------------------------------------- By specifying HEAD we allow the public types to + # change for each run of a configure step. This is + # consistent with the public_regulated_data_types + # repository since Cyphal itself provides strong + # type versioning that, when properly adhered to, + # allows a project to get the latest version of the + # public types without risking an incompatibility. + # There is a risk that types are deprecated and + # later removed but such a migration will always + # occur over a long time span and deprecation + # warnings will be emitted with the next configure + # and build of this project after the type was + # marked deprecated. In this manner the HEAD tag + # allows a project to stay in-sync with the + # published types and maximizes compatibility with + # other Cyphal devices. + SOURCE_DIR "${LOCAL_EXTERNAL_ROOT}/public_regulated_types" +) + +# Next we'll declare pydsdl which is Nunavut's only required dependency. +FetchContent_Declare( + pydsdl + GIT_REPOSITORY "https://github.com/OpenCyphal/pydsdl" + GIT_TAG "1.22.0" # <------------------------------------ Here we've specified a specific git tag. While + # tags can be modified this specification says + # we want what the pydsdl maintainer has declared + # as the canonical 1.22.0 release even if this + # changes in the future. We are trusting the + # maintainers to make any such change completely + # compatible down to the patch-level. + SOURCE_DIR "${LOCAL_EXTERNAL_ROOT}/pydsdl" +) + +# Finally we'll declare Nunavut itself. 
+FetchContent_Declare( + Nunavut + GIT_REPOSITORY "https://github.com/OpenCyphal/nunavut" + GIT_TAG "2bdc4429320c430eef18104a00287c4cb394736c" # <-- Here we've specified a specific git hash. This is + # the strongest declaration of a fetched dependency. + # Using a hash says we don't care if there are bugs + # that are patched later. We want total build + # reproducibility (caveat: https://github.blog/news-insights/company-news/sha-1-collision-detection-on-github-com/) + SOURCE_DIR "${LOCAL_EXTERNAL_ROOT}/nunavut" + FIND_PACKAGE_ARGS 3.0 +) + +# Now we'll make the dependencies available. If FETCHCONTENT_FULLY_DISCONNECTED is set to OFF then this will pull the +# dependencies from github. As mentioned above, we recommend checking in these dependencies and committing them to +# your own repository to maximize build availability and reproducibility but to also run an online version of your +# build in a CI pipeline to ensure ongoing compatibility. +FetchContent_MakeAvailable( + pydsdl + Nunavut + cyphal_public_types +) + +# +---------------------------------------------------------------------------+ +# | Using NunavutConfig.cmake +# +---------------------------------------------------------------------------+ + +# We'll define the custom types we want to generate code for. These are the types that are not part of the public +# regulated data types. In this case we have two custom types that are part of the ecorp namespace. +set(LOCAL_ECORP_CUSTOM_TYPES + ${CMAKE_CURRENT_SOURCE_DIR}/custom_types/ecorp/customer/record.2.8.dsdl + ${CMAKE_CURRENT_SOURCE_DIR}/custom_types/ecorp/fintech/mortgage/property.4.2.dsdl +) + +# Now we'll add a library target that will also setup a code gen target as a dependency. We'll add the public +# regulated types as a dependency so that the generated code can use those. This will generate code only for the +# custom types we've defined and their dependencies. 
+add_cyphal_library( + NAME ecorp # <------------------------------------------ Make sure this is unique for each cyphal library you + # define, if you are defining more than one. + DSDL_FILES ${LOCAL_ECORP_CUSTOM_TYPES} + DSDL_NAMESPACES # <------------------------------------------ Here we list all valid namespace roots. Any direct + # or dependent type that cannot be found under one of + # these roots will cause the code-gen rule to fail. + ${LOCAL_EXTERNAL_ROOT}/public_regulated_types/uavcan + ${CMAKE_CURRENT_SOURCE_DIR}/custom_types/ecorp + LANGUAGE_STANDARD c11 + LANGUAGE c + OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/include # <-------- This is also the include path added to the interface + # library for including generated code. + PYDSDL_PATH ${LOCAL_EXTERNAL_ROOT}/pydsdl # <-------- This lets us run pydsdl from source. If you are using + # a managed build-container with the correct version + # of pydsdl built-in to the global Python environment + # you can omit this and the use of the pydsdl external + # project. + OUT_LIBRARY_TARGET CYPHAL_GENERATED_HEADERS_ECORP # <-------- ${CYPHAL_GENERATED_HEADERS_ECORP} will resolve to the + # name of the interface library defined after this + # function exits (successfully). + EXPORT_MANIFEST # <-------- Optional. Writes a json file in the build output that + # dumps all configuration used by Nunavut, all template + # files, all dsdl files, and all output files that were + # or would be generated (think of this as the nnvg + # equivalent of compile_commands.json). +) + + +# +---------------------------------------------------------------------------+ +# | Example Application +# +---------------------------------------------------------------------------+ +# By way of demonstration, we'll add a little executable... +add_executable(ecorp_pi main.c) + +# We then add a link to the headers interface library so the ecorp_pi executable will inherit the paths to the +# generated headers. 
This also ensures the code generation step will occur before the ecorp_pi compilation step. +# Note that, while this example uses dynamic resolution of all dsdl resources at configure-time, code generation +# is deferred to build-time. +target_link_libraries(ecorp_pi PUBLIC ${CYPHAL_GENERATED_HEADERS_ECORP}) diff --git a/docs/cmake/CMakePresets.json b/docs/cmake/CMakePresets.json new file mode 100644 index 00000000..6afe8e25 --- /dev/null +++ b/docs/cmake/CMakePresets.json @@ -0,0 +1,53 @@ +{ + "version": 7, + "cmakeMinimumRequired": { + "major": 3, + "minor": 27, + "patch": 0 + }, + "configurePresets": [ + { + "name": "config-common", + "hidden": true, + "description": "Common configuration", + "generator": "Ninja", + "warnings": { + "deprecated": true, + "uninitialized": true + } + }, + { + "name": "config-connected", + "hidden": true, + "cacheVariables": { + "FETCHCONTENT_FULLY_DISCONNECTED": "OFF", + "FETCHCONTENT_QUIET": "OFF" + } + }, + { + "name": "config-disconnected", + "hidden": true, + "cacheVariables": { + "FETCHCONTENT_FULLY_DISCONNECTED": "ON" + } + }, + { + "name": "Connected", + "displayName": "Connected Config", + "description": "FetchContent will go online to look for updates when configured.", + "inherits": [ + "config-common", + "config-connected" + ] + }, + { + "name": "Disconnected", + "displayName": "Disconnected Config", + "description": "FetchContent will not go online but will use any available local content to configure.", + "inherits": [ + "config-common", + "config-disconnected" + ] + } + ] +} \ No newline at end of file diff --git a/docs/cmake/cmake.rst b/docs/cmake/cmake.rst new file mode 100644 index 00000000..620eb524 --- /dev/null +++ b/docs/cmake/cmake.rst @@ -0,0 +1,52 @@ +################################################ +CMake Integration +################################################ + +.. 
_fetch_content: + +************************************* +FetchContent Example +************************************* + +Under the :github_link:`docs/cmake` folder of the Nunavut repo is an example project that uses +`CMake's FetchContent `__ module to integrate +:ref:`nnvg` code generation into a CMake project. + +.. _fetch_content_cmake_lists: + +==================================== +CMakeLists Figure +==================================== + +This example ``CMakeLists.txt`` builds the :github_link:`upstream ` example binary using only cmake, git, +and python. It demonstrates both how to integrate with :ref:`nnvg` and how to run Nunavut from source which avoids +managing Python environments for your build. + +.. literalinclude :: CMakeLists.txt + :language: cmake + +:download:`CMakeLists.txt ` + +==================================== +CMake Presets Figure +==================================== + +This isn't required but the following presets file demonstrates how you can use +`CMake presets `__ to easily switch between offline and +online builds when using `CMake's FetchContent `__ module. +See the :ref:`fetch_content_cmake_lists` figure for more context. + +.. literalinclude :: CMakePresets.json + :language: json + +:download:`CMakePresets.json ` + +************************************* +Helper Functions +************************************* + +Use either `CMake's FetchContent `__ +(see :ref:`fetch_content`) or `find_package(nunavut) `__, +to load the Nunavut cmake functions documented here into your project. + +.. 
cmake-module:: ../../NunavutConfig.cmake diff --git a/docs/cmake/custom_types/ecorp/customer/record.2.8.dsdl b/docs/cmake/custom_types/ecorp/customer/record.2.8.dsdl new file mode 100644 index 00000000..67fcfa49 --- /dev/null +++ b/docs/cmake/custom_types/ecorp/customer/record.2.8.dsdl @@ -0,0 +1,32 @@ +# _____ +# /\ \ +# /::\ \ +# /::::\ \ +# /::::::\ \ +# /:::/\:::\ \ +# /:::/__\:::\ \ +# /::::\ \:::\ \ +# /::::::\ \:::\ \ +# /:::/\:::\ \:::\ \ +# /:::/__\:::\ \:::\____\ +# \:::\ \:::\ \::/ / +# \:::\ \:::\ \/____/ +# \:::\ \:::\ \ +# \:::\ \:::\____\ +# \:::\ \::/ / +# \:::\ \/____/ +# \:::\ \ +# \:::\____\ +# \::/ / +# \/____/ +# +# ECORP Customer Record Type + +uavcan.primitive.String.1.0 full_name +uavcan.primitive.String.1.0 address1 +uavcan.primitive.String.1.0 address2 +uint8[2] country_code +uavcan.primitive.String.1.0 city +uint8[<=64] postal_code + +@extent 2048 * 8 diff --git a/docs/cmake/custom_types/ecorp/fintech/mortgage/property.4.2.dsdl b/docs/cmake/custom_types/ecorp/fintech/mortgage/property.4.2.dsdl new file mode 100644 index 00000000..16119ed0 --- /dev/null +++ b/docs/cmake/custom_types/ecorp/fintech/mortgage/property.4.2.dsdl @@ -0,0 +1,30 @@ +# _____ +# /\ \ +# /::\ \ +# /::::\ \ +# /::::::\ \ +# /:::/\:::\ \ +# /:::/__\:::\ \ +# /::::\ \:::\ \ +# /::::::\ \:::\ \ +# /:::/\:::\ \:::\ \ +# /:::/__\:::\ \:::\____\ +# \:::\ \:::\ \::/ / +# \:::\ \:::\ \/____/ +# \:::\ \:::\ \ +# \:::\ \:::\____\ +# \:::\ \::/ / +# \:::\ \/____/ +# \:::\ \ +# \:::\____\ +# \::/ / +# \/____/ +# +# ECORP Mortgage Record + +ecorp.customer.record.2.8 lendee +int64 principle +int64 outstanding +uavcan.primitive.scalar.Real32.1.0 apr + +@extent 4096 * 8 diff --git a/docs/cmake/main.c b/docs/cmake/main.c new file mode 100644 index 00000000..80f7d0f2 --- /dev/null +++ b/docs/cmake/main.c @@ -0,0 +1,15 @@ +/* + * Copyright (C) OpenCyphal Development Team + * Copyright Amazon.com Inc. or its affiliates. 
+ * SPDX-License-Identifier: MIT + */ + +#include +#include "ecorp/customer/record_2_8.h" + +int main() { + ecorp_customer_record_2_8 record; + ecorp_customer_record_2_8_initialize_(&record); + printf("This is where we'd write the ECorp app using these types.\r\n"); + return 0; +} diff --git a/index.rst b/index.rst index 3ba2c46a..60d62002 100644 --- a/index.rst +++ b/index.rst @@ -7,6 +7,7 @@ docs/languages docs/templates CLI (nnvg) + docs/cmake/cmake docs/dev Appendix diff --git a/requirements.txt b/requirements.txt index 4aa9b534..c15407f7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,3 +4,4 @@ sphinx_rtd_theme sphinx-argparse sphinxemoji +sphinxcontrib-moderncmakedomain diff --git a/setup.cfg b/setup.cfg index 5b052dda..ddf7f9c9 100644 --- a/setup.cfg +++ b/setup.cfg @@ -21,6 +21,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Programming Language :: Python :: 3 :: Only Topic :: Scientific/Engineering Topic :: Software Development :: Embedded Systems diff --git a/src/nunavut/_generators.py b/src/nunavut/_generators.py index 11801d1b..48cb4b65 100644 --- a/src/nunavut/_generators.py +++ b/src/nunavut/_generators.py @@ -10,6 +10,8 @@ """ import abc +import multiprocessing +import multiprocessing.pool from dataclasses import dataclass from pathlib import Path from typing import Any, Dict, Iterable, List, Mapping, Optional, Type, Union @@ -47,6 +49,64 @@ class GenerationResult: The set of template files used to generate the `generated_files`. """ + def __add__(self, other: Any) -> "GenerationResult": + """ + If there exists an isomorphism between this object and other, return a union of the two as a new result. + + .. 
invisible-code-block: python + + from nunavut._generators import GenerationResult, basic_language_context_builder_from_args + from pathlib import Path + from pytest import raises + + c_lang = basic_language_context_builder_from_args(target_language="c").create() + js_lang = basic_language_context_builder_from_args( + target_language="js", + include_experimental_languages=True).create() + + with raises(RuntimeError): + GenerationResult(c_lang, {}, [], [], []) + 1 + + with raises(RuntimeError): + GenerationResult(c_lang, {}, [], [], []) + GenerationResult(js_lang, {}, [], [], []) + + .. code-block:: python + + ursine = GenerationResult(c_lang, + {Path("bears/grizzly.c"): Path("bears/grizzly.dsdl")}, + [Path("bears/grizzly.c")], + [Path("include/support.h")], + [Path("templates/code.j2")]) + bovine = GenerationResult(c_lang, + {Path("cows/jersey.c"): Path("cows/jersey.dsdl")}, + [Path("cows/jersey.c")], + [Path("include/support.h")], + [Path("templates/code.j2")]) + + mammals = ursine + bovine + + assert(mammals.generator_targets[Path("bears/grizzly.c")] == Path("bears/grizzly.dsdl")) + assert(mammals.generator_targets[Path("cows/jersey.c")] == Path("cows/jersey.dsdl")) + assert(len(mammals.support_files) == 1) + assert(len(mammals.template_files) == 1) + + """ + if not isinstance(other, self.__class__): + raise RuntimeError(f"Cannot add {type(other)} type to a GenerationResult.") + if other.lctx != self.lctx: + raise RuntimeError( + f"Result with language {str(other.lctx)} is not isomorphic with this result for " + f"language {str(self.lctx)}." 
+ ) + + return GenerationResult( + self.lctx, + {**self.generator_targets, **other.generator_targets}, + self.generated_files + other.generated_files, + [*{*(self.support_files + other.support_files)}], + [*{*(self.template_files + other.template_files)}], + ) + class AbstractGenerator(metaclass=abc.ABCMeta): """ @@ -78,20 +138,31 @@ def __init__( ): # pylint: disable=unused-argument self._namespace = namespace self._resource_types = resource_types + self._generate_namespace_types = self.generate_namespace_types_from_trinary( + self._namespace.get_language_context().get_target_language(), generate_namespace_types + ) + if index_file is not None: + self._index_files = [Path(p) for p in index_file] + else: + self._index_files = [] + + @classmethod + def generate_namespace_types_from_trinary( + cls, target_language: Language, generate_namespace_types: YesNoDefault + ) -> bool: + """ + Given the target language and a trinary value, returns a binary result for "should namespace types be generated" + as a parameter. + """ if generate_namespace_types == YesNoDefault.YES: - self._generate_namespace_types = True + return True elif generate_namespace_types == YesNoDefault.NO: - self._generate_namespace_types = False + return False else: - target_language = self._namespace.get_language_context().get_target_language() if target_language.has_standard_namespace_files: - self._generate_namespace_types = True + return True else: - self._generate_namespace_types = False - if index_file is not None: - self._index_files = [Path(p) for p in index_file] - else: - self._index_files = [] + return False @property def namespace(self) -> Namespace: @@ -242,36 +313,24 @@ def generate_all( .. code-block:: none - ┌─────────────────────────────────────────┐ - │ │ - │ 1. Language Context Construction │ generate_all - │ │ - └───────────────────┬─────────────────────┘ - │ - │ - ▼ - ┌─────────────────────────────────────────┐ - │ │ - │ 2. 
Parsing (pydsdl) and constructing │ generate_all_for_language - │ Namespace Trees from results │ - │ │ - └───────────────────┬─────────────────────┘ - │ - │ - ▼ - ┌─────────────────────────────────────────┐ - │ │ - │ 3. Generator Construction │ generate_all_from_namespace - │ │ - └───────────────────┬─────────────────────┘ - │ - │ - ▼ - ┌──────────────────────────────────────────┐ - │ │ - │ 4. Code Generation │ generate_all_from_namespace_with_generators - │ │ - └──────────────────────────────────────────┘ + ┌───────────────────────────────────────────────────┐ 1. generate_all + │ Language context construction │ + │ │ + │ ┌─────────────────────────────────────────────┐ │ 2. generate_all_for_language + │ │ Parsing (pydsdl) and constructing Namespace │ │ + │ │ trees from results │ │ + │ │ │ │ + │ │ ┌───────────────────────────────────────┐ │ │ 3. generate_all_from_namespace + │ │ │ Generator construction │ │ │ + │ │ │ │ │ │ + │ │ │ ┌─────────────────────────────────┐ │ │ │ 4. generate_all_from_namespace_with_generators + │ │ │ │ code generation │ │ │ │ + │ │ │ │ │ │ │ │ + │ │ │ │ │ │ │ │ + │ │ │ └─────────────────────────────────┘ │ │ │ + │ │ └───────────────────────────────────────┘ │ │ + │ └─────────────────────────────────────────────┘ │ + └───────────────────────────────────────────────────┘ At each stage the number of options are reduced as objects are constructed based on their values. @@ -346,6 +405,8 @@ def generate_all( on supported arguments. :returns GenerationResult: A dataclass containing explicit inputs, discovered inputs, and determined outputs. + :raises pydsdl.FrontendError: Exceptions thrown from the pydsdl frontend. For example, parsing malformed DSDL will + raise this exception. """ language_context = basic_language_context_builder_from_args( @@ -416,6 +477,8 @@ def generate_all_for_language( :param generator_args: Additional arguments to pass into the generator constructors. 
See the documentation for specific generator types for details on supported arguments. :return: A dataclass containing explicit inputs, discovered inputs, and determined outputs. + :raises pydsdl.FrontendError: Exceptions thrown from the pydsdl frontend. For example, parsing malformed DSDL will + raise this exception. """ index = Namespace.read_files( outdir, @@ -467,11 +530,17 @@ def generate_all_from_namespace( :param generator_args: Additional arguments to pass into the generator constructors. See the documentation for specific generator types for details on supported arguments. :return: A dataclass containing explicit inputs, discovered inputs, and determined outputs. + :raises pydsdl.FrontendError: Exceptions thrown from the pydsdl frontend. For example, parsing malformed DSDL will + raise this exception. """ support_generator_args = generator_args.copy() support_generator_args["templates_dir"] = generator_args.get("support_templates_dir", []) + # if ResourceType.INDEX.value() | resource_types != 0: + # For implementing issue #334 or other features requiring an index template, add an index_generator_type + # here to the generators we create and run + if code_generator_type is None: # load default code generator from .jinja import DSDLCodeGenerator # pylint: disable=import-outside-toplevel @@ -513,9 +582,10 @@ def generate_all_from_namespace_with_generators( :param no_overwrite: If True then generated files will not be allowed to overwrite existing files under the `outdir` path causing errors. :return: A dataclass containing explicit inputs, discovered inputs, and determined outputs. + :raises pydsdl.FrontendError: Exceptions thrown from the pydsdl frontend. For example, parsing malformed DSDL will + raise this exception. 
""" - # TODO: create code generators per-root and run them in parallel template_files = list(set(support_generator.get_templates()).union(set(code_generator.get_templates()))) support_files = list(support_generator.generate_all(dry_run, not no_overwrite)) generated_files = list(code_generator.generate_all(dry_run, not no_overwrite)) diff --git a/src/nunavut/_namespace.py b/src/nunavut/_namespace.py index 83a9df33..95fa2656 100644 --- a/src/nunavut/_namespace.py +++ b/src/nunavut/_namespace.py @@ -48,6 +48,8 @@ """ import collections +import multiprocessing +import multiprocessing.pool import sys from functools import singledispatchmethod from os import PathLike @@ -86,49 +88,102 @@ def _register(self, cls, method=None): # type: ignore # +--------------------------------------------------------------------------------------------------------------------+ class Generatable(type(Path())): # type: ignore """ - A file that can be generated from a pydsdl type. The override of the __new__ operator is required until python 3.12. + A file that can be generated from a pydsdl type. + .. invisible-code-block: python + + from nunavut._namespace import Generatable + from pathlib import Path + from unittest.mock import MagicMock + import pydsdl + + dsdl_definition = MagicMock(spec=pydsdl.CompositeType) + dependent_types = [MagicMock(spec=pydsdl.CompositeType)] + + .. code-block:: python + + # Generatables combine a Path to the generated file with the pydsdl type that can be reified into the file + # and the types that are required to generate the file. This is useful for tracking dependencies and + # generating files in the correct order. It also provides a representation of the generated file before it + # is actually generated. + + generatable = Generatable(dsdl_definition, dependent_types, "test.h") + + # This is a Generatable object. 
+ assert isinstance(generatable, Generatable) + assert generatable.definition == dsdl_definition + assert generatable.input_types == dependent_types + + # But it is also a Path object. + assert isinstance(generatable, Path) + assert Path("test.h") == generatable + + :param pydsdl.Any definition: The pydsdl type that can be reified into a generated file. + :param List[pydsdl.Any] input_types: The types that are required to generate the file. :param args: Arguments to pass to the Path constructor. :param kwargs: Keyword arguments to pass to the Path constructor. """ - def __new__(cls, *args: Any, **kwargs: Any) -> "Generatable": - if cls is not Generatable: - raise TypeError("Unknown type passed to Generatable constructor.") - try: - definition = cast(pydsdl.CompositeType, kwargs.pop("definition")) - except KeyError as ex: - raise ValueError("Generatable requires a 'definition' argument.") from ex + @classmethod + def _check_arguments( + cls, definition: pydsdl.CompositeType, input_types: List[pydsdl.CompositeType] + ) -> Tuple[pydsdl.CompositeType, List[pydsdl.CompositeType]]: + """ + Check the arguments for the Generatable constructor. + :param pydsdl.Any definition: The pydsdl type that can be reified into a generated file. + :param List[pydsdl.Any] input_types: The types that are required to generate the file. + :raises TypeError: If the arguments are not of the correct types. + :return: The definition and input types. 
+ """ if not isinstance(definition, pydsdl.CompositeType): - raise ValueError("Generatable requires a 'definition' argument of type pydsdl.CompositeType.") + raise TypeError("Generatable requires a 'definition' argument of type pydsdl.CompositeType.") + if not isinstance(input_types, list): + raise TypeError("Generatable requires an 'input_types' argument of type List[pydsdl.CompositeType].") + return definition, input_types - try: - input_types = cast(List[pydsdl.CompositeType], kwargs.pop("input_types")) - except KeyError: - input_types = [] + if sys.version_info < (3, 12): - if not isinstance(input_types, list): - raise ValueError("Generatable requires an 'input_types' argument of type List[pydsdl.CompositeType].") + def __new__(cls, *args: Any, **kwargs: Any) -> "Generatable": + """ + The override of the __new__ operator is required until python 3.12. + After that, the __init__ operator can be used. + """ + if cls is not Generatable: + raise TypeError("Unknown type passed to Generatable constructor.") + + if len(args) < 3: + raise TypeError("Generatable requires 'definition', 'input_types', and 'path' arguments.") - new_pure_path = cast(Generatable, super().__new__(cls, *args, **kwargs)) - new_pure_path._definition = definition - new_pure_path._input_types = input_types - return new_pure_path + definition, input_types = cls._check_arguments(*args[:2]) + new_pure_path = cast(Generatable, super().__new__(cls, *args[2:], **kwargs)) + new_pure_path._definition = definition + new_pure_path._input_types = input_types + return new_pure_path + + else: + + def __init__( + self, definition: pydsdl.CompositeType, input_types: List[pydsdl.CompositeType], *args: Any, **kwargs: Any + ): + super().__init__(*args, **kwargs) + self._definition, self._input_types = self._check_arguments(definition, input_types) @classmethod def wrap( cls, path: Path, definition: pydsdl.CompositeType, input_types: List[pydsdl.CompositeType] ) -> "Generatable": """ - Wrap a Path object with the 
Generatable interface. + Create a Generatable object from a Path, a pydsdl type, and a list of pydsdl types in a Python-version agnostic + way. This is useful for deferred construction of Generatable objects since __init__ is not available in + the python 3.11 and earlier versions. :param Path path: The path to the generated file. :param pydsdl.Any definition: The pydsdl type that can be reified into a generated file. :param List[pydsdl.Any] input_types: The types that are required to generate the file. :return: A Generatable object. """ - return Generatable(path, definition=definition, input_types=input_types) + return Generatable(definition, input_types, path) def with_segments(self, *pathsegments: Union[str, PathLike]) -> Path: """ @@ -152,6 +207,11 @@ def input_types(self) -> List[pydsdl.CompositeType]: return self._input_types.copy() # type: ignore # pylint: disable=no-member, # --[DATA MODEL]------------------------------------------------------------------------------------------------- + def __reduce__(self) -> Tuple[Callable, Tuple[Path, pydsdl.CompositeType, List[pydsdl.CompositeType]]]: + super_reduction = super().__reduce__() + reduced_path = Path(*super_reduction[1]) if isinstance(super_reduction, tuple) else Path(super_reduction) + return (self.wrap, (reduced_path, self.definition, self.input_types)) + def __eq__(self, other: object) -> bool: if isinstance(other, Generatable): return bool( @@ -163,17 +223,17 @@ def __eq__(self, other: object) -> bool: return super().__eq__(other) # type: ignore def __hash__(self) -> int: - return hash((super().__hash__(), self.definition)) + return hash((super().__hash__(), self._definition)) # pylint: disable=no-member def __repr__(self) -> str: return ( f"{super().__repr__()}, " # pylint: disable=no-member - f"definition={repr(self._definition)}, " - f"input_types={repr(self._input_types)}" + f"definition={repr(self._definition)}, " # pylint: disable=no-member + f"input_types={repr(self._input_types)}" # pylint: 
disable=no-member ) def __copy__(self) -> "Generatable": - return Generatable(self, definition=self.definition, input_types=self.input_types) + return Generatable(self.definition, self.input_types, *self.parts) # +--------------------------------------------------------------------------------------------------------------------+ @@ -421,22 +481,32 @@ def read_files( already_read: set[Path] = set() - # TODO: parallelize fileset processing using a map-reduce pattern and threads - while fileset: - next_file = fileset.pop() - target_type, dependent_types = pydsdl.read_files( - next_file, - root_namespace_directories_or_names, - lookup_directories, - print_output_handler, - allow_unregulated_fixed_port_id, - ) - already_read.add(next_file) # TODO: canonical paths for keying here? - Namespace.add_types(index, (target_type[0], dependent_types)) - if not omit_dependencies: - for dependent_type in dependent_types: - if dependent_type.source_file_path not in already_read: - fileset.add(dependent_type.source_file_path) + running_lookups: list[multiprocessing.pool.AsyncResult] = [] + with multiprocessing.pool.Pool() as pool: + while fileset: + next_file = fileset.pop() + running_lookups.append( + pool.apply_async( + pydsdl.read_files, + args=( + next_file, + root_namespace_directories_or_names, + lookup_directories, + print_output_handler, + allow_unregulated_fixed_port_id, + ), + ) + ) + already_read.add(next_file) # TODO: canonical paths for keying here? 
+ if not fileset: + for lookup in running_lookups: + target_type, dependent_types = lookup.get() + Namespace.add_types(index, (target_type[0], dependent_types)) + if not omit_dependencies: + for dependent_type in dependent_types: + if dependent_type.source_file_path not in already_read: + fileset.add(dependent_type.source_file_path) + running_lookups.clear() return index @@ -823,7 +893,7 @@ def add_data_type( if extension is None: extension = language.get_config_value(Language.WKCV_DEFINITION_FILE_EXTENSION) output_file = Path(self._base_output_path) / IncludeGenerator.make_path(dsdl_type, language, extension) - output_generatable = Generatable.wrap(output_file, dsdl_type, input_types) + output_generatable = Generatable(dsdl_type, input_types, output_file) self._data_type_to_outputs[dsdl_type] = output_generatable return output_generatable diff --git a/src/nunavut/_templates.py b/src/nunavut/_templates.py index 26cc1fcf..b1e468d5 100644 --- a/src/nunavut/_templates.py +++ b/src/nunavut/_templates.py @@ -42,16 +42,6 @@ class SupportsTemplateContext: """ -def template_environment_list_filter(filter_func: typing.Callable) -> typing.Callable[..., typing.List[str]]: - """ - Decorator for marking environment dependent filters. - An object supporting the :class:`SupportsTemplateEnv` protocol - will be passed to the filter as the first argument. - """ - setattr(filter_func, ENVIRONMENT_FILTER_ATTRIBUTE_NAME, True) - return filter_func - - def template_context_filter(filter_func: typing.Callable) -> typing.Callable[..., str]: """ Decorator for marking context dependent filters. diff --git a/src/nunavut/_version.py b/src/nunavut/_version.py index 3ba58ba6..ab16630d 100644 --- a/src/nunavut/_version.py +++ b/src/nunavut/_version.py @@ -8,7 +8,7 @@ .. autodata:: __version__ """ -__version__ = "3.0.0.dev0" +__version__ = "3.0.0.dev1" __license__ = "MIT" __author__ = "OpenCyphal" __copyright__ = "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
Copyright (c) 2023 OpenCyphal." diff --git a/src/nunavut/cli/__init__.py b/src/nunavut/cli/__init__.py index 74e49bb0..3aa29e8f 100644 --- a/src/nunavut/cli/__init__.py +++ b/src/nunavut/cli/__init__.py @@ -546,9 +546,7 @@ def extension_type(raw_arg: str) -> str: ).lstrip(), ) - run_mode_ex_group = run_mode_group.add_mutually_exclusive_group() - - run_mode_ex_group.add_argument( + run_mode_group.add_argument( "--list-outputs", action="store_true", help=textwrap.dedent( @@ -560,11 +558,16 @@ def extension_type(raw_arg: str) -> str: systems that need a list of targets to determine if a rebuild is necessary. + If used with --list-inputs the list of inputs will be emitted first followed + by the list of outputs. A single empty value will separate the two lists when + using value-delimited formats. Use --list-format to control the output format + including using json to avoid the need for an empty-value delimiter. + """ ).lstrip(), ) - run_mode_ex_group.add_argument( + run_mode_group.add_argument( "--list-inputs", action="store_true", help=textwrap.dedent( @@ -576,11 +579,16 @@ def extension_type(raw_arg: str) -> str: This command is useful for integrating with CMake and other build systems that need a list of inputs to determine if a rebuild is necessary. + If used with --list-outputs the list of inputs will be emitted first followed + by the list of outputs. A single empty value will separate the two lists. Use + --list-format to control the output format including using json to avoid the + need for an empty-value delimiter. + """ ).lstrip(), ) - run_mode_ex_group.add_argument( + run_mode_group.add_argument( "--list-configuration", "-lc", action="store_true", @@ -591,6 +599,26 @@ def extension_type(raw_arg: str) -> str: and --list-outputs this command does *not* imply --dry-run but can be used in conjunction with it. + This option is only available if --list-format is set to json. 
+ + """ + ).lstrip(), + ) + + run_mode_group.add_argument( + "--list-format", + default="scsv", + choices=["csv", "scsv", "json", "json-pretty"], + help=textwrap.dedent( + """ + + For commands that emit lists of files this option controls the format of the output. + + csv - comma separated values + scsv - semicolon separated values + json - json formatted results + json-pretty - json formatted results with indentation + """ ).lstrip(), ) diff --git a/src/nunavut/cli/parsers.py b/src/nunavut/cli/parsers.py index 938c28d2..89feb005 100644 --- a/src/nunavut/cli/parsers.py +++ b/src/nunavut/cli/parsers.py @@ -12,6 +12,7 @@ import os import re import sys +import textwrap from pathlib import Path from typing import Any, Dict, List, Optional, Set, Tuple @@ -182,6 +183,19 @@ def _post_process_args(self, args: argparse.Namespace) -> None: # Generator arguments args.generate_namespace_types = YesNoDefault.YES if args.generate_namespace_types else YesNoDefault.DEFAULT + # Can't list configuration as csv. Has to be a structured return format. + if args.list_configuration and args.list_format in ("scsv", "csv"): + self.error( + textwrap.dedent( + f""" + + --list-format {args.list_format} is not supported for --list-configuration. Use a structured format like + --list-format json to list configuration information. 
+ + """ + ) + ) + def _parse_target_paths( self, target_files_or_root_namespace: Optional[List[str]], greedy: bool, error_if_folder: bool = False ) -> Tuple[Set[Path], Set[Path]]: diff --git a/src/nunavut/cli/runners.py b/src/nunavut/cli/runners.py index 226be9b0..cbceacd7 100644 --- a/src/nunavut/cli/runners.py +++ b/src/nunavut/cli/runners.py @@ -13,13 +13,27 @@ import logging import sys from pathlib import Path -from typing import Any, Callable, Dict, Iterable, Optional +from typing import Any, Dict, Iterable, Optional from .._generators import basic_language_context_builder_from_args, generate_all +from .._utilities import DefaultValue from .._utilities import ResourceType from ..lang import LanguageContext +class ConfigJSONEncoder(json.JSONEncoder): + """ + A JSON encoder that can handle Nunavut configuration and pydsdl objects. + """ + + def default(self, o: Any) -> Any: + if isinstance(o, Path): + return str(o) + if isinstance(o, DefaultValue): + return o.value + return super().default(o) + + class StandardArgparseRunner: """ Runner based on Python argparse. This class delegates most of the generation logic to the :func:`generate_all` @@ -43,51 +57,75 @@ def run(self) -> int: Perform actions defined by the arguments this object was created with. This may generate outputs where the arguments have requested this action. 
""" + lister_object: Dict[str, Any] = {} if self._args.list_configuration: - self.list_configuration(basic_language_context_builder_from_args(**vars(self.args)).create()) + lister_object["configuration"] = self.list_configuration( + basic_language_context_builder_from_args(**vars(self.args)).create() + ) result = generate_all(**vars(self.args)) + if self._args.list_inputs: + input_dsdl = {str(p) for p in set(result.template_files)} + for _, target_data in result.generator_targets.items(): + input_dsdl.add(str(target_data.definition.source_file_path.resolve())) + input_dsdl.update({str(d.source_file_path.resolve()) for d in target_data.input_types}) + lister_object["inputs"] = list(input_dsdl) + if self._args.list_outputs: file_iterators = [] if self._args.resource_types != ResourceType.NONE.value: file_iterators.append(result.support_files) if (self._args.resource_types & ResourceType.ONLY.value) == 0: file_iterators.append(result.generated_files) - self.stdout_lister(itertools.chain(*file_iterators), lambda p: str(p.resolve()), end="") - - elif self._args.list_inputs: - input_dsdl = set(result.template_files) - for _, target_data in result.generator_targets.items(): - input_dsdl.add(target_data.definition.source_file_path) - input_dsdl.update({d.source_file_path for d in target_data.input_types}) - self.stdout_lister(input_dsdl, lambda p: str(p.resolve()), end="") + lister_object["outputs"] = [str(p.resolve()) for p in itertools.chain(*file_iterators)] + + if self._args.list_format == "json": + json.dump(lister_object, sys.stdout, ensure_ascii=False, cls=ConfigJSONEncoder) + elif self._args.list_format == "json-pretty": + json.dump(lister_object, sys.stdout, ensure_ascii=False, indent=2, cls=ConfigJSONEncoder) + else: + if self._args.list_format == "scsv": + sep = ";" + end = ";" + elif self._args.list_format == "csv": + sep = "," + end = "," + else: # pragma: no cover + raise ValueError(f"Unsupported list format: {self._args.list_format}") + had_inputs = False + 
has_outputs = "outputs" in lister_object and len(lister_object["outputs"]) > 0 + if "inputs" in lister_object and len(lister_object["inputs"]) > 0: + had_inputs = True + self.stdout_lister(lister_object["inputs"], sep=sep, end=(end if has_outputs else "")) + if has_outputs: + if had_inputs: + sys.stdout.write(sep) + self.stdout_lister(lister_object["outputs"], sep=sep, end="") return 0 - def list_configuration(self, lctx: LanguageContext) -> None: + def list_configuration(self, lctx: LanguageContext) -> Dict[str, Any]: """ - List the configuration of the language context to a json file. + List the configuration of the language context to an object. """ config: Dict[str, Any] = {} config["target_language"] = lctx.get_target_language().name config["sections"] = lctx.config.sections() - json.dump(config, sys.stdout, ensure_ascii=False) + return config def stdout_lister( self, - things_to_list: Iterable[Any], - to_string: Callable[[Any], str], - sep: str = ";", - end: str = ";", + things_to_list: Iterable[str], + sep: str, + end: str, ) -> None: """ Write a list of things to stdout. :param Iterable[Any] things_to_list: The things to list. - :param Callable[[Any], str] to_string: A function that converts a thing to a string. :param str sep: The separator to use between things. :param str end: The character to print at the end. 
""" @@ -97,7 +135,7 @@ def stdout_lister( first = False else: sys.stdout.write(sep) - sys.stdout.write(to_string(thing)) + sys.stdout.write(thing) if not first: sys.stdout.write(end) diff --git a/src/nunavut/jinja/__init__.py b/src/nunavut/jinja/__init__.py index 6c934a30..775f52f7 100644 --- a/src/nunavut/jinja/__init__.py +++ b/src/nunavut/jinja/__init__.py @@ -24,7 +24,6 @@ from .._postprocessors import FilePostProcessor, LinePostProcessor, PostProcessor from .._utilities import TEMPLATE_SUFFIX, ResourceSearchPolicy, ResourceType, YesNoDefault from .environment import CodeGenEnvironment, CodeGenEnvironmentBuilder -from .jinja2 import TemplateNotFound from .loaders import DEFAULT_TEMPLATE_PATH, DSDLSupportTemplateLoader, DSDLTemplateLoader logger = logging.getLogger(__name__) diff --git a/src/nunavut/jinja/loaders.py b/src/nunavut/jinja/loaders.py index 7970bce0..08c0fa3c 100644 --- a/src/nunavut/jinja/loaders.py +++ b/src/nunavut/jinja/loaders.py @@ -380,7 +380,7 @@ def get_source(self, environment: Environment, template: str) -> Tuple[Any, str, mtime = support_file.stat().st_mtime - def is_modified() -> bool: + def is_modified() -> bool: # pragma: no cover try: return support_file.stat().st_mtime == mtime except OSError: diff --git a/src/nunavut/lang/_language.py b/src/nunavut/lang/_language.py index 1c0d32b7..d8e680ae 100644 --- a/src/nunavut/lang/_language.py +++ b/src/nunavut/lang/_language.py @@ -496,6 +496,11 @@ def get_globals(self) -> typing.Mapping[str, typing.Any]: :return: A mapping of global names to global values. 
""" + + # If this method is called before init (see __getattr__) then we raise an AttributeError to avoid + # infinite recursion + _ = object.__getattribute__(self, "_globals") + if self._globals is None: globals_map: typing.Dict[str, typing.Any] = {} diff --git a/src/nunavut/lang/c/__init__.py b/src/nunavut/lang/c/__init__.py index 2e8f1427..3dd2d555 100644 --- a/src/nunavut/lang/c/__init__.py +++ b/src/nunavut/lang/c/__init__.py @@ -17,7 +17,6 @@ from nunavut._dependencies import Dependencies from nunavut._templates import ( - template_environment_list_filter, template_language_filter, template_language_list_filter, template_language_test, diff --git a/src/nunavut/lang/cpp/__init__.py b/src/nunavut/lang/cpp/__init__.py index 82564df6..4dc60831 100644 --- a/src/nunavut/lang/cpp/__init__.py +++ b/src/nunavut/lang/cpp/__init__.py @@ -21,7 +21,6 @@ from nunavut._dependencies import Dependencies from nunavut._templates import ( - template_environment_list_filter, template_language_filter, template_language_list_filter, template_language_test, diff --git a/test/gentest_namespaces/test_namespaces.py b/test/gentest_namespaces/test_namespaces.py index 601a90ad..637427d2 100644 --- a/test/gentest_namespaces/test_namespaces.py +++ b/test/gentest_namespaces/test_namespaces.py @@ -266,27 +266,20 @@ def test_generatable_constructor(): # type: ignore input_types = [MagicMock(spec=CompositeType)] input_types[0].__hash__ = MagicMock(return_value=1) # type: ignore - gen = Generatable.wrap(path, definition, input_types) + gen = Generatable(definition, input_types, path) assert gen.definition == definition assert gen.input_types == input_types + assert gen == path - with pytest.raises(ValueError): + with pytest.raises(TypeError): Generatable(path) - with pytest.raises(ValueError): - Generatable(path, definition=None) - - with pytest.raises(ValueError): - Generatable(path, input_types=None) - - with pytest.raises(ValueError): - Generatable(path, definition="a string", 
input_types=input_types) - - with pytest.raises(ValueError): - Generatable(path, definition=definition, input_types="not a list") + with pytest.raises(TypeError): + Generatable(None, path) - _ = Generatable(path, definition=definition, input_types=input_types) + with pytest.raises(TypeError): + Generatable(None, None, path) def test_generatable_copy(): # type: ignore @@ -314,21 +307,35 @@ def test_generatable_copy(): # type: ignore def test_generatable_as_path_like(): # type: ignore """Test Generatable objects as Path-like objects.""" - gen = Generatable("test", definition=MagicMock(spec=CompositeType)) + gen = Generatable(MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], Path("test")) assert Path("path", "to", "test") == Path("path", "to") / gen - with pytest.raises(ValueError): + with pytest.raises(TypeError): Generatable("foo") print(f"{str(gen)}:{repr(gen)}") - assert str(Path("foo")) == str(Generatable("foo", definition=MagicMock(spec=CompositeType))) - assert Path("foo") == Generatable("foo/bar", definition=MagicMock(spec=CompositeType)).parent - assert Path("foo/bar").name == Generatable("foo/bar", definition=MagicMock(spec=CompositeType)).name - assert Path("foo/bar") == Generatable("foo/bar", definition=MagicMock(spec=CompositeType)) - assert Generatable("foo/bar", definition=MagicMock(spec=CompositeType)) == Path("foo/bar") - assert repr(Generatable("foo/bar", definition=MagicMock(spec=CompositeType))) != repr(Path("foo/bar")) + assert str(Path("foo")) == str( + Generatable(MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], Path("foo")) + ) + assert ( + Path("foo") + == Generatable(MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], Path("foo/bar")).parent + ) + assert ( + Path("foo/bar").name + == Generatable(MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], Path("foo/bar")).name + ) + assert Path("foo/bar") == Generatable( + MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], 
Path("foo/bar") + ) + assert Generatable(MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], Path("foo/bar")) == Path( + "foo/bar" + ) + assert repr(Generatable(MagicMock(spec=CompositeType), [MagicMock(spec=CompositeType)], Path("foo/bar"))) != repr( + Path("foo/bar") + ) def test_namespace_constructor(gen_paths): # type: ignore @@ -452,8 +459,9 @@ def test_identity_namespace(gen_paths): # type: ignore with pytest.raises(KeyError): namespace.find_output_path_for_type(aves) + @pytest.mark.parametrize("read_method", [gen_test_namespace_folder, gen_test_namespace_files]) -@pytest.mark.parametrize("templates_subdir",["default", "namespace"]) +@pytest.mark.parametrize("templates_subdir", ["default", "namespace"]) def test_namespace_any_template(gen_paths, read_method, templates_subdir): # type: ignore """Basic test of a non-empty namespace using the Any.j2 then Namespace.j2 templates.""" language_context = LanguageContextBuilder(include_experimental_languages=True).set_target_language("js").create() @@ -543,6 +551,7 @@ def test_namespace_generation(gen_paths, read_method): # type: ignore assert data_type_count == test_params.total_namespace_count + 1 # +1 for the index itself + @pytest.mark.parametrize("read_method", [gen_test_namespace_folder, gen_test_namespace_files]) @pytest.mark.parametrize( "language_key,expected_file_ext,expected_strop_part_0,expected_strop_part_1", diff --git a/test/gentest_nnvg/test_nnvg.py b/test/gentest_nnvg/test_nnvg.py index 4643fbd8..817f659b 100644 --- a/test/gentest_nnvg/test_nnvg.py +++ b/test/gentest_nnvg/test_nnvg.py @@ -253,6 +253,124 @@ def test_list_outputs(gen_paths: Any, run_nnvg_main: Callable) -> None: assert expected_output == completed_wo_empty +@pytest.mark.parametrize("list_format", ["csv", "scsv"]) +def test_list_inputs_and_outputs(gen_paths: Any, run_nnvg_main: Callable, list_format: str) -> None: + """ + Verifies nnvg --list-output --list-input --list-format [format]. 
+ """ + expected_inputs_result = sorted( + [ + str(gen_paths.templates_dir / Path("Any.j2")), + str(gen_paths.dsdl_dir / Path("uavcan", "test", "TestType.0.8.dsdl")), + str(gen_paths.dsdl_dir / Path("scotec", "Timer.1.0.dsdl")), + ] + ) + + expected_outputs_result = sorted( + [ + str(gen_paths.out_dir / Path("uavcan", "test", "TestType_0_8.json")), + str(gen_paths.out_dir / Path("scotec", "Timer_1_0.json")), + ] + ) + + nnvg_args = [ + "--no-target-namespaces", + "--templates", + gen_paths.templates_dir.as_posix(), + "--outdir", + gen_paths.out_dir.as_posix(), + "--output-extension", + ".json", + "-l", + "js", + "-Xlang", + "--omit-serialization-support", + "--lookup-dir", + (gen_paths.dsdl_dir / Path("scotec")).as_posix(), + "--list-outputs", + "--list-inputs", + "--list-format", + list_format, + f"{(gen_paths.dsdl_dir / Path('uavcan')).as_posix()}:{Path('test', 'TestType.0.8.dsdl').as_posix()}", + ] + + expected_separator = ";" if list_format == "scsv" else "," + result = run_nnvg_main(gen_paths, nnvg_args) + assert 0 == result.returncode + raw_result = result.stdout.decode("utf-8") + completed = raw_result.split(2 * expected_separator) + assert 2 == len(completed) + completed_inputs = sorted(completed[0].split(expected_separator)) + completed_outputs = sorted(completed[1].split(expected_separator)) + assert expected_inputs_result == completed_inputs + assert expected_outputs_result == completed_outputs + + +@pytest.mark.parametrize("list_format", ["csv", "scsv"]) +def test_list_config_w_scsv(gen_paths: Any, run_nnvg_main: Callable, list_format: str) -> None: + """ + Covers failure when using --list-configuration with unsupported list formats. 
+ """ + nnvg_args = [ + "--no-target-namespaces", + "--templates", + gen_paths.templates_dir.as_posix(), + "--outdir", + gen_paths.out_dir.as_posix(), + "--output-extension", + ".json", + "-l", + "js", + "-Xlang", + "--omit-serialization-support", + "--lookup-dir", + (gen_paths.dsdl_dir / Path("scotec")).as_posix(), + "--list-configuration", + "--list-format", + list_format, + f"{(gen_paths.dsdl_dir / Path('uavcan')).as_posix()}:{Path('test', 'TestType.0.8.dsdl').as_posix()}", + ] + + with pytest.raises( + AssertionError, match=rf".*--list-format {list_format} is not supported for --list-configuration.*" + ): + assert 0 == run_nnvg_main(gen_paths, nnvg_args, raise_argument_error=True) + + +@pytest.mark.parametrize("json_format", ["json", "json-pretty"]) +def test_list_config_w_json(gen_paths: Any, run_nnvg_main: Callable, json_format: str) -> None: + """ + Covers using --list-configuration with --list-format json and variations. + """ + nnvg_args = [ + "--no-target-namespaces", + "--templates", + gen_paths.templates_dir.as_posix(), + "--outdir", + gen_paths.out_dir.as_posix(), + "--output-extension", + ".json", + "-l", + "js", + "-Xlang", + "--omit-serialization-support", + "--lookup-dir", + (gen_paths.dsdl_dir / Path("scotec")).as_posix(), + "--list-configuration", + "--list-format", + json_format, + f"{(gen_paths.dsdl_dir / Path('uavcan')).as_posix()}:{Path('test', 'TestType.0.8.dsdl').as_posix()}", + ] + + result = run_nnvg_main(gen_paths, nnvg_args) + assert 0 == result.returncode + raw_result = result.stdout.decode("utf-8") + result_obj = json.loads(raw_result) + assert isinstance(result_obj, dict) + assert "configuration" in result_obj + assert isinstance(result_obj["configuration"], dict) + + def test_list_support_outputs_builtin(gen_paths: Any, run_nnvg_main: Callable) -> None: """ Verifies nnvg's --list-output mode for internal language support. 
@@ -754,19 +872,24 @@ def test_language_allow_unregulated_fixed_port_id(gen_paths: Any, run_nnvg_main: assert expected_output == completed_wo_empty -def test_list_configuration(gen_paths: Any, run_nnvg_main: Callable) -> None: +@pytest.mark.parametrize("json_format", ["json", "json-pretty"]) +def test_list_configuration(gen_paths: Any, run_nnvg_main: Callable, json_format: str) -> None: """ Verifies nnvg's --list-configuration option """ - nnvg_args = ["--list-configuration"] + nnvg_args = ["--list-configuration", "--list-format", json_format] - completed = run_nnvg_main(gen_paths, nnvg_args).stdout.decode("utf-8") + result = run_nnvg_main(gen_paths, nnvg_args) + completed = result.stdout.decode("utf-8") + assert completed parsed_config = json.loads(completed) default_target_section_name = LanguageClassLoader.to_language_module_name( LanguageContextBuilder.DEFAULT_TARGET_LANGUAGE ) - assert len(parsed_config["sections"][default_target_section_name]) > 0 - print(json.dumps(parsed_config)) + assert "configuration" in parsed_config + config = parsed_config["configuration"] + assert len(config["sections"][default_target_section_name]) > 0 + print(json.dumps(config)) def test_colon_syntax(gen_paths: Any, run_nnvg_main: Callable) -> None: diff --git a/tox.ini b/tox.ini index c8ae0a25..3c5ef628 100644 --- a/tox.ini +++ b/tox.ini @@ -2,7 +2,7 @@ # The standard version to develop against is 3.11. 
# [tox] -envlist = {py38,py39,py310,py311,py312}-{test,nnvg,doctest,rstdoctest},lint,report,docs +envlist = {py38,py39,py310,py311,py312,py313}-{test,nnvg,doctest,rstdoctest},lint,report,docs [base] deps = @@ -61,6 +61,7 @@ omit = */setup.py */conf.py */embed_jinja.py + */public_regulated_data_types/* [coverage:paths] @@ -133,6 +134,9 @@ passenv = deps = test,nnvg,doctest,rstdoctest: {[base]deps} +allowlist_externals = + coverage + commands = nnvg: coverage run \ @@ -143,17 +147,20 @@ commands = nnvg: --language-standard c++17-pmr \ nnvg: -v \ nnvg: {toxinidir}/submodules/public_regulated_data_types/uavcan + nnvg: coverage combine --append test: coverage run \ test: -m pytest {posargs} --basetemp={envtmpdir} -p "no:cacheprovider" \ test: --junit-xml={envtmpdir}/xunit-result.xml \ test: --rootdir={toxinidir} \ test: {toxinidir}/test + test: coverage combine --append doctest: coverage run \ doctest: -m pytest {posargs} --basetemp={envtmpdir} -p "no:cacheprovider" \ doctest: --rootdir={toxinidir} \ doctest: {toxinidir}/src + doctest: coverage combine --append rstdoctest: pytest {posargs} --basetemp={envtmpdir} -p "no:cacheprovider" \ rstdoctest: --rootdir={toxinidir} \ @@ -173,7 +180,7 @@ commands = deps = coverage skip_install = true commands = - coverage combine --append + -coverage combine --append coverage html -d {envtmpdir} coverage xml -o {envtmpdir}/coverage.xml diff --git a/verification/.devcontainer/devcontainer.json b/verification/.devcontainer/devcontainer.json index ba70bada..3d6d5d8d 100644 --- a/verification/.devcontainer/devcontainer.json +++ b/verification/.devcontainer/devcontainer.json @@ -8,7 +8,10 @@ "extensions": [ "uavcan.dsdl", "wholroyd.jinja", - "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools", + "ms-vscode.cpptools-themes", + "ms-vscode.cmake-tools", + "josetr.cmake-language-support-vscode", "streetsidesoftware.code-spell-checker", "xaver.clang-format", "vadimcn.vscode-lldb", diff --git a/verification/python/noxfile.py 
b/verification/python/noxfile.py index 6951bb74..ef5e8cea 100644 --- a/verification/python/noxfile.py +++ b/verification/python/noxfile.py @@ -10,7 +10,7 @@ import nox -PYTHONS = ["3.8", "3.9", "3.10", "3.11", "3.12"] +PYTHONS = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] nox.options.error_on_external_run = True