Compare revisions
Commits on Source (845)
Showing with 1249 additions and 302 deletions
# common
.cache
# C++ Build
build*/
install*/
include/aidge/backend/cpu_version.h
# VSCode
.vscode
@@ -11,6 +15,8 @@ __pycache__
*.pyc
*.egg-info
dist*/
wheelhouse/
aidge_backend_cpu/_version.py
# Mermaid
*.mmd
@@ -19,4 +25,4 @@ dist*/
xml*/
# ONNX
*.onnx
\ No newline at end of file
*.onnx
@@ -4,15 +4,22 @@
###############################################################################
stages:
# Analyse code
- static_analysis
# Build Aidge
- build
# Unit test stage
- test
- coverage
- release
- deploy
include:
- local: '/.gitlab/ci/_global.gitlab-ci.yml'
- local: '/.gitlab/ci/static_analysis.gitlab-ci.yml'
- local: '/.gitlab/ci/build.gitlab-ci.yml'
- local: '/.gitlab/ci/test.gitlab-ci.yml'
- project: 'eclipse/aidge/gitlab_shared_files'
ref: 'main'
file: # choose which jobs to run by including the corresponding files.
- '.gitlab/ci/ubuntu_cpp.gitlab-ci.yml'
- '.gitlab/ci/ubuntu_python.gitlab-ci.yml'
- '.gitlab/ci/release/cibuildwheel_ubuntu.gitlab-ci.yml'
- '.gitlab/ci/windows_cpp.gitlab-ci.yml'
- '.gitlab/ci/windows_python.gitlab-ci.yml'
- '.gitlab/ci/release/cibuildwheel_windows.gitlab-ci.yml'
################################################################################
# Centralized definitions of common job parameter values. #
# Parameters with many optional configurations may be in separate files. #
# #
################################################################################
variables:
GIT_SUBMODULE_STRATEGY: recursive
OMP_NUM_THREADS: 4
GIT_SSL_NO_VERIFY: 1
DEBIAN_FRONTEND: noninteractive
image: n2d2-ci/ubuntu20.04/cpu:latest
\ No newline at end of file
build:ubuntu_cpp:
stage: build
tags:
- docker
image: n2d2-ci/ubuntu20.04/cpu:latest
script:
- INSTALL_PATH="$CI_PROJECT_DIR/install_cpp"
- mkdir -p $INSTALL_PATH
- mkdir -p build_cpp
# Clone and compile dependencies
- MODULE_NAME="aidge_core"
- BASE_URL=`echo $CI_REPOSITORY_URL | sed "s;\/*$CI_PROJECT_PATH.*;;"`
- REPO_URL="$BASE_URL/aidge/$MODULE_NAME.git"
- git clone $REPO_URL $MODULE_NAME
- mkdir -p $MODULE_NAME/build
- cd $MODULE_NAME/build
- cmake -DCMAKE_INSTALL_PREFIX:PATH=$INSTALL_PATH -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON ..
- make -j4 all install
- cd ../..
# Build current module
- cd build_cpp
- cmake -DCMAKE_INSTALL_PREFIX:PATH=$INSTALL_PATH -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON ..
- make -j4 all install
artifacts:
paths:
- build_cpp/
- install_cpp/
build:ubuntu_python:
stage: build
tags:
- docker
image: n2d2-ci/ubuntu20.04/cpu:latest
script:
- export AIDGE_INSTALL=`pwd`/install
# Create virtual env
- python3 -m pip install virtualenv
- virtualenv venv
- source venv/bin/activate
# Clone dependencies
- MODULE_NAME="aidge_core"
- BASE_URL=`echo $CI_REPOSITORY_URL | sed "s;\/*$CI_PROJECT_PATH.*;;"`
- REPO_URL="$BASE_URL/aidge/$MODULE_NAME.git"
- git clone $REPO_URL $MODULE_NAME
# Pip install dependency
- cd $MODULE_NAME
- python3 -m pip install . -v
- cd ..
- python3 -m pip install . -v
artifacts:
paths:
- venv/
\ No newline at end of file
$ErrorActionPreference = "Stop"
# Retrieve and clean the dependencies string from the environment variable
$AIDGE_DEPENDENCIES = $env:AIDGE_DEPENDENCIES -split ' '
Write-Host "Aidge dependencies : $AIDGE_DEPENDENCIES"
if ( $($AIDGE_DEPENDENCIES.Length) -eq 0) {
Write-Host "- No dependencies provided for current repsitory"
New-Item -ItemType Directory -Force -Path ".\build" | Out-Null
Remove-Item -Path ".\build\*" -Recurse -Force
} else {
Write-Host "Retrieving given dependencies to build current package : $AIDGE_DEPENDENCIES"
foreach ($dep in $($AIDGE_DEPENDENCIES -split " ")) {
Write-Host "Retrieving : $dep"
$curr_loc=$(Get-Location)
Set-Location $dep
Get-Location
Get-ChildItem .
New-Item -Path ".\build" -ItemType Directory -Force | Out-Null
Get-ChildItem -Path ".\build" -File | Remove-Item -Force
python -m pip install . -v
Set-Location $curr_loc
}
}
#!/bin/bash
set -e
if [[ "$1" == "" ]]; then
echo "build aidge deps in cibuildwheel container before building wheel."
echo "search path defines where the dependencies will be searched."
echo "Hint : In wheel containers, files are mounted on /host by default."
echo "\nusage : ./cibuildwheel_build_deps_before_build_wheel.sh $search_path"
fi
set -x
if [[ $AIDGE_DEPENDENCIES == "" ]]; then # case for aidge_core
mkdir -p build # create the build dir if it's not already there to hold the cpp build
rm -rf build/* # build from scratch
else
for repo in $AIDGE_DEPENDENCIES ; do # case for other projects
search_path=$1
REPO_PATH=$(find $search_path ! -writable -prune -o -type d \
-name "$repo" \
-not -path "*/install/*" \
-not -path "*/.git/*" \
-not -path "*/miniconda/*" \
-not -path "*/conda/*" \
-not -path "*/.local/*" \
-not -path "*/lib/*" \
-not -path "*/$repo/$repo/*" \
-not -path "*/proc/*" \
-print -quit)
if [[ -z "$REPO_PATH" ]]; then
echo "ERROR : dependency $repo not found in search_path \"$search_path\". ABORTING."
exit 1
fi
cd $REPO_PATH
mkdir -p build # create the build dir if it's not already there to hold the cpp build
rm -rf build/* # build from scratch
pip install . -v
cd -
done
fi
set +x
set +e
static_analysis:cpp:
stage: static_analysis
tags:
- static_analysis
allow_failure: true
script:
- mkdir -p $CI_COMMIT_REF_NAME
- cppcheck -j 4 --enable=all --inconclusive --force --xml --xml-version=2 . 2> cppcheck-result.xml
- python -m pip install Pygments
- cppcheck-htmlreport --file=cppcheck-result.xml --report-dir=$CI_COMMIT_REF_NAME --source-dir=src
- python3 -m pip install -U cppcheck_codequality
- cppcheck-codequality --input-file=cppcheck-result.xml --output-file=cppcheck.json
- mkdir -p public/cpp
- mv $CI_COMMIT_REF_NAME public/cpp/
artifacts:
paths:
- public
reports:
codequality: cppcheck.json
static_analysis:python:
stage: static_analysis
tags:
- static_analysis
allow_failure: true
script:
- pip install pylint
- pip install pylint-gitlab
- pylint --rcfile=.pylintrc --exit-zero --output-format=pylint_gitlab.GitlabCodeClimateReporter aidge_backend_cpu/ > codeclimate.json
- pylint --rcfile=.pylintrc --exit-zero --output-format=pylint_gitlab.GitlabPagesHtmlReporter aidge_backend_cpu/ > pylint.html
- mkdir -p public/python/$CI_COMMIT_REF_NAME
- mv pylint.html public/python/$CI_COMMIT_REF_NAME/
artifacts:
paths:
- public
reports:
codequality: codeclimate.json
\ No newline at end of file
test:ubuntu_cpp:
stage: test
needs: ["build:ubuntu_cpp"]
tags:
- docker
image: n2d2-ci/ubuntu20.04/cpu:latest
script:
- cd build_cpp
- ctest --output-on-failure
test:ubuntu_python:
stage: test
needs: ["build:ubuntu_python"]
tags:
- docker
image: n2d2-ci/ubuntu20.04/cpu:latest
script:
- source venv/bin/activate
- cd aidge_backend_cpu
- python3 -m pip install numpy
- python3 -m pip list
# Discover and run all tests located in unit_tests/ and discard the stdout
# to only show the errors/warnings and the results of the tests
- python3 -m unittest discover -s unit_tests/ -v -b 1> /dev/null
# Version 0.5.0 (January 31, 2025)
# Version 0.4.0 (December 6, 2024)
# Version 0.2.2 (May 14, 2024)
* Remove implementation for Operators solely handling memory and format
- Concat
- Gather
- Memorize
- Pop
- Reshape
- Slice
- Transpose
* Fix ReLU backward kernel
* Add `showCpuVersion()` function to show which compiler was used
# Version 0.2.1 (April 11, 2024)
Fix: explicit linkage with fmt
# Version 0.2.0 (April 10, 2024)
# Version 0.1.0 (January 23, 2024)
Initial release
cmake_minimum_required(VERSION 3.15)
file(READ "${CMAKE_SOURCE_DIR}/version.txt" version)
file(READ "${CMAKE_SOURCE_DIR}/project_name.txt" project)
cmake_minimum_required(VERSION 3.18)
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
file(STRINGS "${CMAKE_SOURCE_DIR}/version.txt" version)
# Parse version.txt to retrieve Major, Minor and Patch
string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" _ MATCHES ${version})
set(PROJECT_VERSION_MAJOR ${CMAKE_MATCH_1})
set(PROJECT_VERSION_MINOR ${CMAKE_MATCH_2})
set(PROJECT_VERSION_PATCH ${CMAKE_MATCH_3})
project(aidge_backend_cpu
VERSION ${version}
DESCRIPTION "CPU implementations of the operators of aidge framework."
LANGUAGES CXX)
# Retrieve latest git commit
execute_process(
COMMAND git rev-parse --short HEAD
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE GIT_COMMIT_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET
)
message(STATUS "Project name: ${project}")
message(STATUS "Project name: ${CMAKE_PROJECT_NAME}")
message(STATUS "Project version: ${version}")
message(STATUS "Latest git commit: ${GIT_COMMIT_HASH}")
# helper for LSP users
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# Note : project name is {project} and python module name is also {project}
set(module_name _${project}) # target name
# Note : project name is ${CMAKE_PROJECT_NAME} and python module name is also ${CMAKE_PROJECT_NAME}
set(module_name _${CMAKE_PROJECT_NAME}) # target name
set(pybind_module_name ${CMAKE_PROJECT_NAME}) # name of submodule for python bindings
project(${project})
##############################################
# Define options
option(PYBIND "python binding" OFF)
option(WERROR "Warning as error" OFF)
option(TEST "Enable tests" ON)
option(COVERAGE "Enable coverage" OFF)
option(ENABLE_ASAN "Enable ASan (AddressSanitizer) for runtime analysis of memory use (over/underflow, memory leak, ...)" OFF)
##############################################
# Import utils CMakeLists
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake")
include(PybindModuleCreation)
##############################################
# Define options
option(PYBIND "python binding" ON)
option(WERROR "Warning as error" OFF)
if(CMAKE_COMPILER_IS_GNUCXX AND COVERAGE)
Include(CodeCoverage)
endif()
##############################################
# Find system dependencies
# FIND Dependencies
if(NOT $ENV{AIDGE_INSTALL} STREQUAL "")
set(CMAKE_INSTALL_PREFIX $ENV{AIDGE_INSTALL})
list(APPEND CMAKE_PREFIX_PATH $ENV{AIDGE_INSTALL})
message(WARNING "Env var AIDGE_INSTALL detected : $ENV{AIDGE_INSTALL}. Set CMAKE_INSTALL_PREFIX to AIDGE_INSTALL & added to CMAKE_PREFIX_PATH"
"\n\tCMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}"
"\n\tCMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}")
endif()
find_package(aidge_core REQUIRED)
find_package(OpenMP)
find_package(OpenSSL QUIET)
if(OpenSSL_FOUND)
message(STATUS "OpenSSL found: ${OPENSSL_VERSION}")
add_definitions(-DWITH_OPENSSL)
else()
message(WARNING "OpenSSL not found, SHA256 will not be available.")
endif()
##############################################
# Create target and set properties
file(GLOB_RECURSE src_files "src/*.cpp")
file(GLOB_RECURSE inc_files "include/*.hpp")
add_library(${module_name} ${src_files} ${inc_files})
target_link_libraries(${module_name}
PRIVATE
fmt::fmt
PUBLIC
_aidge_core # _ is added because we link the target not the project
_aidge_core # _ is added because we link the exported target and not the project
)
if(OpenMP_CXX_FOUND)
target_link_libraries(${module_name} PRIVATE OpenMP::OpenMP_CXX)
set(AIDGE_REQUIRES_OPENMP TRUE)
endif()
# Add definition _USE_MATH_DEFINES to enable math constant definitions from math.h/cmath.
if (WIN32)
target_compile_definitions(${module_name} PRIVATE _USE_MATH_DEFINES)
endif()
#Set target properties
set_property(TARGET ${module_name} PROPERTY POSITION_INDEPENDENT_CODE ON)
# PYTHON BINDING
if (PYBIND)
# Python binding lib is by default installed in <prefix>/python_packages/<package>/
# When installed from python, setup.py should set it to the python package dir
set(PYBIND_INSTALL_PREFIX python_packages/${pybind_module_name} CACHE PATH "Python package install prefix")
include(PybindModuleCreation)
generate_python_binding(${pybind_module_name} ${module_name})
endif()
if( ${ENABLE_ASAN} )
message("Building ${module_name} with ASAN.")
set(SANITIZE_FLAGS -fsanitize=address -fno-omit-frame-pointer)
target_link_libraries(${module_name}
PUBLIC
-fsanitize=address
)
target_compile_options(${module_name}
PRIVATE
${SANITIZE_FLAGS}
)
endif()
target_include_directories(${module_name}
PUBLIC
$<INSTALL_INTERFACE:include>
@@ -48,87 +132,91 @@ target_include_directories(${module_name}
${CMAKE_CURRENT_SOURCE_DIR}/src
)
# PYTHON BINDING
generate_python_binding(${project} ${module_name})
if (PYBIND)
# Handles Python + pybind11 headers dependencies
target_link_libraries(${module_name}
PUBLIC
pybind11::pybind11
PRIVATE
Python::Python
)
set(AIDGE_REQUIRES_OPENSSL FALSE)
if(OpenSSL_FOUND)
target_link_libraries(${module_name} PRIVATE OpenSSL::SSL OpenSSL::Crypto)
set(AIDGE_REQUIRES_OPENSSL TRUE)
endif()
target_compile_features(${module_name} PRIVATE cxx_std_14)
if(WERROR)
target_compile_options(${module_name} PRIVATE
target_compile_options(${module_name} PRIVATE
$<$<OR:$<CXX_COMPILER_ID:Clang>,$<CXX_COMPILER_ID:AppleClang>,$<CXX_COMPILER_ID:GNU>>:
-Wall -Wextra -Wold-style-cast -Winline -pedantic -Werror=narrowing -Wshadow -Werror>)
target_compile_options(${module_name} PRIVATE
-Wall -Wextra -Wold-style-cast -Winline -pedantic -Werror=narrowing -Wshadow $<$<BOOL:${WERROR}>:-Werror>>)
target_compile_options(${module_name} PRIVATE
$<$<CXX_COMPILER_ID:MSVC>:
/W4>)
else()
target_compile_options(${module_name} PRIVATE
$<$<OR:$<CXX_COMPILER_ID:Clang>,$<CXX_COMPILER_ID:AppleClang>,$<CXX_COMPILER_ID:GNU>>:
-Wall -Wextra -Wold-style-cast -Winline -pedantic -Werror=narrowing -Wshadow -Wpedantic>)
target_compile_options(${module_name} PRIVATE
$<$<CXX_COMPILER_ID:MSVC>:
/W4>)
if(CMAKE_COMPILER_IS_GNUCXX AND COVERAGE)
append_coverage_compiler_flags()
endif()
message(STATUS "Creating ${CMAKE_CURRENT_SOURCE_DIR}/include/aidge/backend/cpu_version.h")
# Generate version.h file from config file version.h.in
configure_file(
"${CMAKE_CURRENT_SOURCE_DIR}/include/aidge/backend/version.h.in"
"${CMAKE_CURRENT_SOURCE_DIR}/include/aidge/backend/cpu_version.h"
)
##############################################
# Installation instructions
include(GNUInstallDirs)
set(INSTALL_CONFIGDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${project})
set(INSTALL_CONFIGDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${CMAKE_PROJECT_NAME})
install(TARGETS ${module_name} EXPORT ${project}-targets
install(TARGETS ${module_name} EXPORT ${CMAKE_PROJECT_NAME}-targets
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
)
install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
#Export the targets to a script
if (PYBIND)
install(TARGETS ${pybind_module_name}
DESTINATION ${PYBIND_INSTALL_PREFIX}
)
endif()
install(EXPORT ${project}-targets
FILE "${project}-targets.cmake"
#Export the targets to a script
install(EXPORT ${CMAKE_PROJECT_NAME}-targets
FILE "${CMAKE_PROJECT_NAME}-targets.cmake"
DESTINATION ${INSTALL_CONFIGDIR}
COMPONENT ${module_name}
)
COMPONENT ${module_name}
)
#Create a ConfigVersion.cmake file
include(CMakePackageConfigHelpers)
write_basic_package_version_file(
"${CMAKE_CURRENT_BINARY_DIR}/${project}-config-version.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config-version.cmake"
VERSION ${version}
COMPATIBILITY AnyNewerVersion
)
configure_package_config_file("${project}-config.cmake.in"
"${CMAKE_CURRENT_BINARY_DIR}/${project}-config.cmake"
configure_package_config_file("${CMAKE_PROJECT_NAME}-config.cmake.in"
"${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config.cmake"
INSTALL_DESTINATION ${INSTALL_CONFIGDIR}
)
#Install the config, configversion and custom find modules
install(FILES
"${CMAKE_CURRENT_BINARY_DIR}/${project}-config.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/${project}-config-version.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config-version.cmake"
DESTINATION ${INSTALL_CONFIGDIR}
)
##############################################
## Exporting from the build tree
export(EXPORT ${project}-targets
FILE "${CMAKE_CURRENT_BINARY_DIR}/${project}-targets.cmake")
message(STATUS "Exporting created targets to use them in another build")
export(EXPORT ${CMAKE_PROJECT_NAME}-targets
FILE "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-targets.cmake")
##############################################
## Add test
enable_testing()
add_subdirectory(unit_tests)
if(TEST)
if (AIDGE_REQUIRES_PYTHON AND NOT AIDGE_PYTHON_HAS_EMBED)
message(WARNING "Skipping compilation of tests: missing Python embedded interpreter")
else()
enable_testing()
add_subdirectory(unit_tests)
endif()
endif()
include README.md LICENSE
recursive-include aidge_backend_cpu *.py
recursive-exclude aidge_backend_cpu/unit_tests *.py
recursive-include include *.hpp
recursive-include src *.cpp
recursive-include python_binding *.cpp
include CMakeLists.txt
![Pipeline status](https://gitlab.eclipse.org/eclipse/aidge/aidge_backend_cpu/badges/main/pipeline.svg?ignore_skipped=true) ![C++ coverage](https://gitlab.eclipse.org/eclipse/aidge/aidge_backend_cpu/badges/main/coverage.svg?job=coverage:ubuntu_cpp&key_text=C%2B%2B+coverage&key_width=90) ![Python coverage](https://gitlab.eclipse.org/eclipse/aidge/aidge_backend_cpu/badges/main/coverage.svg?job=coverage:ubuntu_python&key_text=Python+coverage&key_width=100)
# Aidge CPU library
You can find in this folder the library that implements the CPU operators. <br>
Those operators can be used on any machine with a Linux OS.
Make sure the following requirements are met before using this library:
- GCC
- Make
- CMake
- aidge_core
- Python (optional, if you do not intend to use this library from Python through pybind)
[TOC]
## Installation
### Dependencies
- `GCC`
- `Make`/`Ninja`
- `CMake`
- `Python` (optional, if you do not intend to use this library from Python through pybind)
## Pip installation
#### Aidge dependencies
- `aidge_core`
You will need to install the aidge_core library before installing aidge_backend_cpu.
Also, make sure that the install path was set before installing the aidge_core library.
Then run in your Python environment:
### Pip installation
``` bash
pip install . -v
```
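If `aidge_core` is not installed yet, the two installs can be chained as in the sketch below (the clone URL matches the Core Library link further down; paths are illustrative and should be adapted to your setup):
``` bash
# Sketch: install the aidge_core dependency first, then this package.
git clone https://gitlab.eclipse.org/eclipse/aidge/aidge_core.git
(cd aidge_core && pip install . -v)
# then, from the root of this repository:
pip install . -v
```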
> **TIPS :** Use environment variables to change compilation options :
> - `AIDGE_INSTALL` : to set the installation folder. Defaults to `<python_prefix>/lib/libAidge`. :warning: This path must be identical to aidge_core install path.
> - `AIDGE_PYTHON_BUILD_TYPE` : to set the compilation mode to **Debug** or **Release** or "" (for default flags). Defaults to **Release**.
> - `AIDGE_BUILD_GEN` : to set the build backend (for development mode) or "" for the cmake default. Defaults to "". A sketch combining these variables follows below.
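As an illustration of these variables, a minimal sketch (the values are assumptions; for instance `Ninja` is only valid as a build backend if it is installed):
``` bash
export AIDGE_INSTALL=$HOME/aidge_install   # must match the aidge_core install path
export AIDGE_PYTHON_BUILD_TYPE=Release     # or Debug, or "" for the default flags
export AIDGE_BUILD_GEN=Ninja               # build backend for development mode, "" for the cmake default
pip install . -v
```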
## Standard C++ Compilation
## Pip installation for development
You will need to compile the Core library before compiling the CPU one.
The makefile is designed to do it for you.
To setup using pip in development (or editable mode), use the `--no-build-isolation -e` options to pip.
To only compile the CPU library, run
```
make cpu_only
```
For instance, run the following commands in your Python environment for a typical setup:
``` bash
export AIDGE_PYTHON_BUILD_TYPE= # default flags (no debug info but fastest build time)
export AIDGE_PYTHON_BUILD_TYPE=Debug # or if one really needs to debug the C++ code
pip install -U pip setuptools setuptools_scm[toml] cmake # Pre-install build requirements (refer to the pyproject.toml [build-system] section)
pip install -v --no-build-isolation -e .
```
To compile the CPU library + the associated unitary tests, run
```
make cpu_tests
```
Refer to `aidge_core/README.md` for more details on development build options.
To compile the CPU library with the python binding, run
```
make cpu_with_pybind
```
Important: this command can also be run with `make`.
### Standard C++ Compilation
You will need to compile and install the [Core Library](https://gitlab.eclipse.org/eclipse/aidge/aidge_core) before compiling the CPU one.
Once this has been done, you'll need to run CMake with the
`CMAKE_INSTALL_PREFIX:PATH` flag, in order to indicate to CMake where
`aidge_core` has been installed:
```sh
cmake -DCMAKE_INSTALL_PREFIX:PATH=$(path_to_install_folder) $(CMAKE PARAMETERS) $(project_root)
make all
```
To compile the CPU library with the python binding + the associated unitary tests, run
```
make cpu_with_pybind_tests
```
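For a concrete out-of-source build, here is a sketch adapted from the `build:ubuntu_cpp` CI job above (the install path and build type are illustrative):
```sh
export INSTALL_PATH=$PWD/install_cpp
mkdir -p build_cpp && cd build_cpp
cmake -DCMAKE_INSTALL_PREFIX:PATH=$INSTALL_PATH -DCMAKE_BUILD_TYPE=Release ..
make -j4 all install
```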
\ No newline at end of file
More detailed information is available in the [Aidge User Guide](https://eclipse.dev/aidge/source/GetStarted/install.html)
@PACKAGE_INIT@
include(CMakeFindDependencyMacro)
find_dependency(aidge_core)
set(AIDGE_REQUIRES_OPENMP @AIDGE_REQUIRES_OPENMP@)
if (AIDGE_REQUIRES_OPENMP)
find_dependency(OpenMP)
endif()
set(AIDGE_REQUIRES_OPENSSL @AIDGE_REQUIRES_OPENSSL@)
if (AIDGE_REQUIRES_OPENSSL)
find_dependency(OpenSSL)
endif()
include(CMakeFindDependencyMacro)
include(${CMAKE_CURRENT_LIST_DIR}/aidge_backend_cpu-config-version.cmake)
include(${CMAKE_CURRENT_LIST_DIR}/aidge_backend_cpu-targets.cmake)
from aidge_backend_cpu.aidge_backend_cpu import * # import so generated by PyBind
\ No newline at end of file
import aidge_core
from aidge_backend_cpu.aidge_backend_cpu import * # import so generated by PyBind
from . import benchmark
import time
import numpy as np
import aidge_core
def prepare_model_scheduler_inputs(model: aidge_core.GraphView, input_data: list[tuple[str, np.ndarray]]) -> tuple[aidge_core.GraphView, aidge_core.SequentialScheduler, list[aidge_core.Tensor]]:
# update model and inputs backend
model.set_backend("cpu")
ordered_inputs = [aidge_core.Tensor(i[1]) for i in input_data]
for ordered_input in ordered_inputs:
ordered_input.set_backend("cpu")
scheduler = aidge_core.SequentialScheduler(model)
scheduler.generate_scheduling()
return model, scheduler, ordered_inputs
def measure_inference_time(model: aidge_core.GraphView, input_data: list[tuple[str, np.ndarray]], nb_warmup: int = 10, nb_iterations: int = 50) -> list[float]:
model, scheduler, ordered_inputs = prepare_model_scheduler_inputs(model, input_data)
timings = []
# Warm-up runs, then timed iterations.
for i in range(nb_warmup + nb_iterations):
if i < nb_warmup:
scheduler.forward(forward_dims=False, data=ordered_inputs)
else:
start = time.process_time()
scheduler.forward(forward_dims=False, data=ordered_inputs)
end = time.process_time()
timings.append((end - start))
return timings
def compute_output(model: aidge_core.GraphView, input_data: list[tuple[str, np.ndarray]]) -> list[np.ndarray]:
model, scheduler, ordered_inputs = prepare_model_scheduler_inputs(model, input_data)
scheduler.forward(forward_dims=False, data=ordered_inputs)
return [np.array(t[0].get_operator().get_output(t[1])) for t in model.get_ordered_outputs()]
\ No newline at end of file
"""
Copyright (c) 2023 CEA-List
This program and the accompanying materials are made available under the
terms of the Eclipse Public License 2.0 which is available at
http://www.eclipse.org/legal/epl-2.0.
SPDX-License-Identifier: EPL-2.0
"""
import unittest
import aidge_core
import aidge_backend_cpu
from functools import reduce
import numpy as np
class test_recipes(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_fuse_batchnorm(self):
dims = [1, 1, 10, 10]
size = reduce((lambda x, y: x*y), dims)
input_data = np.arange(size).reshape(dims).astype(np.float32)
input_tensor = aidge_core.Tensor(input_data)
input_node = aidge_core.Producer(input_tensor, "X")
conv = aidge_core.Conv2D(1, 1, [3, 3], name="Conv0")
bn = aidge_core.BatchNorm2D(1, name="Add0")
graph_view = aidge_core.sequential([input_node, conv, bn])
# Add random values to conv and BatchNorm parameters
graph_view.set_datatype(aidge_core.dtype.float32)
graph_view.set_backend("cpu")
np_weights = np.arange(9).reshape([1, 1, 3, 3]).astype(np.float32)
np_bias = np.arange(1).reshape([1]).astype(np.float32)
np_scale = np.array([0.05]).astype(np.float32)
np_shift = np.array([0.05]).astype(np.float32)
np_mean = np.array([0.05]).astype(np.float32)
np_var = np.array([0.05]).astype(np.float32)
conv.input(1)[0].get_operator().set_output(0, aidge_core.Tensor(np_weights))
conv.input(2)[0].get_operator().set_output(0, aidge_core.Tensor(np_bias))
bn.input(1)[0].get_operator().set_output(0, aidge_core.Tensor(np_scale))
bn.input(2)[0].get_operator().set_output(0, aidge_core.Tensor(np_shift))
bn.input(3)[0].get_operator().set_output(0, aidge_core.Tensor(np_mean))
bn.input(4)[0].get_operator().set_output(0, aidge_core.Tensor(np_var))
scheduler0 = aidge_core.SequentialScheduler(graph_view)
scheduler0.forward()
for outNode in graph_view.get_output_nodes():
output_aidge0 = outNode.get_operator().get_output(0)
aidge_core.fuse_batchnorm(graph_view)
scheduler1 = aidge_core.SequentialScheduler(graph_view)
scheduler1.forward()
for outNode in graph_view.get_output_nodes():
output_aidge1 = outNode.get_operator().get_output(0)
self.assertTrue(aidge_core.approx_eq(output_aidge0, output_aidge1, 0.000001, 0.0001))
if __name__ == '__main__':
unittest.main()
@@ -13,30 +13,79 @@ class test_scheduler(unittest.TestCase):
pass
def test_relu_forward(self):
values = np.arange(6) - 3
input_node = aidge_core.Producer(aidge_core.Tensor(values), "Input")
t = aidge_core.Tensor(np.arange(6, dtype=np.int32) - 3)
input_node = aidge_core.Producer(t)
relu = aidge_core.ReLU()
input_node.add_child(relu)
gv = aidge_core.GraphView()
gv.add(relu)
gv.add(input_node)
gv.set_datatype(aidge_core.DataType.Int32)
gv.set_backend("cpu")
input_node.add_child(relu)
gv.set_datatype(aidge_core.dtype.int32)
gv.set_backend("cpu")
scheduler = aidge_core.SequentialScheduler(gv)
scheduler.forward()
out_tensor = relu.get_operator().output(0)
out_tensor = relu.get_operator().get_output(0)
expected_out = [0,0,0,0,1,2]
for i in range(len(expected_out)):
self.assertEqual(expected_out[i], out_tensor[i])
self.assertEqual(expected_out[i], out_tensor[i], f"On idx {i}")
def test_sequential_scheduling(self):
input_data = np.array([0]).astype(np.float32)
input_tensor = aidge_core.Tensor(input_data)
graph_view = aidge_core.sequential([
aidge_core.Producer(input_tensor, "X"),
aidge_core.FC(1, 50, name='0'),
aidge_core.FC(50, 50, name='1'),
aidge_core.FC(50, 10, name='2'),
])
EXPECTED_SCHEDULE = ['0', '1', '2']
graph_view.set_datatype(aidge_core.dtype.float32)
graph_view.set_backend("cpu")
graph_view.forward_dims()
scheduler = aidge_core.SequentialScheduler(graph_view)
scheduler.generate_scheduling()
self.assertEqual(len(scheduler.get_sequential_static_scheduling()), 10)
# Do not care about the order of execution of the producers
self.assertListEqual([i.name() for i in scheduler.get_sequential_static_scheduling()[-3:]], EXPECTED_SCHEDULE)
def test_parallel_scheduling(self):
input_data = np.array([0]).astype(np.float32)
input_tensor = aidge_core.Tensor(input_data)
graph_view = aidge_core.sequential([
aidge_core.Producer(input_tensor, "X"),
aidge_core.FC(1, 50, name='0'),
aidge_core.parallel([aidge_core.FC(50, 50, name='1'), aidge_core.FC(50, 50, name='3')]),
aidge_core.Add(name='2'),
])
EXPECTED_SCHEDULE = [['0', '1', '3', '2'], ['0', '3', '1', '2']] # Both scheduling are valid !
graph_view.set_datatype(aidge_core.dtype.float32)
graph_view.set_backend("cpu")
graph_view.forward_dims()
scheduler = aidge_core.SequentialScheduler(graph_view)
scheduler.generate_scheduling()
self.assertEqual(len(scheduler.get_sequential_static_scheduling()), 11)
# Do not care about the order of execution of the producers
self.assertTrue([i.name() for i in scheduler.get_sequential_static_scheduling()[-4:]] in EXPECTED_SCHEDULE)
if __name__ == '__main__':
unittest.main()
import unittest
import aidge_core
import aidge_backend_cpu
import numpy as np
class test_tensor(unittest.TestCase):
"""Test tensor binding
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_getavailable_backends(self):
self.assertTrue("cpu" in aidge_core.Tensor.get_available_backends())
def test_numpy_int_to_tensor(self):
np_array = np.arange(9).reshape(1,1,3,3)
# Numpy -> Tensor
t = aidge_core.Tensor(np_array)
self.assertEqual(t.dtype(), aidge_core.DataType.Int32)
for i_t, i_n in zip(t, np_array.flatten()):
self.assertTrue(i_t == i_n)
for i,j in zip(t.dims(), np_array.shape):
self.assertEqual(i,j)
def test_tensor_int_to_numpy(self):
np_array = np.arange(9).reshape(1,1,3,3)
# Numpy -> Tensor
t = aidge_core.Tensor(np_array)
# Tensor -> Numpy
nnarray = np.array(t)
for i_nn, i_n in zip(nnarray.flatten(), np_array.flatten()):
self.assertTrue(i_nn == i_n)
for i,j in zip(t.dims(), nnarray.shape):
self.assertEqual(i,j)
def test_numpy_float_to_tensor(self):
t = aidge_core.Tensor()
np_array = np.random.rand(1, 1, 3, 3).astype(np.float32)
# Numpy -> Tensor
t = aidge_core.Tensor(np_array)
self.assertEqual(t.dtype(), aidge_core.DataType.Float32)
for i_t, i_n in zip(t, np_array.flatten()):
self.assertTrue(i_t == i_n) # TODO : May need to change this to a difference
for i,j in zip(t.dims(), np_array.shape):
self.assertEqual(i,j)
if __name__ == '__main__':
unittest.main()
# Copyright (c) 2012 - 2017, Lars Bilke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke
# - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg
# - Added support for Clang.
# - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke
# - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke
# - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych
# - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana)
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
# of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
# - Set lcov basedir with -b argument
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
# overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
# - Delete output dir, .info file on 'make clean'
# - Remove Python detection, since version mismatches will break gcovr
# - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana)
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe
# - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana)
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
# in EXCLUDEs, and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
# flags to the gcovr command
#
# 2020-05-04, Mihchael Davis
# - Add -fprofile-abs-path to make gcno files contain absolute paths
# - Fix BASE_DIRECTORY not working when defined
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump
# - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
# This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller
# - fix append_coverage_compiler_flags_to_target to correctly add flags
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
# using a CMake option() to enable it just optionally):
# include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
# append_coverage_compiler_flags()
# Or for specific target:
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 4. If you need to exclude additional directories from the report, specify them
# using full paths in the COVERAGE_EXCLUDES variable before calling
# setup_target_for_coverage_*().
# Example:
# set(COVERAGE_EXCLUDES
# '${PROJECT_SOURCE_DIR}/src/dir1/*'
# '/path/to/my/src/dir2/*')
# Or, use the EXCLUDE argument to setup_target_for_coverage_*().
# Example:
# setup_target_for_coverage_lcov(
# NAME coverage
# EXECUTABLE testrunner
# EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
# relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
# Example:
# set(COVERAGE_EXCLUDES "dir1/*")
# setup_target_for_coverage_gcovr_html(
# NAME coverage
# EXECUTABLE testrunner
# BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
# EXCLUDE "dir2/*")
#
# 5. Use the functions described below to create a custom make target which
# runs your test executable and produces a code coverage report.
#
# 6. Build a Debug build:
# cmake -DCMAKE_BUILD_TYPE=Debug ..
# make
# make my_coverage_target
#
include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
# Check prereqs
find_program( GCOV_PATH gcov )
find_program( LCOV_PATH NAMES lcov lcov.bat lcov.exe lcov.perl)
find_program( FASTCOV_PATH NAMES fastcov fastcov.py )
find_program( GENHTML_PATH NAMES genhtml genhtml.perl genhtml.bat )
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
find_program( CPPFILT_PATH NAMES c++filt )
if(NOT GCOV_PATH)
message(FATAL_ERROR "gcov not found! Aborting...")
endif() # NOT GCOV_PATH
# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES})
if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif()
elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif()
endforeach()
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
check_cxx_compiler_flag(-fprofile-abs-path HAVE_fprofile_abs_path)
if(HAVE_fprofile_abs_path)
set(COVERAGE_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
endif()
set(CMAKE_Fortran_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C++ compiler during coverage builds."
FORCE )
set(CMAKE_C_FLAGS_COVERAGE
${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C compiler during coverage builds."
FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used for linking binaries during coverage builds."
FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
""
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
FORCE )
mark_as_advanced(
CMAKE_Fortran_FLAGS_COVERAGE
CMAKE_CXX_FLAGS_COVERAGE
CMAKE_C_FLAGS_COVERAGE
CMAKE_EXE_LINKER_FLAGS_COVERAGE
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
link_libraries(gcov)
endif()
# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_lcov(
# NAME testrunner_coverage # New target name
# EXECUTABLE testrunner -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
# DEPENDENCIES testrunner # Dependencies to build first
# BASE_DIRECTORY "../" # Base directory for report
# # (defaults to PROJECT_SOURCE_DIR)
# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative
# # to BASE_DIRECTORY, with CMake 3.4+)
# NO_DEMANGLE # Don't demangle C++ symbols
# # even if c++filt is found
# )
function(setup_target_for_coverage_lcov)
set(options NO_DEMANGLE SONARQUBE)
set(oneValueArgs BASE_DIRECTORY NAME)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES LCOV_ARGS GENHTML_ARGS)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(NOT LCOV_PATH)
message(FATAL_ERROR "lcov not found! Aborting...")
endif() # NOT LCOV_PATH
if(NOT GENHTML_PATH)
message(FATAL_ERROR "genhtml not found! Aborting...")
endif() # NOT GENHTML_PATH
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths)
set(LCOV_EXCLUDES "")
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_LCOV_EXCLUDES})
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif()
list(APPEND LCOV_EXCLUDES "${EXCLUDE}")
endforeach()
list(REMOVE_DUPLICATES LCOV_EXCLUDES)
# Conditional arguments
if(CPPFILT_PATH AND NOT ${Coverage_NO_DEMANGLE})
set(GENHTML_EXTRA_ARGS "--demangle-cpp")
endif()
# Setting up commands which will be run to generate coverage data.
# Cleanup lcov
set(LCOV_CLEAN_CMD
${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -directory .
-b ${BASEDIR} --zerocounters
)
# Create baseline to make sure untouched files show up in the report
set(LCOV_BASELINE_CMD
${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -c -i -d . -b
${BASEDIR} -o ${Coverage_NAME}.base
)
# Run tests
set(LCOV_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# Capturing lcov counters and generating report
set(LCOV_CAPTURE_CMD
${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} --directory . -b
${BASEDIR} --capture --output-file ${Coverage_NAME}.capture
)
# add baseline counters
set(LCOV_BASELINE_COUNT_CMD
${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -a ${Coverage_NAME}.base
-a ${Coverage_NAME}.capture --output-file ${Coverage_NAME}.total
)
# filter collected data to final coverage report
set(LCOV_FILTER_CMD
${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} --remove
${Coverage_NAME}.total ${LCOV_EXCLUDES} --output-file ${Coverage_NAME}.info
)
# Generate HTML output
set(LCOV_GEN_HTML_CMD
${GENHTML_PATH} ${GENHTML_EXTRA_ARGS} ${Coverage_GENHTML_ARGS} -o
${Coverage_NAME} ${Coverage_NAME}.info
)
if(${Coverage_SONARQUBE})
# Generate SonarQube output
set(GCOVR_XML_CMD
${GCOVR_PATH} --sonarqube ${Coverage_NAME}_sonarqube.xml -r ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS} --object-directory=${PROJECT_BINARY_DIR}
)
set(GCOVR_XML_CMD_COMMAND
COMMAND ${GCOVR_XML_CMD}
)
set(GCOVR_XML_CMD_BYPRODUCTS ${Coverage_NAME}_sonarqube.xml)
set(GCOVR_XML_CMD_COMMENT COMMENT "SonarQube code coverage info report saved in ${Coverage_NAME}_sonarqube.xml.")
endif()
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
message(STATUS "Command to clean up lcov: ")
string(REPLACE ";" " " LCOV_CLEAN_CMD_SPACED "${LCOV_CLEAN_CMD}")
message(STATUS "${LCOV_CLEAN_CMD_SPACED}")
message(STATUS "Command to create baseline: ")
string(REPLACE ";" " " LCOV_BASELINE_CMD_SPACED "${LCOV_BASELINE_CMD}")
message(STATUS "${LCOV_BASELINE_CMD_SPACED}")
message(STATUS "Command to run the tests: ")
string(REPLACE ";" " " LCOV_EXEC_TESTS_CMD_SPACED "${LCOV_EXEC_TESTS_CMD}")
message(STATUS "${LCOV_EXEC_TESTS_CMD_SPACED}")
message(STATUS "Command to capture counters and generate report: ")
string(REPLACE ";" " " LCOV_CAPTURE_CMD_SPACED "${LCOV_CAPTURE_CMD}")
message(STATUS "${LCOV_CAPTURE_CMD_SPACED}")
message(STATUS "Command to add baseline counters: ")
string(REPLACE ";" " " LCOV_BASELINE_COUNT_CMD_SPACED "${LCOV_BASELINE_COUNT_CMD}")
message(STATUS "${LCOV_BASELINE_COUNT_CMD_SPACED}")
message(STATUS "Command to filter collected data: ")
string(REPLACE ";" " " LCOV_FILTER_CMD_SPACED "${LCOV_FILTER_CMD}")
message(STATUS "${LCOV_FILTER_CMD_SPACED}")
message(STATUS "Command to generate lcov HTML output: ")
string(REPLACE ";" " " LCOV_GEN_HTML_CMD_SPACED "${LCOV_GEN_HTML_CMD}")
message(STATUS "${LCOV_GEN_HTML_CMD_SPACED}")
if(${Coverage_SONARQUBE})
message(STATUS "Command to generate SonarQube XML output: ")
string(REPLACE ";" " " GCOVR_XML_CMD_SPACED "${GCOVR_XML_CMD}")
message(STATUS "${GCOVR_XML_CMD_SPACED}")
endif()
endif()
# Setup target
add_custom_target(${Coverage_NAME}
COMMAND ${LCOV_CLEAN_CMD}
COMMAND ${LCOV_BASELINE_CMD}
COMMAND ${LCOV_EXEC_TESTS_CMD}
COMMAND ${LCOV_CAPTURE_CMD}
COMMAND ${LCOV_BASELINE_COUNT_CMD}
COMMAND ${LCOV_FILTER_CMD}
COMMAND ${LCOV_GEN_HTML_CMD}
${GCOVR_XML_CMD_COMMAND}
# Set output files as GENERATED (will be removed on 'make clean')
BYPRODUCTS
${Coverage_NAME}.base
${Coverage_NAME}.capture
${Coverage_NAME}.total
${Coverage_NAME}.info
${GCOVR_XML_CMD_BYPRODUCTS}
${Coverage_NAME}/index.html
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report."
)
# Show where to find the lcov info report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ;
COMMENT "Lcov code coverage info report saved in ${Coverage_NAME}.info."
${GCOVR_XML_CMD_COMMENT}
)
# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ;
COMMENT "Open ./${Coverage_NAME}/index.html in your browser to view the coverage report."
)
endfunction() # setup_target_for_coverage_lcov
# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr_xml(
# NAME ctest_coverage # New target name
# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
# DEPENDENCIES executable_target # Dependencies to build first
# BASE_DIRECTORY "../" # Base directory for report
# # (defaults to PROJECT_SOURCE_DIR)
# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative
# # to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCOVR command.
function(setup_target_for_coverage_gcovr_xml)
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(NOT GCOVR_PATH)
message(FATAL_ERROR "gcovr not found! Aborting...")
endif() # NOT GCOVR_PATH
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "")
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
# Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "")
foreach(EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach()
# Set up commands which will be run to generate coverage data
# Run tests
set(GCOVR_XML_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# Running gcovr
set(GCOVR_XML_CMD
${GCOVR_PATH} --xml ${Coverage_NAME}.xml -r ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS} --object-directory=${PROJECT_BINARY_DIR}
)
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_XML_EXEC_TESTS_CMD_SPACED "${GCOVR_XML_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_XML_EXEC_TESTS_CMD_SPACED}")
message(STATUS "Command to generate gcovr XML coverage data: ")
string(REPLACE ";" " " GCOVR_XML_CMD_SPACED "${GCOVR_XML_CMD}")
message(STATUS "${GCOVR_XML_CMD_SPACED}")
endif()
add_custom_target(${Coverage_NAME}
COMMAND ${GCOVR_XML_EXEC_TESTS_CMD}
COMMAND ${GCOVR_XML_CMD}
BYPRODUCTS ${Coverage_NAME}.xml
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce Cobertura code coverage report."
)
# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ;
COMMENT "Cobertura code coverage report saved in ${Coverage_NAME}.xml."
)
endfunction() # setup_target_for_coverage_gcovr_xml
# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr_html(
# NAME ctest_coverage # New target name
# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
# DEPENDENCIES executable_target # Dependencies to build first
# BASE_DIRECTORY "../" # Base directory for report
# # (defaults to PROJECT_SOURCE_DIR)
# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative
# # to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCOVR command.
function(setup_target_for_coverage_gcovr_html)
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(NOT GCOVR_PATH)
message(FATAL_ERROR "gcovr not found! Aborting...")
endif() # NOT GCOVR_PATH
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if(DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif()
# Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "")
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
# Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "")
foreach(EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach()
# Set up commands which will be run to generate coverage data
# Run tests
set(GCOVR_HTML_EXEC_TESTS_CMD
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
)
# Create folder
set(GCOVR_HTML_FOLDER_CMD
${CMAKE_COMMAND} -E make_directory ${PROJECT_BINARY_DIR}/${Coverage_NAME}
)
# Running gcovr
set(GCOVR_HTML_CMD
${GCOVR_PATH} --html ${Coverage_NAME}/index.html --html-details -r ${BASEDIR} ${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS} --object-directory=${PROJECT_BINARY_DIR}
)
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_HTML_EXEC_TESTS_CMD_SPACED "${GCOVR_HTML_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_HTML_EXEC_TESTS_CMD_SPACED}")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_HTML_FOLDER_CMD_SPACED "${GCOVR_HTML_FOLDER_CMD}")
message(STATUS "${GCOVR_HTML_FOLDER_CMD_SPACED}")
message(STATUS "Command to generate gcovr HTML coverage data: ")
string(REPLACE ";" " " GCOVR_HTML_CMD_SPACED "${GCOVR_HTML_CMD}")
message(STATUS "${GCOVR_HTML_CMD_SPACED}")
endif()
add_custom_target(${Coverage_NAME}
COMMAND ${GCOVR_HTML_EXEC_TESTS_CMD}
COMMAND ${GCOVR_HTML_FOLDER_CMD}
COMMAND ${GCOVR_HTML_CMD}
BYPRODUCTS ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html # report directory
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce HTML code coverage report."
)
# Show info where to find the report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ;
COMMENT "Open ./${Coverage_NAME}/index.html in your browser to view the coverage report."
)
endfunction() # setup_target_for_coverage_gcovr_html
# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_fastcov(
# NAME testrunner_coverage # New target name
# EXECUTABLE testrunner -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
# DEPENDENCIES testrunner # Dependencies to build first
# BASE_DIRECTORY "../" # Base directory for report
# # (defaults to PROJECT_SOURCE_DIR)
# EXCLUDE "src/dir1/" "src/dir2/" # Patterns to exclude.
# NO_DEMANGLE # Don't demangle C++ symbols
# # even if c++filt is found
# SKIP_HTML # Don't create html report
# POST_CMD perl -i -pe s!${PROJECT_SOURCE_DIR}/!!g ctest_coverage.json # E.g. for stripping source dir from file paths
# )
function(setup_target_for_coverage_fastcov)
set(options NO_DEMANGLE SKIP_HTML)
set(oneValueArgs BASE_DIRECTORY NAME)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES FASTCOV_ARGS GENHTML_ARGS POST_CMD)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(NOT FASTCOV_PATH)
message(FATAL_ERROR "fastcov not found! Aborting...")
endif()
if(NOT Coverage_SKIP_HTML AND NOT GENHTML_PATH)
message(FATAL_ERROR "genhtml not found! Aborting...")
endif()
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if(Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif()
# Collect excludes (Patterns, not paths, for fastcov)
set(FASTCOV_EXCLUDES "")
foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_FASTCOV_EXCLUDES})
list(APPEND FASTCOV_EXCLUDES "${EXCLUDE}")
endforeach()
list(REMOVE_DUPLICATES FASTCOV_EXCLUDES)
# Conditional arguments
if(CPPFILT_PATH AND NOT ${Coverage_NO_DEMANGLE})
set(GENHTML_EXTRA_ARGS "--demangle-cpp")
endif()
# Set up commands which will be run to generate coverage data
set(FASTCOV_EXEC_TESTS_CMD ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS})
set(FASTCOV_CAPTURE_CMD ${FASTCOV_PATH} ${Coverage_FASTCOV_ARGS} --gcov ${GCOV_PATH}
--search-directory ${BASEDIR}
--process-gcno
--output ${Coverage_NAME}.json
--exclude ${FASTCOV_EXCLUDES}
)
set(FASTCOV_CONVERT_CMD ${FASTCOV_PATH}
-C ${Coverage_NAME}.json --lcov --output ${Coverage_NAME}.info
)
if(Coverage_SKIP_HTML)
set(FASTCOV_HTML_CMD ";")
else()
set(FASTCOV_HTML_CMD ${GENHTML_PATH} ${GENHTML_EXTRA_ARGS} ${Coverage_GENHTML_ARGS}
-o ${Coverage_NAME} ${Coverage_NAME}.info
)
endif()
set(FASTCOV_POST_CMD ";")
if(Coverage_POST_CMD)
set(FASTCOV_POST_CMD ${Coverage_POST_CMD})
endif()
if(CODE_COVERAGE_VERBOSE)
message(STATUS "Code coverage commands for target ${Coverage_NAME} (fastcov):")
message(" Running tests:")
string(REPLACE ";" " " FASTCOV_EXEC_TESTS_CMD_SPACED "${FASTCOV_EXEC_TESTS_CMD}")
message(" ${FASTCOV_EXEC_TESTS_CMD_SPACED}")
message(" Capturing fastcov counters and generating report:")
string(REPLACE ";" " " FASTCOV_CAPTURE_CMD_SPACED "${FASTCOV_CAPTURE_CMD}")
message(" ${FASTCOV_CAPTURE_CMD_SPACED}")
message(" Converting fastcov .json to lcov .info:")
string(REPLACE ";" " " FASTCOV_CONVERT_CMD_SPACED "${FASTCOV_CONVERT_CMD}")
message(" ${FASTCOV_CONVERT_CMD_SPACED}")
if(NOT Coverage_SKIP_HTML)
message(" Generating HTML report: ")
string(REPLACE ";" " " FASTCOV_HTML_CMD_SPACED "${FASTCOV_HTML_CMD}")
message(" ${FASTCOV_HTML_CMD_SPACED}")
endif()
if(Coverage_POST_CMD)
message(" Running post command: ")
string(REPLACE ";" " " FASTCOV_POST_CMD_SPACED "${FASTCOV_POST_CMD}")
message(" ${FASTCOV_POST_CMD_SPACED}")
endif()
endif()
# Setup target
add_custom_target(${Coverage_NAME}
# Cleanup fastcov
COMMAND ${FASTCOV_PATH} ${Coverage_FASTCOV_ARGS} --gcov ${GCOV_PATH}
--search-directory ${BASEDIR}
--zerocounters
COMMAND ${FASTCOV_EXEC_TESTS_CMD}
COMMAND ${FASTCOV_CAPTURE_CMD}
COMMAND ${FASTCOV_CONVERT_CMD}
COMMAND ${FASTCOV_HTML_CMD}
COMMAND ${FASTCOV_POST_CMD}
# Set output files as GENERATED (will be removed on 'make clean')
BYPRODUCTS
${Coverage_NAME}.info
${Coverage_NAME}.json
${Coverage_NAME}/index.html # report directory
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Resetting code coverage counters to zero. Processing code coverage counters and generating report."
)
set(INFO_MSG "fastcov code coverage info report saved in ${Coverage_NAME}.info and ${Coverage_NAME}.json.")
if(NOT Coverage_SKIP_HTML)
string(APPEND INFO_MSG " Open ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html in your browser to view the coverage report.")
endif()
# Show where to find the fastcov info report
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
COMMAND ${CMAKE_COMMAND} -E echo ${INFO_MSG}
)
endfunction() # setup_target_for_coverage_fastcov
function(append_coverage_compiler_flags)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags
# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
target_compile_options(${name} PRIVATE ${_flag_list})
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
target_link_libraries(${name} PRIVATE gcov)
endif()
endfunction()
function(generate_python_binding name target_to_bind)
if (PYBIND)
add_definitions(-DPYBIND)
Include(FetchContent)
function(generate_python_binding pybind_module_name target_to_bind)
FetchContent_Declare(
find_package(Python COMPONENTS Interpreter Development.Module)
Include(FetchContent)
set(PYBIND_VERSION v2.13.6)
message(STATUS "Retrieving pybind ${PYBIND_VERSION} from git")
FetchContent_Declare(
PyBind11
GIT_REPOSITORY https://github.com/pybind/pybind11.git
GIT_TAG v2.10.4 # or a later release
)
GIT_TAG ${PYBIND_VERSION} # or a later release
)
# Use the New FindPython mode, recommended. Requires CMake 3.15+
find_package(Python COMPONENTS Interpreter Development)
FetchContent_MakeAvailable(PyBind11)
FetchContent_MakeAvailable(PyBind11)
message(STATUS "Creating binding for module ${name}")
file(GLOB_RECURSE pybind_src_files "python_binding/*.cpp")
message(STATUS "Creating binding for module ${pybind_module_name}")
file(GLOB_RECURSE pybind_src_files "python_binding/*.cpp")
pybind11_add_module(${name} MODULE ${pybind_src_files} "NO_EXTRAS") # NO_EXTRAS required for pip install
target_include_directories(${name} PUBLIC "python_binding")
target_link_libraries(${name} PUBLIC ${target_to_bind})
endif()
pybind11_add_module(${pybind_module_name} MODULE ${pybind_src_files} "NO_EXTRAS") # NO_EXTRAS required for pip install
target_include_directories(${pybind_module_name} PRIVATE "python_binding")
target_link_libraries(${pybind_module_name} PRIVATE ${target_to_bind})
endfunction()