Commit 5fccd193 authored by Cyril Moineau

Merge branch 'backend_export' into allowNoInputProducer

parents 824b4405 b6581483
Showing 219 additions and 75 deletions
cmake_minimum_required(VERSION 3.15)
cmake_minimum_required(VERSION 3.18)
set(CXX_STANDARD 14)
file(STRINGS "${CMAKE_SOURCE_DIR}/version.txt" version)
......@@ -84,6 +84,8 @@ if( ${ENABLE_ASAN} )
endif()
# PYTHON BINDING
set(AIDGE_REQUIRES_PYTHON FALSE) # Will be set if aidge_core lib depends upon python interpreter
set(AIDGE_PYTHON_HAS_EMBED FALSE) # Will be set if the python embedded interpreter is found on the system
if (PYBIND)
# Python binding lib is by default installed in <prefix>/python_packages/<package>/
# When installed from python, setup.py should set it to the python package dir
......@@ -92,13 +94,17 @@ if (PYBIND)
include(PybindModuleCreation)
generate_python_binding(${pybind_module_name} ${module_name})
# Handles Python + pybind11 headers dependencies
target_link_libraries(${module_name}
PUBLIC
pybind11::pybind11
PRIVATE
Python::Module
)
##
# As of now, when PYBIND is set, the core archive itself depends upon pybind/python;
# we define -DPYBIND and add the dependencies on the pybind/python runtime where necessary.
# Add -DPYBIND to compilation and interface
target_compile_definitions(${module_name} PUBLIC PYBIND)
# Add dependencies on pybind/python. See details in add_pybind_dependency()
include(PybindDependency)
add_pybind_dependency(${module_name})
##
endif()
target_link_libraries(${module_name} PUBLIC Threads::Threads fmt::fmt)
......@@ -206,10 +212,10 @@ export(EXPORT ${CMAKE_PROJECT_NAME}-targets
##############################################
## Add test
if(TEST)
if(PYBIND)
message(FATAL_ERROR "PYBIND and TEST are both enabled. But cannot compile with catch_2.\nChoose between pybind and Catch2 for compilation.")
if (AIDGE_REQUIRES_PYTHON AND NOT AIDGE_PYTHON_HAS_EMBED)
message(WARNING "Skipping compilation of tests: missing Python embedded interpreter")
else()
enable_testing()
add_subdirectory(unit_tests)
endif()
enable_testing()
add_subdirectory(unit_tests)
endif()
......@@ -16,6 +16,7 @@ pip install . -v
> - `AIDGE_INSTALL` : to set the installation folder. Defaults to `<python_prefix>/lib/libAidge`
> - `AIDGE_PYTHON_BUILD_TYPE` : to set the compilation mode to **Debug** or **Release** or "" (for default flags). Defaults to **Release**.
> - `AIDGE_BUILD_GEN` : to set the build backend (for development mode) or "" for the cmake default. Defaults to "".
> - `AIDGE_BUILD_TEST` : to build the C++ unit tests. Set to "ON" or "OFF". Defaults to "OFF".
## Pip installation for development
......@@ -24,9 +25,10 @@ To setup aidge_core using pip in development (or editable mode), use the `--no-b
For instance, run the following commands in your python environment for a typical setup:
``` bash
export AIDGE_BUILD_TEST=ON # enable C++ unit tests
export AIDGE_PYTHON_BUILD_TYPE= # default flags (no debug info but fastest build time)
export AIDGE_PYTHON_BUILD_TYPE=Debug # or if one really needs to debug the C++ code
pip install setuptools setuptools_scm[toml] cmake # Pre-install build requirements (refer to the pyproject.toml [build-system] section)
pip install -U pip setuptools setuptools_scm[toml] cmake # Pre-install build requirements (refer to the pyproject.toml [build-system] section)
pip install -v --no-build-isolation -e .
```
......@@ -41,7 +43,7 @@ cmake --build build -j $(nproc) && cmake --install build
One can also use an alternate cmake build backend such as ninja, which can be installed easily through pip, for instance:
``` bash
pip install ninja
pip install -U ninja
export AIDGE_BUILD_GEN=Ninja
pip install -v --no-build-isolation -e .
```
......@@ -85,9 +87,12 @@ make all install
| *-DCMAKE_INSTALL_PREFIX:PATH* | ``str`` | Path to the install folder |
| *-DCMAKE_BUILD_TYPE* | ``str`` | If ``Debug``, compile in debug mode; if ``Release``, compile with the highest optimisations; or "" (empty) for default flags. Default=``Release`` |
| *-DWERROR* | ``bool`` | If ``ON`` show warning as error during compilation phase, default=``OFF`` |
| *-DPYBIND* | ``bool`` | If ``ON`` activate python binding, default=``ON`` |
| *-DTEST* | ``bool`` | If ``ON`` build C++ unit tests, default=``ON`` |
| *-DPYBIND* | ``bool`` | If ``ON`` activate python binding, default=``OFF`` |
| *-DPYBIND_INSTALL_PREFIX:PATH*| ``str`` | Path to the python module install folder when ``-DPYBIND=ON``, defaults to ``$CMAKE_INSTALL_PREFIX/python_packages/<module>`` |
If you have compiled with PyBind you can find the python lib ``aidge_core.cpython*.so`` at the root of the ``build`` directory.
If one compiles with ``-DPYBIND=ON``, ``-DPYBIND_INSTALL_PREFIX:PATH`` can be used to install the python module directly in the
python sources tree (for instance ``$PWD/aidge_core``). ``setup.py`` takes care of this and installs the module at the right place.
## Run tests
### CPP
......
......@@ -3,6 +3,11 @@
include(CMakeFindDependencyMacro)
find_dependency(fmt)
find_dependency(Threads)
set(AIDGE_REQUIRES_PYTHON @AIDGE_REQUIRES_PYTHON@)
set(AIDGE_PYTHON_HAS_EMBED @AIDGE_PYTHON_HAS_EMBED@)
if (AIDGE_REQUIRES_PYTHON AND AIDGE_PYTHON_HAS_EMBED)
find_dependency(Python COMPONENTS Interpreter Development)
endif()
include(${CMAKE_CURRENT_LIST_DIR}/aidge_core-config-version.cmake)
......
cmake_minimum_required(VERSION 3.15)
cmake_minimum_required(VERSION 3.18)
set(CXX_STANDARD 14)
file(STRINGS "${CMAKE_SOURCE_DIR}/project_name.txt" project_name)
......@@ -18,6 +18,7 @@ set(module_name _${CMAKE_PROJECT_NAME}) # target name
##############################################
# Define options
option(PYBIND "python binding" ON)
option(STANDALONE "Build standalone executable" ON)
option(WERROR "Warning as error" OFF)
option(TEST "Enable tests" OFF)
option(COVERAGE "Enable coverage" OFF)
......@@ -61,16 +62,8 @@ set_property(TARGET ${module_name} PROPERTY POSITION_INDEPENDENT_CODE ON)
# PYTHON BINDING
if (PYBIND)
# Handles Python + pybind11 headers dependencies
include(PybindModuleCreation)
generate_python_binding(${CMAKE_PROJECT_NAME} ${module_name})
target_link_libraries(${module_name}
PUBLIC
pybind11::pybind11
PRIVATE
Python::Python
)
endif()
if( ${ENABLE_ASAN} )
......@@ -94,7 +87,6 @@ target_include_directories(${module_name}
${CMAKE_CURRENT_SOURCE_DIR}/src
)
target_link_libraries(${module_name} PUBLIC fmt::fmt)
target_compile_features(${module_name} PRIVATE cxx_std_14)
target_compile_options(${module_name} PRIVATE
......@@ -151,8 +143,13 @@ install(FILES
## Exporting from the build tree
message(STATUS "Exporting created targets to use them in another build")
export(EXPORT ${CMAKE_PROJECT_NAME}-targets
FILE "${CMAKE_CURRENT_BINARY_DIR}/${project}-targets.cmake")
# Compile executable
add_executable(main main.cpp)
target_link_libraries(main PUBLIC _aidge_core ${module_name})
FILE "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-targets.cmake")
if(STANDALONE)
if(AIDGE_REQUIRES_PYTHON AND NOT AIDGE_PYTHON_HAS_EMBED)
message(WARNING "Skipping compilation of standalone executable: missing Python embedded interpreter")
else()
add_executable(main main.cpp)
target_link_libraries(main PRIVATE ${module_name})
endif()
endif()
function(generate_python_binding name target_to_bind)
find_package(Python COMPONENTS Interpreter Development)
find_package(Python COMPONENTS Interpreter Development.Module)
add_definitions(-DPYBIND)
Include(FetchContent)
FetchContent_Declare(
PyBind11
......@@ -15,11 +14,9 @@ function(generate_python_binding name target_to_bind)
file(GLOB_RECURSE pybind_src_files "python_binding/*.cpp")
pybind11_add_module(${name} MODULE ${pybind_src_files} "NO_EXTRAS") # NO_EXTRAS required for pip install
target_include_directories(${name} PUBLIC "python_binding")
target_include_directories(${name} PRIVATE "python_binding")
# Link target library to bind
target_link_libraries(${name} PRIVATE ${target_to_bind})
# Handles Python + pybind11 headers dependencies
target_link_libraries(${name}
PUBLIC
${target_to_bind}
)
endfunction()
@PACKAGE_INIT@
include(CMakeFindDependencyMacro)
find_dependency(aidge_core)
include(${CMAKE_CURRENT_LIST_DIR}/aidge_backend_cpu-config-version.cmake)
include(${CMAKE_CURRENT_LIST_DIR}/aidge_backend_cpu-targets.cmake)
#include <iostream>
#include <aidge/backend/cpu.hpp>
/* Register default cpu Tensor implementation */
#include <aidge/backend/cpu/data/TensorImpl.hpp>
/* Include model generator */
#include "include/dnn.hpp"
int main()
......
......@@ -4,6 +4,10 @@ This file is copied in the test export.
*/
#include <iostream>
/* Register default cpu Tensor implementation */
#include <aidge/backend/cpu/data/TensorImpl.hpp>
/* Include model generator */
#include "include/dnn.hpp"
int main()
......
......@@ -65,6 +65,7 @@ class test_export(unittest.TestCase):
def setUp(self):
self.EXPORT_PATH: pathlib.Path = pathlib.Path("dummy_export")
self.BUILD_DIR: pathlib.Path = self.EXPORT_PATH / "build"
self.INSTALL_DIR: pathlib.Path = (self.EXPORT_PATH / "install").absolute()
def tearDown(self):
pass
......@@ -90,11 +91,16 @@ class test_export(unittest.TestCase):
# Export model
aidge_core.serialize_to_cpp(self.EXPORT_PATH, model)
self.assertTrue(self.EXPORT_PATH.is_dir(), "Export folder has not been generated")
os.makedirs(self.EXPORT_PATH / "build", exist_ok=True)
self.assertTrue(
self.EXPORT_PATH.is_dir(), "Export folder has not been generated"
)
os.makedirs(self.BUILD_DIR, exist_ok=True)
clean_dir(self.BUILD_DIR) # if the build dir already existed, ensure it is empty
clean_dir(self.INSTALL_DIR)
# Test compilation of export
install_path = (
search_path = (
os.path.join(sys.prefix, "lib", "libAidge")
if "AIDGE_INSTALL" not in os.environ
else os.environ["AIDGE_INSTALL"]
......@@ -112,14 +118,16 @@ class test_export(unittest.TestCase):
[
"cmake",
str(self.EXPORT_PATH.absolute()),
"-DPYBIND=1",
f"-DCMAKE_INSTALL_PREFIX:PATH={install_path}",
"-DPYBIND=ON",
f"-DCMAKE_PREFIX_PATH={search_path}", # search dependencies
f"-DCMAKE_INSTALL_PREFIX:PATH={self.INSTALL_DIR}", # local install
],
cwd=str(self.BUILD_DIR),
):
print(std_line, end="")
except subprocess.CalledProcessError as e:
print(f"An error occurred: {e}\nFailed to configure export.")
raise SystemExit(1)
##########################
# BUILD EXPORT
......@@ -131,6 +139,7 @@ class test_export(unittest.TestCase):
print(std_line, end="")
except subprocess.CalledProcessError as e:
print(f"An error occurred: {e}\nFailed to build export.")
raise SystemExit(1)
##########################
# INSTALL EXPORT
......@@ -142,6 +151,7 @@ class test_export(unittest.TestCase):
print(std_line, end="")
except subprocess.CalledProcessError as e:
print(f"An error occurred: {e}\nFailed to install export.")
raise SystemExit(1)
if __name__ == "__main__":
......
function(add_pybind_dependency target_name)
# This function adds dependencies on pybind/python in the
# case where a target depends on it. This is orthogonal to
# the creation of a pybind python module.
# In this case we need to add additional dependencies and distinguish the two link-time usages of the archive:
#### 1. link for producing a python binding module, which must not include the python interpreter
# For case 1, the archive is bound to a python module which will provide the runtime,
# hence we add a dependency only on the pybind and python headers. Also we install the pybind headers
# for backward compatibility of dependent build systems which may not depend upon pybind.
#### 2. link for producing an executable (tests for instance) which must include the python interpreter
# For case 2, a library or executable must also depend on the embedded python libraries,
# hence we add a dependency on Python::Python when the target is not a module. Also we account for
# the case where the python libraries are not present (such as on cibuildwheel). In this case
# only python modules can be built, not standalone executables.
# Make detection of Development.Embed optional; we need to separate the component detections,
# otherwise the variables set by the Interpreter component may be undefined.
find_package(Python COMPONENTS Interpreter)
find_package(Python COMPONENTS Development)
if(NOT Python_Development.Embed_FOUND)
message(WARNING "Could not find Python embed libraries, falling back to Python Module only mode. If you are running this from cibuildwheel, this warning is expected.")
find_package(Python COMPONENTS Development.Module)
endif()
# Set these variables which are used in the package config (aidge_core-config.cmake.in)
# and for conditional build on the presence of the python embedded interpreter library
set(AIDGE_REQUIRES_PYTHON TRUE PARENT_SCOPE)
set(AIDGE_PYTHON_HAS_EMBED ${Python_Development.Embed_FOUND} PARENT_SCOPE)
# Add pybind11 headers dependencies, the headers for the package interface are installed below
target_include_directories(${target_name} SYSTEM PUBLIC
$<INSTALL_INTERFACE:include/_packages_deps/${target_name}>
$<BUILD_INTERFACE:${pybind11_INCLUDE_DIR}>)
# Add include dirs for Python.h
target_include_directories(${target_name} SYSTEM PUBLIC ${Python_INCLUDE_DIRS})
# Add the Python embedded interpreter when the target is not a module (test executables for instance).
# This also requires Development.Embed to be available on the system.
if (Python_Development.Embed_FOUND)
set(target_is_module $<STREQUAL:$<TARGET_PROPERTY:TYPE>,MODULE_LIBRARY>)
target_link_libraries(${target_name} INTERFACE $<$<NOT:${target_is_module}>:Python::Python>)
endif()
# Install pybind headers such that dependent modules can find them
install(DIRECTORY ${pybind11_INCLUDE_DIR}/pybind11
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/_packages_deps/${target_name}
)
endfunction()
......@@ -2,7 +2,6 @@ function(generate_python_binding name target_to_bind)
find_package(Python COMPONENTS Interpreter Development.Module)
add_definitions(-DPYBIND)
Include(FetchContent)
FetchContent_Declare(
PyBind11
......@@ -15,11 +14,8 @@ function(generate_python_binding name target_to_bind)
file(GLOB_RECURSE pybind_src_files "python_binding/*.cpp")
pybind11_add_module(${name} MODULE ${pybind_src_files} "NO_EXTRAS") # NO_EXTRAS required for pip install
target_include_directories(${name} PUBLIC "python_binding")
target_include_directories(${name} PRIVATE "python_binding")
# Handles Python + pybind11 headers dependencies
target_link_libraries(${name}
PUBLIC
${target_to_bind}
)
# Link specified target to bind
target_link_libraries(${name} PRIVATE ${target_to_bind})
endfunction()
......@@ -129,6 +129,7 @@ public:
}
virtual void setBackend(const std::string& name, DeviceIdx_t device = 0) = 0;
void setBackend(const std::vector<std::pair<std::string, DeviceIdx_t>>& backends);
virtual void setDataType(const DataType& dataType) const = 0;
virtual void setDataFormat(const DataFormat& dataFormat) const = 0;
......
......@@ -54,7 +54,18 @@ public:
{
mAnyCompare.emplace(std::make_pair<std::type_index, bool(*)(const future_std::any&, const future_std::any&)>(typeid(T),
[](const future_std::any& lhs, const future_std::any& rhs) {
return (future_std::any_cast<T>(lhs) < future_std::any_cast<T>(rhs));
#ifdef PYBIND
if (lhs.type() == typeid(py::object)) {
return (future_std::any_cast<py::object>(lhs).cast<T>() < future_std::any_cast<T>(rhs));
}
else if (rhs.type() == typeid(py::object)) {
return (future_std::any_cast<T>(lhs) < future_std::any_cast<py::object>(rhs).cast<T>());
}
else
#endif
{
return (future_std::any_cast<T>(lhs) < future_std::any_cast<T>(rhs));
}
}));
const auto dot = name.find('.');
......@@ -94,7 +105,18 @@ public:
{
mAnyCompare.emplace(std::make_pair<std::type_index, bool(*)(const future_std::any&, const future_std::any&)>(typeid(T),
[](const future_std::any& lhs, const future_std::any& rhs) {
return (future_std::any_cast<T>(lhs) < future_std::any_cast<T>(rhs));
#ifdef PYBIND
if (lhs.type() == typeid(py::object)) {
return (future_std::any_cast<py::object>(lhs).cast<T>() < future_std::any_cast<T>(rhs));
}
else if (rhs.type() == typeid(py::object)) {
return (future_std::any_cast<T>(lhs) < future_std::any_cast<py::object>(rhs).cast<T>());
}
else
#endif
{
return (future_std::any_cast<T>(lhs) < future_std::any_cast<T>(rhs));
}
}));
const auto dot = name.find('.');
......@@ -127,7 +149,18 @@ public:
{
mAnyCompare.emplace(std::make_pair<std::type_index, bool(*)(const future_std::any&, const future_std::any&)>(typeid(T),
[](const future_std::any& lhs, const future_std::any& rhs) {
return (future_std::any_cast<T>(lhs) < future_std::any_cast<T>(rhs));
#ifdef PYBIND
if (lhs.type() == typeid(py::object)) {
return (future_std::any_cast<py::object>(lhs).cast<T>() < future_std::any_cast<T>(rhs));
}
else if (rhs.type() == typeid(py::object)) {
return (future_std::any_cast<T>(lhs) < future_std::any_cast<py::object>(rhs).cast<T>());
}
else
#endif
{
return (future_std::any_cast<T>(lhs) < future_std::any_cast<T>(rhs));
}
}));
const auto dot = name.find('.');
......
......@@ -23,7 +23,7 @@ test = [
requires = [
"setuptools>=64",
"setuptools_scm[toml]==7.1.0",
"cmake>=3.15.3.post1"
"cmake>=3.18.4.post1"
]
build-backend = "setuptools.build_meta"
......@@ -51,15 +51,15 @@ write_to = "aidge_core/_version.py"
[tool.cibuildwheel]
build-frontend = "build"
test-requires = "pytest"
# FIXME: The ignored export test requires a to build the generated export via cmake.
# However due to a strange bug I haven't been able to properly link Python::Module to the export target
# Resulting in the need to link Python::Python which is the python interpreter.
# This suppresses the issue but sadly this target is not available on the cibuilwheel image.
# Hence the test is ignored. If you want to try and solve this bug go on.
# Just take care to increment the counter just below.
#
# Work time spent on this bug : 24h
test-command = "pytest --ignore={package}/aidge_core/unit_tests/test_export.py {package}/aidge_core/unit_tests"
# WARNING: in the test suite, `test_export.py` used to be skipped
# because it did not build when the python embedded interpreter is not available,
# as is the case in cibuildwheel containers.
# Now the build system takes care of this and skips the generation of a standalone
# executable when it is not possible.
# The root causes for this conditional build are that 1. the python embedded interpreter
# is not always available, and 2. the aidge_core library depends on it as of now.
# Hopefully this latter dependency may be removed in the future, simplifying the build.
test-command = "pytest -v --capture=no {package}/aidge_core/unit_tests"
# uncomment to run cibuildwheel locally on selected distros
# build=[
# "cp38-manylinux_x86_64",
......
......@@ -48,6 +48,16 @@ void init_Node(py::module& m) {
:rtype: str
)mydelimiter")
.def("create_unique_name", &Node::createUniqueName, py::arg("base_name"),
R"mydelimiter(
Given a base name, generate a new name which is unique in all the GraphViews containing this node.
:param base_name: proposed name for the node.
:type base_name: str
:rtype: str
)mydelimiter")
.def("__repr__", &Node::repr)
.def("add_child",
......
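For reference, the `create_unique_name` binding documented above can be exercised from Python as in the sketch below. This is a minimal sketch only: how the `Node` handle is created and added to a `GraphView` is assumed and not shown, since graph construction is outside the scope of this diff.

```python
import aidge_core  # module whose bindings are extended in this commit


def pick_unique_name(node: "aidge_core.Node", base: str = "conv") -> str:
    """Return a name unique across every GraphView containing `node`.

    Sketch only: obtaining `node` (e.g. from an existing GraphView) is left to the caller.
    """
    # `create_unique_name` is the binding added above: if `base` is free it is
    # returned as-is, otherwise a suffixed variant (base_1, base_2, ...) is produced.
    return node.create_unique_name(base)
```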
......@@ -53,7 +53,8 @@ void init_Operator(py::module& m){
)mydelimiter")
.def("associate_input", &Operator::associateInput, py::arg("inputIdx"), py::arg("data"))
.def("set_datatype", &Operator::setDataType, py::arg("dataType"))
.def("set_backend", &Operator::setBackend, py::arg("name"), py::arg("device") = 0)
.def("set_backend", py::overload_cast<const std::string&, DeviceIdx_t>(&Operator::setBackend), py::arg("name"), py::arg("device") = 0)
.def("set_backend", py::overload_cast<const std::vector<std::pair<std::string, DeviceIdx_t>>&>(&Operator::setBackend), py::arg("backends"))
.def("forward", &Operator::forward)
// py::keep_alive forbids Python from garbage collecting the implementation lambda as long as the Operator is not deleted!
.def("set_impl", &Operator::setImpl, py::arg("implementation"), py::keep_alive<1, 2>())
......
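Similarly, the two `set_backend` overloads bound above could be driven from Python roughly as sketched below. This is a hedged sketch: the `op` handle and the availability of a "cuda" backend are assumptions, and which backend from the list form is effectively used is decided by the C++ `Operator::setBackend` overload.

```python
import aidge_core  # module whose bindings are extended in this commit


def configure_backend(op: "aidge_core.Operator") -> None:
    """Sketch of both overloads; `op` is assumed to come from an existing node."""
    # Single-backend overload: backend name plus an optional device index.
    op.set_backend("cpu", device=0)

    # List overload added by this commit: ordered (backend, device) pairs;
    # the selection among them is delegated to the C++ implementation.
    op.set_backend([("cuda", 0), ("cpu", 0)])
```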
......@@ -25,6 +25,7 @@ void init_Scheduler(py::module& m){
.def(py::init<std::shared_ptr<GraphView>&>(), py::arg("graph_view"))
.def("graph_view", &Scheduler::graphView)
.def("save_scheduling_diagram", &Scheduler::saveSchedulingDiagram, py::arg("file_name"))
.def("save_static_scheduling_diagram", &Scheduler::saveStaticSchedulingDiagram, py::arg("file_name"))
.def("resetScheduling", &Scheduler::resetScheduling)
.def("generate_scheduling", &Scheduler::generateScheduling)
.def("get_static_scheduling", &Scheduler::getStaticScheduling, py::arg("step") = 0)
......
......@@ -61,13 +61,14 @@ class CMakeBuild(build_ext):
if build_gen
else []
)
test_onoff = os.environ.get("AIDGE_BUILD_TEST", "OFF")
self.spawn(
[
"cmake",
*build_gen_opts,
str(cwd),
"-DTEST=OFF",
f"-DTEST={test_onoff}",
f"-DCMAKE_INSTALL_PREFIX:PATH={install_path}",
f"-DCMAKE_BUILD_TYPE={compile_type}",
"-DPYBIND=ON",
......
......@@ -73,13 +73,24 @@ void Aidge::Node::setName(const std::string& name) {
mName = name;
}
std::string Aidge::Node::createUniqueName(std::string name){
for (auto graphView : views()){
if (graphView->inView(name)){
return createUniqueName(name.append("_"));
std::string Aidge::Node::createUniqueName(std::string baseName)
{
int index = 0;
bool nameAlreadyUsed = true;
std::string newName;
while (nameAlreadyUsed) {
std::string suffix = "_" + std::to_string(index);
newName = (index == 0) ? baseName : baseName + suffix;
nameAlreadyUsed = false;
for (auto graphView : views()) {
if (graphView->inView(newName)) {
nameAlreadyUsed = true;
break;
}
}
index++;
}
return name;
return newName;
}
///////////////////////////////////////////////////////
......
......@@ -43,16 +43,17 @@ bool Aidge::Conv_Op<DIM>::forwardDims(bool /*allowDataDependency*/) {
if (inputsAssociated()) {
// first check weight since it defines inChannels and outChannels
AIDGE_ASSERT((getInput(1)->nbDims() == (DIM+2)),
"Wrong weight Tensor dimension: {} for Conv{}D operator.", getInput(1)->nbDims(), DIM);
"Wrong weight Tensor dimension: {} for Conv{}D operator. Expected number of dimensions is {}.", getInput(1)->nbDims(), DIM, DIM+2);
// check data
AIDGE_ASSERT((getInput(0)->nbDims() == (DIM+2)) &&
(getInput(0)->template dims<DIM+2>()[1] == inChannels()),
"Wrong input size for Conv operator.");
"Wrong input size ({}) for Conv operator. Expected dims are [x, {}, {}].", getInput(0)->dims(), inChannels(), fmt::join(std::vector<std::string>(DIM, "x"), ", "));
// check optional bias
if(getInput(2))
AIDGE_ASSERT((getInput(2)->nbDims() == (1)) &&
(getInput(2)->template dims<1>()[0] == outChannels()),
"Wrong bias size for Conv operator.");
"Wrong bias size ({}) for Conv operator. Expected dims are [{}].", getInput(2)->dims(), outChannels());
std::array<DimSize_t, DIM + 2> outputDims{};
const std::array<DimSize_t, DIM + 2> inputDims(getInput(0)->template dims<DIM+2>());
......