Skip to content

Error while exporting model to cpp

Which commit/version of Aidge are you using?

Main branch, updated today

Problem description

Error while exporting the model to C++. Note that I do not provide a label tensor.

Loading onnx model
Native operators: 60 (10 types)
- Add: 3
- Conv2D: 7
- MatMul: 3
- MaxPooling2D: 7
- Pad2D: 7
- Producer: 21
- ReLU: 9
- ReduceMean: 1
- Reshape: 1
- Sigmoid: 1
Generic operators: 0 (0 types)
Native types coverage: 100.0% (10/10)
Native operators coverage: 100.0% (60/60)
(defaultdict(<class 'int'>, {'MaxPooling2D': 7, 'MatMul': 3, 'Sigmoid': 1, 'ReLU': 9, 'Conv2D': 7, 'Pad2D': 7, 'ReduceMean': 1, 'Producer': 21, 'Add': 3, 'Reshape': 1}), defaultdict(<class 'int'>, {}))
Generating scheduler
Testing model with random input data
Exporting model to CPP
Removing existing export directory...
Traceback (most recent call last):
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_core/export_utils/node_export.py", line 527, in export
    code_generation.generate_file(
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_core/export_utils/code_generation.py", line 29, in generate_file
    file.write(generate_str(template_path, **kwargs))
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_core/export_utils/code_generation.py", line 45, in generate_str
    template_path.parent), undefined=StrictUndefined, keep_trailing_newline=True).get_template(template_path.name).render(kwargs)
                                                                                                                   ^^^^^^^^^^^^^^
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/jinja2/environment.py", line 1295, in render
    self.environment.handle_exception()
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/jinja2/environment.py", line 942, in handle_exception
    raise rewrite_traceback_stack(source=source)
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_export_cpp/templates/configuration/activation_config.jinja", line 7, in top-level template code
    {%- set nb_data = in_chan[0] * in_height[0] * in_width[0] %}
    ^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: unsupported operand type(s) for *: 'NoneType' and 'NoneType'

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/data1/is156025/fa125436/Confluence/Aidge/mini.py", line 65, in <module>
    aidge_export_cpp.export(EXPORT_FOLDER,
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_export_cpp/export.py", line 61, in export
    scheduler_export(scheduler,
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_core/export_utils/scheduler_export.py", line 131, in scheduler_export
    list_configs += op.export(dnn_folder)
                    ^^^^^^^^^^^^^^^^^^^^^
  File "/data1/is156025/fa125436/env_aidge/lib/python3.12/site-packages/aidge_core/export_utils/node_export.py", line 533, in export
    raise RuntimeError(f"Error when creating config file for {self.node.name()}[{self.node.type()}].") from e
RuntimeError: Error when creating config file for final_feedforward_classifier_classifier_1_Relu[ReLU].

Reproducible example code


import aidge_core.utils
import numpy as np

# Aidge Modules
import aidge_core
import aidge_onnx
import aidge_backend_cpu
import aidge_export_cpp

from aidge_export_cpp.export_utils import (
    cpp_fuse_to_metaops,
    set_nodes_names,
    set_nodes_datatypes,
    exclude_unwanted_producers)

from aidge_core.export_utils import remove_optional_inputs, get_node_from_metaop

# Setting Aidge verbose level
# Limit Aidge console output to errors only.
aidge_core.Log.set_console_level(aidge_core.Level.Error)

MODEL_FOLDER = 'models/'
MODEL_NAME = 'simplified_cnn'
EXPORT_FOLDER = f"export_{MODEL_NAME}_float32"

# Build the path to the ONNX file and load the model.
onnx_path = f"{MODEL_FOLDER}{MODEL_NAME}.onnx"
print("Loading onnx model")
model = aidge_onnx.load_onnx(onnx_path, verbose=False)

# Graph cleanup passes applied before export.
aidge_core.remove_flatten(model)
aidge_core.fuse_batchnorm(model)
aidge_core.expand_metaops(model)
print(aidge_onnx.native_coverage_report(model))

# Attach the CPU backend with float32 tensors.
model.set_datatype(aidge_core.dtype.float32)
model.set_backend("cpu")

# Build a sequential schedule for the graph.
print("Generating scheduler")
scheduler = aidge_core.SequentialScheduler(model)
scheduler.generate_scheduling()

# Run one forward pass on random data as a sanity check.
# NOTE(review): shape (1, 1, 399, 128) presumably matches the ONNX
# model's expected input — confirm against the model definition.
print("Testing model with random input data")
random_sample = np.random.rand(1, 1, 399, 128)
in_tensor = aidge_core.Tensor(random_sample)
in_tensor.set_datatype(aidge_core.dtype.float32)
scheduler.forward(True, [in_tensor])
out_node = model.get_output_nodes().pop()
out_tensor = out_node.get_operator().get_output(0)

# Generate the C++ export (this is where the reported failure occurs).
print("Exporting model to CPP")
aidge_export_cpp.export(EXPORT_FOLDER,
                        model,
                        scheduler,
                        in_tensor)