diff --git a/.gitignore b/.gitignore
index 8eb208c0cff93ea86a79a962adbc89978b7ae50e..55ab6d78711f9af47af0458596f474ba44379676 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,4 +31,4 @@ ENV/
 xml*/
 
 # Model parameters
-*.onnx
\ No newline at end of file
+*.onnx
diff --git a/aidge_export_arm_cortexm/export.py b/aidge_export_arm_cortexm/export.py
index de1de735401fb6a2790cd0d97215012f1aad42bd..f211fbbe957f55629bc48b77999b12c7c48d2c8a 100644
--- a/aidge_export_arm_cortexm/export.py
+++ b/aidge_export_arm_cortexm/export.py
@@ -4,7 +4,7 @@ import shutil
 from pathlib import Path
 import numpy as np
 
-from aidge_core.export_utils.code_generation import *
+from aidge_core.export.code_generation import *
 from aidge_export_arm_cortexm.utils import (ROOT, AVAILABLE_BOARDS, has_board, \
                                             OPERATORS_REGISTRY, supported_operators)
 import aidge_export_arm_cortexm.operators
@@ -94,23 +94,28 @@ def export(export_folder_name,
     # It supposes the entry nodes are producers with constant=false
     # Store the datatype & name
     list_inputs_name = []
+    first_element_added = False
     for node in graphview.get_nodes():
         if node.type() == "Producer":
+            if not first_element_added:
+                export_type = aidge_datatype2ctype(node.get_operator().get_output(0).dtype())
+                list_inputs_name.append((export_type, node.name()))
+                first_element_added = True
             if not node.get_operator().attr.constant:
                 export_type = aidge_datatype2ctype(node.get_operator().get_output(0).dtype())
                 list_inputs_name.append((export_type, node.name()))
 
     # Get output nodes
     # Store the datatype & name, like entry nodes
+    list_outputs_name = []
     for node in graphview.get_nodes():
         if len(node.get_children()) == 0:
-            # print(node.name(), node.get_operator())
-            # if node.get_operator().attr is not None:
-            #     # Temporary fix because impossible to set DataType of a generic operator
-            #     export_type = aidge_datatype2ctype(node.get_operator().attr.dtype)
-            # else:
-            export_type = aidge_datatype2ctype(node.get_operator().get_output(0).dtype())
+            if node.get_operator().attr.has_attr('dtype'):
+                # Temporary fix because impossible to set DataType of a generic operator
+                export_type = aidge_datatype2ctype(node.get_operator().attr.dtype)
+            else:
+                export_type = aidge_datatype2ctype(node.get_operator().get_output(0).dtype())
             list_outputs_name.append((export_type, node.name()))
diff --git a/aidge_export_arm_cortexm/memory.py b/aidge_export_arm_cortexm/memory.py
index 859e441bf9fb61d42459700d4017e6dbafe820ac..7f7983fc7898bbd2d7fa383ecc0b5f16f290918f 100644
--- a/aidge_export_arm_cortexm/memory.py
+++ b/aidge_export_arm_cortexm/memory.py
@@ -29,7 +29,7 @@ MEMORY_INFO_TEMPLATE = ["layer_name", "size", "stride", "length", "count", "cont
 
 # Default memory management, which can be used for development
 def compute_default_mem_info(scheduler: aidge_core.Scheduler):
-    list_forward_nodes = scheduler.get_static_scheduling()
+    list_forward_nodes = scheduler
     mem_info = []
     mem_size = 0
diff --git a/aidge_export_arm_cortexm/operators.py b/aidge_export_arm_cortexm/operators.py
index 3a504dba94a23b8c78b6bfff57972659a23353c3..0d2a3c6f7f5773684ea71a8d562d97243cd5fae5 100644
--- a/aidge_export_arm_cortexm/operators.py
+++ b/aidge_export_arm_cortexm/operators.py
@@ -8,7 +8,7 @@ from typing import Tuple, List, Union, Dict
 
 import aidge_core
 from aidge_core import ExportNode
-from aidge_core.export_utils.code_generation import *
+from aidge_core.export.code_generation import *
 from aidge_export_arm_cortexm.utils import ROOT, operator_register
 from aidge_export_arm_cortexm.utils.converter import numpy_dtype2ctype, aidge_datatype2dataformat, aidge_datatype2ctype
 from aidge_export_arm_cortexm.utils.generation import *
@@ -275,9 +275,10 @@ class Conv_ARMCortexM(ExportNode):
             # Use PaddedConv to add padding attribute
             self.padding = [0, 0]
 
-        self.nb_channels = node.get_operator().attr.in_channels
-        self.nb_outputs = node.get_operator().attr.out_channels
-
+        self.nb_channels = node.get_operator().in_channels()
+        self.nb_outputs = node.get_operator().out_channels()
+        if self.inputs[0] is None:
+            raise RuntimeError(f"No input connected to node '{node.name()}'")
         if len(self.inputs_dims[0]) == 4:
             # if dims == [batch, nb_channels, height, width]
             # transform to [nb_channels, height, width]
@@ -414,7 +415,7 @@ class ConvReluScaling_ARMCortexM(Conv_ARMCortexM):
     def __init__(self, node, board, library):
         super(Conv_ARMCortexM, self).__init__(node, board, library)
 
-        if self.operator.attr.has_attr.begin_end_borders:
+        if self.operator.has_attr("Begin_End_Borders"):
             self.padding = self.operator.attr.begin_end_borders
 
         self.activation = "Rectifier"
diff --git a/aidge_export_arm_cortexm/operators_old.py b/aidge_export_arm_cortexm/operators_old.py
index 04a090b65c56a0a503618c80480e51ac4fbd6550..3440b248b1b719f4e5573d5c4dcc9df0b450122c 100644
--- a/aidge_export_arm_cortexm/operators_old.py
+++ b/aidge_export_arm_cortexm/operators_old.py
@@ -403,9 +403,9 @@ class Slice(ExportNode):
 
     def __init__(self, node, board, dataformat, library):
 
-        self.axes = node.get_operator().get_attr("axes")
-        self.starts = node.get_operator().get_attr("starts")
-        self.ends = node.get_operator().get_attr("ends")
+        self.axes = node.get_operator().attr.axes
+        self.starts = node.get_operator().attr.starts
+        self.ends = node.get_operator().attr.ends
 
         # Compute output dims
         out_dims = [self.ends[x-1] - self.starts[x-1] for x in self.axes]
@@ -460,7 +460,7 @@ class Concat(ExportNode):
 
     def __init__(self, node, board, dataformat, library):
 
-        self.axis = node.get_operator().get_attr("axis")
+        self.axis = node.get_operator().attr.axis
 
         out_dims = node.get_operator().get_input(0).dims()
         out_dims[self.axis - 1] = 0
diff --git a/aidge_export_arm_cortexm/templates/network/network_forward.jinja b/aidge_export_arm_cortexm/templates/network/network_forward.jinja
index b00e42f813066505cf03dd9f324a4ac418e45818..bde5553020d1a36f225a1402172715a7446c4496 100644
--- a/aidge_export_arm_cortexm/templates/network/network_forward.jinja
+++ b/aidge_export_arm_cortexm/templates/network/network_forward.jinja
@@ -12,7 +12,11 @@
 
 {# mem has the datatype of the first input #}
 {#- Change here to improve it -#}
+{% if inputs[0][0] %}
 static {{inputs[0][0]}} mem[MEMORY_SIZE];
+{% else %}
+static float mem[MEMORY_SIZE];
+{% endif %}
 
 {# Forward function #}
 {#- Support multiple inputs with different datatypes and multiple outputs with different datatypes -#}
diff --git a/aidge_export_arm_cortexm/utils/converter.py b/aidge_export_arm_cortexm/utils/converter.py
index cffc0df919f15cd48a1dc1c58326f5557a76dc48..9d8bd15b2684986829a542b9c2bf1002605e37e0 100644
--- a/aidge_export_arm_cortexm/utils/converter.py
+++ b/aidge_export_arm_cortexm/utils/converter.py
@@ -21,35 +21,35 @@ def numpy_dtype2ctype(dtype):
         raise ValueError(f"Unsupported {dtype} dtype")
 
 
-def aidge_datatype2ctype(dtype):
-    if dtype == aidge_core.dtype.int8:
+def aidge_datatype2ctype(datatype):
+    if datatype == aidge_core.dtype.int8:
         return "int8_t"
-    elif dtype == aidge_core.dtype.uint8:
+    elif datatype == aidge_core.dtype.uint8:
         return "uint8_t"
-    elif dtype == aidge_core.dtype.int32:
+    elif datatype == aidge_core.dtype.int32:
         return "int32_t"
-    elif dtype == aidge_core.dtype.int64:
+    elif datatype == aidge_core.dtype.int64:
         return "int64_t"
-    elif dtype == aidge_core.dtype.float32:
+    elif datatype == aidge_core.dtype.float32:
         return "float"
-    elif dtype == aidge_core.dtype.float64:
+    elif datatype == aidge_core.dtype.float64:
         return "double"
     # Add more dtype mappings as needed
     else:
-        raise ValueError(f"Unsupported {dtype} aidge dtype")
+        raise ValueError(f"Unsupported {datatype} aidge dtype")
 
 
-def aidge_datatype2dataformat(dtype):
-    if dtype == aidge_core.dtype.int8:
+def aidge_datatype2dataformat(datatype):
+    if datatype == aidge_core.dtype.int8:
         return "int8"
-    elif dtype == aidge_core.dtype.int32:
+    elif datatype == aidge_core.dtype.int32:
         return "int32"
-    elif dtype == aidge_core.dtype.int64:
+    elif datatype == aidge_core.dtype.int64:
         return "int64"
-    elif dtype == aidge_core.dtype.float32:
+    elif datatype == aidge_core.dtype.float32:
         return "float32"
-    elif dtype == aidge_core.dtype.float64:
+    elif datatype == aidge_core.dtype.float64:
         return "float64"
     # Add more dtype mappings as needed
     else:
-        raise ValueError(f"Unsupported {dtype} aidge dtype")
+        raise ValueError(f"Unsupported {datatype} aidge dtype")
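
Note (not part of the patch): the converter helpers touched above are plain value-to-string lookups. For comparison only, a minimal table-driven sketch of the same mapping is shown below; it assumes aidge_core.dtype exposes the enum members already used in converter.py, and the helper name is hypothetical.

# Hypothetical, table-driven equivalent of aidge_datatype2ctype (illustration only).
import aidge_core

# Lookup table: aidge datatype -> C type name used by the generated code.
_AIDGE_TO_CTYPE = {
    aidge_core.dtype.int8:    "int8_t",
    aidge_core.dtype.uint8:   "uint8_t",
    aidge_core.dtype.int32:   "int32_t",
    aidge_core.dtype.int64:   "int64_t",
    aidge_core.dtype.float32: "float",
    aidge_core.dtype.float64: "double",
}

def aidge_datatype2ctype_sketch(datatype):
    # Same behavior as the if/elif chain: map known dtypes, raise on anything else.
    try:
        return _AIDGE_TO_CTYPE[datatype]
    except KeyError:
        raise ValueError(f"Unsupported {datatype} aidge dtype") from None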