From 1d321c958777ba97c82fc975df27ea370b55e1ec Mon Sep 17 00:00:00 2001
From: Wissam Boussella <wissam.boussella@cea.fr>
Date: Mon, 9 Dec 2024 15:09:38 +0100
Subject: [PATCH] [fix] Address MR review: replace Sum_T with Bias_T in
 ConvDW, remove conv_dw_config in favor of the shared conv_config, and fix
 indentation in the softmax kernel

---
 .gitignore                                    |  3 ++
 .../_Aidge_Arm/kernels/Convolution/ConvDW.hpp |  3 +-
 .../Softmax/aidge_softmax_chw_float32.h       |  5 +--
 .../configuration/conv_dw_cinfig.jinja        | 40 -------------------
 .../templates/configuration/softmax.jinja     |  2 -
 aidge_export_arm_cortexm/operators.py         |  4 +-
 6 files changed, 8 insertions(+), 49 deletions(-)
 delete mode 100644 aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/conv_dw_cinfig.jinja

diff --git a/.gitignore b/.gitignore
index 4b4d03f..5129aa2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,3 +34,6 @@ xml*/
 
 # Model parameters
 *.onnx
+uni_tests/benchmark.py
+uni_tests/superpoint.py
+uni_tests/test_cmsis_nn_conv.py
diff --git a/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Convolution/ConvDW.hpp b/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Convolution/ConvDW.hpp
index e1cca08..4e47d55 100644
--- a/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Convolution/ConvDW.hpp
+++ b/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Convolution/ConvDW.hpp
@@ -95,8 +95,7 @@ __attribute__((always_inline)) inline void convcellDWPropagate(
 
             for (int output = 0; output < NB_OUTPUTS; ++output) {
                 const int channel = (output * NB_CHANNELS) / NB_OUTPUTS;
-                using Sum_T = typename std::conditional<std::is_floating_point<Input_T>::value, float, int32_t>::type;
-                Sum_T weightedSum = biasses[output];
+                Bias_T weightedSum = biasses[output];
 
                 for (int sy = 0; sy < KERNEL_HEIGHT; ++sy) {
                     if ((PADDING_Y != 0
diff --git a/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Softmax/aidge_softmax_chw_float32.h b/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Softmax/aidge_softmax_chw_float32.h
index 6ec9122..154c63b 100644
--- a/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Softmax/aidge_softmax_chw_float32.h
+++ b/aidge_export_arm_cortexm/_Aidge_Arm/kernels/Softmax/aidge_softmax_chw_float32.h
@@ -1,14 +1,13 @@
 #include <math.h>
 
-void aidge_softmax_chw_float32 (float* inputs, 
+void aidge_softmax_chw_float32(float* inputs, 
                             float* outputs,
                             const int inputDims[],
                             int axis,
                             const unsigned int size_inputDim,
                             const unsigned int size)
 {
-
-	axis += (axis >= 0 ) ? 0 : size_inputDim;
+    axis += (axis >= 0 ) ? 0 : size_inputDim;
 
     int postAxisElems = 1;
     for (unsigned int i = axis+1; i < size_inputDim; ++i) {
diff --git a/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/conv_dw_cinfig.jinja b/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/conv_dw_cinfig.jinja
deleted file mode 100644
index 71c8b59..0000000
--- a/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/conv_dw_cinfig.jinja
+++ /dev/null
@@ -1,40 +0,0 @@
-#define {{ name|upper }}_LAYER_H
-
-#include "typedefs.h"
-#include "nn_scaling_functions.hpp"
-
-{% include "./_def_io.jinja" %}
-{% include "./_meminfo.jinja" %}
-
-// Attributes
-#define {{ name|upper }}_KERNEL_HEIGHT {{ kernel_dims[1] }}
-#define {{ name|upper }}_KERNEL_WIDTH {{ kernel_dims[0] }}
-#define {{ name|upper }}_PADDING_Y {{ padding[1] }}
-#define {{ name|upper }}_PADDING_X {{ padding[0] }}
-#define {{ name|upper }}_STRIDE_Y {{ stride_dims[1] }}
-#define {{ name|upper }}_STRIDE_X {{ stride_dims[0] }}
-#define {{ name|upper }}_DILATION_Y {{ dilation_dims[1] }}
-#define {{ name|upper }}_DILATION_X {{ dilation_dims[0] }}
-{# #define {{ name|upper }}_GROUP {{ group }} #}
-
-// Activation/Scaling
-#define {{ name|upper }}_ACTIVATION {{ activation }}
-
-{%- if scaling_type == "floating_point" %}
-static const N2D2_Export::FloatingPointScaling {{ name|upper }}_SCALING = { {{scaling_value}} };
-{%- elif scaling_type == "fixed_point" %}
-static const N2D2_Export::FixedPointScaling<{{scaling_value}}, {{fractional_bits}}> {{ name|upper }}_SCALING;
-{%- elif scaling_type == "single_shift" %}
-static const N2D2_Export::SingleShiftScaling<{{shift_value}}> {{ name|upper }}_SCALING;
-{%- else %}
-static const N2D2_Export::NoScaling {{ name|upper }}_SCALING;
-{%- endif %}
-
-// Sizes
-#define {{ name|upper }}_WEIGHTS_SIZE {{ out_chan[0] * in_chan[0] * kernel_dims[1] * kernel_dims[0] }}
-#define {{ name|upper }}_BIASES_SIZE {{ out_chan[0] }}
-#define {{ name|upper }}_OUTPUTS_SIZE {{ out_chan[0] * out_height[0] * out_width[0] }}
-#define {{ name|upper }}_CHANNELS_SIZE {{ in_chan[0] * in_height[0] * in_width[0] }}
-
-
-#endif /* {{ name|upper }}_LAYER_H */
diff --git a/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/softmax.jinja b/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/softmax.jinja
index b48a02c..4c63894 100644
--- a/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/softmax.jinja
+++ b/aidge_export_arm_cortexm/_Aidge_Arm/templates/configuration/softmax.jinja
@@ -1,6 +1,5 @@
 
 {% include "./_def_io.jinja" %}
-{% include "./_meminfo.jinja" %}
 
 {#- For name header -#}
 #ifndef {{ name|upper }}_LAYER_H
@@ -12,5 +11,4 @@
 #define {{ name|upper }}_AXIS {{ axis }}
 #define {{ name|upper }}_INPUT_DIMS_SIZE {{ in_dims[0]|length}}
 static const int {{ name|upper }}_DIMS[] = { {{ in_dims[0] | join(', ') }} };
-
 #endif /* {{ name|upper }}_LAYER_H */
diff --git a/aidge_export_arm_cortexm/operators.py b/aidge_export_arm_cortexm/operators.py
index b27efa7..d3bf4e5 100644
--- a/aidge_export_arm_cortexm/operators.py
+++ b/aidge_export_arm_cortexm/operators.py
@@ -250,7 +250,7 @@ class ConvDW_ARMCortexM(ExportNodeCpp):
         # Use PaddedConv to add padding attribute
         self.attributes["padding"] = [0, 0]
 
-        self.config_template = str(ROOT / "_Aidge_Arm" / "templates" / "configuration" / "conv_dw_config.jinja")
+        self.config_template = str(ROOT / "_Aidge_Arm" / "templates" / "configuration" / "conv_config.jinja")
         self.forward_template = str(ROOT / "_Aidge_Arm" / "templates" / "forward_call" / "conv_dw_kernel.jinja")
         self.include_list = []
         self.kernels_to_copy = [
@@ -275,7 +275,7 @@ class PaddedConvDW_ARMCortexM(ExportNodeCpp):
                 self.attributes["dilation_dims"] = n.get_operator(
                 ).attr.dilation_dims
 
-        self.config_template = str(ROOT / "_Aidge_Arm" / "templates" / "configuration" / "conv_dw_config.jinja")
+        self.config_template = str(ROOT / "_Aidge_Arm" / "templates" / "configuration" / "conv_config.jinja")
         self.forward_template = str(ROOT / "_Aidge_Arm" / "templates" / "forward_call" / "conv_dw_kernel.jinja")
         self.include_list = []
         self.kernels_to_copy = [
-- 
GitLab