From 0cf74856aa82e8f4873af2d2e71af33ddd7e8dd1 Mon Sep 17 00:00:00 2001
From: Axel Farrugia <axel.farrugia@cea.fr>
Date: Fri, 2 May 2025 13:44:16 +0200
Subject: [PATCH] [Refactor] get_node_from_metaop now returns a list of all
 matching nodes found

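get_node_from_metaop previously returned a single node (or None). It now
returns a list of every matching node found in the meta-operator, so callers
index into the result and rely on the list's truthiness to test for presence.

A minimal usage sketch of the new call pattern, assuming node is the
meta-operator node handled by the exporter classes below:

    from aidge_core.export_utils import get_node_from_metaop

    # Now returns a list of matching nodes (empty list if none found)
    conv_nodes = get_node_from_metaop(node, "Conv2D")
    if conv_nodes:
        kernel_dims = conv_nodes[0].get_operator().attr.kernel_dims
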
---
 aidge_export_cpp/operators/Conv.py      | 15 ++++++------
 aidge_export_cpp/operators/ElemWise.py  | 31 +++++++++----------------
 aidge_export_cpp/operators/Fc.py        | 12 ++++------
 aidge_export_cpp/operators/Pool.py      | 17 +++++++-------
 aidge_export_cpp/operators/Quantizer.py | 14 ++++++-----
 5 files changed, 39 insertions(+), 50 deletions(-)

diff --git a/aidge_export_cpp/operators/Conv.py b/aidge_export_cpp/operators/Conv.py
index e1d17bc..c8137c5 100644
--- a/aidge_export_cpp/operators/Conv.py
+++ b/aidge_export_cpp/operators/Conv.py
@@ -18,9 +18,9 @@ class Conv(ExportNodeCpp):
 
         # Browse the metaop to update kernel attributes
         ConvNode = get_node_from_metaop(node, "Conv2D") 
-        self.attributes["kernel_dims"] = ConvNode.get_operator().attr.kernel_dims
-        self.attributes["stride_dims"] = ConvNode.get_operator().attr.stride_dims
-        self.attributes["dilation_dims"] = ConvNode.get_operator().attr.dilation_dims
+        self.attributes["kernel_dims"] = ConvNode[0].get_operator().attr.kernel_dims
+        self.attributes["stride_dims"] = ConvNode[0].get_operator().attr.stride_dims
+        self.attributes["dilation_dims"] = ConvNode[0].get_operator().attr.dilation_dims
 
         # Template for layer configutation file generation
         self.config_template = str(ROOT / "templates" / "configuration" / "convolution_config.jinja")
@@ -61,7 +61,7 @@ class PadConv(QConv):
 
         # Browse the metaop to update kernel attributes
         PadNode = get_node_from_metaop(node, "Pad2D")
-        self.attributes["padding"] = PadNode.get_operator().attr.begin_end_borders
+        self.attributes["padding"] = PadNode[0].get_operator().attr.begin_end_borders
 
 
 @ExportLibCpp.register_metaop("ConvAct", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -70,11 +70,10 @@ class ConvAct(QConv):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation     
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 @ExportLibCpp.register_metaop("PadConvAct", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
 class PadConvAct(PadConv, ConvAct):
diff --git a/aidge_export_cpp/operators/ElemWise.py b/aidge_export_cpp/operators/ElemWise.py
index 4603509..7d073ca 100644
--- a/aidge_export_cpp/operators/ElemWise.py
+++ b/aidge_export_cpp/operators/ElemWise.py
@@ -66,11 +66,10 @@ class AddAct(QAdd):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 
 @ExportLibCpp.register("Sub", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -92,34 +91,26 @@ class SubAct(QSub):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 
 @ExportLibCpp.register("Mul", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class Mul(ElemWise):
+class Mul(QElemWise):
     def __init__(self, node, mem_info):
         super().__init__(node, mem_info)
         self.attributes["elemwise_op"] = "Mul"
 
 
-@ExportLibCpp.register_metaop("QMul", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class QMul(QElemWise, Mul):
-    def __init__(self, node, mem_info):
-        super().__init__(node, mem_info)
-
-
 @ExportLibCpp.register_metaop("MulAct", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class MulAct(QMul):
+class MulAct(Mul):
     def __init__(self, node, mem_info):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
\ No newline at end of file
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
\ No newline at end of file
diff --git a/aidge_export_cpp/operators/Fc.py b/aidge_export_cpp/operators/Fc.py
index b83754b..d32d20e 100644
--- a/aidge_export_cpp/operators/Fc.py
+++ b/aidge_export_cpp/operators/Fc.py
@@ -1,5 +1,5 @@
 import aidge_core
-from aidge_core.export_utils import ExportNodeCpp
+from aidge_core.export_utils import ExportNodeCpp, get_node_from_metaop
 from aidge_export_cpp import ROOT, ExportLibCpp, set_scaling_attributes
 
 @ExportLibCpp.register("FC", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -53,9 +53,7 @@ class FCAct(QFC):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        for n in node.get_operator().get_micro_graph().get_nodes():
-            if n.type() == "ReLU":
-                self.attributes["activation"] = "Rectifier"
-            elif n.type() == "LeakyReLU":
-                aidge_core.Log.fatal(f"{n.type()} activation is not yet supported.")
-                # TODO : Should not be checked manually for each activation
+        if get_node_from_metaop(node, "ReLU"):
+            self.attributes["activation"] = "Rectifier"
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
diff --git a/aidge_export_cpp/operators/Pool.py b/aidge_export_cpp/operators/Pool.py
index bf7e7eb..10d595e 100644
--- a/aidge_export_cpp/operators/Pool.py
+++ b/aidge_export_cpp/operators/Pool.py
@@ -38,7 +38,7 @@ class PadPool(Pool):
 
         # Browse the metaop to update kernel attributes
         PadNode = get_node_from_metaop(node, "Pad2D")
-        self.attributes["padding"] = PadNode.get_operator().attr.begin_end_borders
+        self.attributes["padding"] = PadNode[0].get_operator().attr.begin_end_borders
 
 
 class PoolAct(Pool):
@@ -46,11 +46,10 @@ class PoolAct(Pool):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 
 @ExportLibCpp.register("MaxPooling2D", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -61,8 +60,8 @@ class MaxPool(Pool):
         # Browse the metaop to update kernel attributes
         PoolNode = get_node_from_metaop(node, "MaxPooling2D")
         self.attributes["pool_type"] = "Max"
-        self.attributes["kernel_dims"] = PoolNode.get_operator().attr.kernel_dims
-        self.attributes["stride_dims"] = PoolNode.get_operator().attr.stride_dims
+        self.attributes["kernel_dims"] = PoolNode[0].get_operator().attr.kernel_dims
+        self.attributes["stride_dims"] = PoolNode[0].get_operator().attr.stride_dims
 
 
 @ExportLibCpp.register_metaop("PadMaxPool", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -91,8 +90,8 @@ class AvgPool(Pool):
         # Browse the metaop to update kernel attributes
         PoolNode = get_node_from_metaop(node, "AvgPooling2D")
         self.attributes["pool_type"] = "Average"
-        self.attributes["kernel_dims"] = PoolNode.get_operator().attr.kernel_dims
-        self.attributes["stride_dims"] = PoolNode.get_operator().attr.stride_dims
+        self.attributes["kernel_dims"] = PoolNode[0].get_operator().attr.kernel_dims
+        self.attributes["stride_dims"] = PoolNode[0].get_operator().attr.stride_dims
 
 
 @ExportLibCpp.register_metaop("PadAvgPool", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
diff --git a/aidge_export_cpp/operators/Quantizer.py b/aidge_export_cpp/operators/Quantizer.py
index a33bc14..51f5c23 100644
--- a/aidge_export_cpp/operators/Quantizer.py
+++ b/aidge_export_cpp/operators/Quantizer.py
@@ -3,7 +3,7 @@ from aidge_core.export_utils import ExportNodeCpp, get_node_from_metaop
 from aidge_export_cpp import ROOT, ExportLibCpp, set_scaling_attributes
 
 @ExportLibCpp.register_metaop("Quantizer", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class CppRescaling(ExportNodeCpp):
+class Quantizer(ExportNodeCpp):
     def __init__(self, node, mem_info):
         super().__init__(node, mem_info)
 
@@ -15,11 +15,8 @@ class CppRescaling(ExportNodeCpp):
         self.attributes["aidge_cmp"] = node.attributes().has_attr("aidge_cmp")
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation     
         
         # Set scaling attributes
         set_scaling_attributes(self, node)
@@ -45,4 +42,9 @@ class CppRescaling(ExportNodeCpp):
         # Include aidge outputs within the fwd file
         if self.attributes["aidge_cmp"]:
             self.include_list.append("network/utils.hpp")   # aidge_cmp function
-            self.include_list.append("data/aidge_outputs/" + node.name() + ".hpp")
\ No newline at end of file
+            self.include_list.append("data/aidge_outputs/" + node.name() + ".hpp")
+
+@ExportLibCpp.register_metaop("QMul", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
+class QMul(Quantizer):
+    def __init__(self, node, mem_info):
+        super().__init__(node, mem_info)
-- 
GitLab