diff --git a/aidge_export_cpp/operators/Conv.py b/aidge_export_cpp/operators/Conv.py
index e1d17bc637a98bb237d69fabe901542a0871dc2c..c8137c51377c103855b9c2d133707124ebef64c5 100644
--- a/aidge_export_cpp/operators/Conv.py
+++ b/aidge_export_cpp/operators/Conv.py
@@ -18,9 +18,9 @@ class Conv(ExportNodeCpp):
 
         # Browse the metaop to update kernel attributes
         ConvNode = get_node_from_metaop(node, "Conv2D") 
-        self.attributes["kernel_dims"] = ConvNode.get_operator().attr.kernel_dims
-        self.attributes["stride_dims"] = ConvNode.get_operator().attr.stride_dims
-        self.attributes["dilation_dims"] = ConvNode.get_operator().attr.dilation_dims
+        self.attributes["kernel_dims"] = ConvNode[0].get_operator().attr.kernel_dims
+        self.attributes["stride_dims"] = ConvNode[0].get_operator().attr.stride_dims
+        self.attributes["dilation_dims"] = ConvNode[0].get_operator().attr.dilation_dims
 
         # Template for layer configutation file generation
         self.config_template = str(ROOT / "templates" / "configuration" / "convolution_config.jinja")
@@ -61,7 +61,7 @@ class PadConv(QConv):
 
         # Browse the metaop to update kernel attributes
         PadNode = get_node_from_metaop(node, "Pad2D")
-        self.attributes["padding"] = PadNode.get_operator().attr.begin_end_borders
+        self.attributes["padding"] = PadNode[0].get_operator().attr.begin_end_borders
 
 
 @ExportLibCpp.register_metaop("ConvAct", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -70,11 +70,10 @@ class ConvAct(QConv):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation     
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 @ExportLibCpp.register_metaop("PadConvAct", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
 class PadConvAct(PadConv, ConvAct):
diff --git a/aidge_export_cpp/operators/ElemWise.py b/aidge_export_cpp/operators/ElemWise.py
index 460350948961d7435269c6a9c5f1046ae11b98cd..7d073ca9549aa0fb67a2c63562536a7c6808e6cd 100644
--- a/aidge_export_cpp/operators/ElemWise.py
+++ b/aidge_export_cpp/operators/ElemWise.py
@@ -66,11 +66,10 @@ class AddAct(QAdd):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 
 @ExportLibCpp.register("Sub", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -92,34 +91,26 @@ class SubAct(QSub):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 
 @ExportLibCpp.register("Mul", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class Mul(ElemWise):
+class Mul(QElemWise):
     def __init__(self, node, mem_info):
         super().__init__(node, mem_info)
         self.attributes["elemwise_op"] = "Mul"
 
 
-@ExportLibCpp.register_metaop("QMul", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class QMul(QElemWise, Mul):
-    def __init__(self, node, mem_info):
-        super().__init__(node, mem_info)
-
-
 @ExportLibCpp.register_metaop("MulAct", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class MulAct(QMul):
+class MulAct(Mul):
     def __init__(self, node, mem_info):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
\ No newline at end of file
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
\ No newline at end of file
diff --git a/aidge_export_cpp/operators/Fc.py b/aidge_export_cpp/operators/Fc.py
index b83754be36bc8f79dc981cbeb41bb9a86aa4e50a..d32d20e2fad90f8418ee58067f1cd6e6c7e72065 100644
--- a/aidge_export_cpp/operators/Fc.py
+++ b/aidge_export_cpp/operators/Fc.py
@@ -1,5 +1,5 @@
 import aidge_core
-from aidge_core.export_utils import ExportNodeCpp
+from aidge_core.export_utils import ExportNodeCpp, get_node_from_metaop
 from aidge_export_cpp import ROOT, ExportLibCpp, set_scaling_attributes
 
 @ExportLibCpp.register("FC", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -53,9 +53,7 @@ class FCAct(QFC):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        for n in node.get_operator().get_micro_graph().get_nodes():
-            if n.type() == "ReLU":
-                self.attributes["activation"] = "Rectifier"
-            elif n.type() == "LeakyReLU":
-                aidge_core.Log.fatal(f"{n.type()} activation is not yet supported.")
-                # TODO : Should not be checked manually for each activation
+        if get_node_from_metaop(node, "ReLU"):
+            self.attributes["activation"] = "Rectifier"
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
diff --git a/aidge_export_cpp/operators/Pool.py b/aidge_export_cpp/operators/Pool.py
index bf7e7eb81d3e86227db545acc20d9c894218b8da..10d595e5ed4a76c22bcc15f90d8c693b8dbf2144 100644
--- a/aidge_export_cpp/operators/Pool.py
+++ b/aidge_export_cpp/operators/Pool.py
@@ -38,7 +38,7 @@ class PadPool(Pool):
 
         # Browse the metaop to update kernel attributes
         PadNode = get_node_from_metaop(node, "Pad2D")
-        self.attributes["padding"] = PadNode.get_operator().attr.begin_end_borders
+        self.attributes["padding"] = PadNode[0].get_operator().attr.begin_end_borders
 
 
 class PoolAct(Pool):
@@ -46,11 +46,10 @@ class PoolAct(Pool):
         super().__init__(node, mem_info)
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation    
+        else:
+            aidge_core.Log.error(f"{node.type()} activation is not yet supported.")
 
 
 @ExportLibCpp.register("MaxPooling2D", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -61,8 +60,8 @@ class MaxPool(Pool):
         # Browse the metaop to update kernel attributes
         PoolNode = get_node_from_metaop(node, "MaxPooling2D")
         self.attributes["pool_type"] = "Max"
-        self.attributes["kernel_dims"] = PoolNode.get_operator().attr.kernel_dims
-        self.attributes["stride_dims"] = PoolNode.get_operator().attr.stride_dims
+        self.attributes["kernel_dims"] = PoolNode[0].get_operator().attr.kernel_dims
+        self.attributes["stride_dims"] = PoolNode[0].get_operator().attr.stride_dims
 
 
 @ExportLibCpp.register_metaop("PadMaxPool", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
@@ -91,8 +90,8 @@ class AvgPool(Pool):
         # Browse the metaop to update kernel attributes
         PoolNode = get_node_from_metaop(node, "AvgPooling2D")
         self.attributes["pool_type"] = "Average"
-        self.attributes["kernel_dims"] = PoolNode.get_operator().attr.kernel_dims
-        self.attributes["stride_dims"] = PoolNode.get_operator().attr.stride_dims
+        self.attributes["kernel_dims"] = PoolNode[0].get_operator().attr.kernel_dims
+        self.attributes["stride_dims"] = PoolNode[0].get_operator().attr.stride_dims
 
 
 @ExportLibCpp.register_metaop("PadAvgPool", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
diff --git a/aidge_export_cpp/operators/Quantizer.py b/aidge_export_cpp/operators/Quantizer.py
index a33bc147ff5677700ccf96413b5e84edffaf41fe..51f5c23da24e7c6a47c162314f54a15c8845fc00 100644
--- a/aidge_export_cpp/operators/Quantizer.py
+++ b/aidge_export_cpp/operators/Quantizer.py
@@ -3,7 +3,7 @@ from aidge_core.export_utils import ExportNodeCpp, get_node_from_metaop
 from aidge_export_cpp import ROOT, ExportLibCpp, set_scaling_attributes
 
 @ExportLibCpp.register_metaop("Quantizer", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
-class CppRescaling(ExportNodeCpp):
+class Quantizer(ExportNodeCpp):
     def __init__(self, node, mem_info):
         super().__init__(node, mem_info)
 
@@ -15,11 +15,8 @@ class CppRescaling(ExportNodeCpp):
         self.attributes["aidge_cmp"] = node.attributes().has_attr("aidge_cmp")
 
         # Browse the metaop to update kernel attributes
-        if get_node_from_metaop(node, "ReLU") is not None:
+        if get_node_from_metaop(node, "ReLU"):
             self.attributes["activation"] = "Rectifier"
-        elif get_node_from_metaop(node, "LeakyReLU") is not None:
-            aidge_core.Log.fatal(f"{node.type()} activation is not yet supported.")
-            # TODO : Should not be checked manually for each activation     
         
         # Set scaling attributes
         set_scaling_attributes(self, node)
@@ -45,4 +42,9 @@ class CppRescaling(ExportNodeCpp):
         # Include aidge outputs within the fwd file
         if self.attributes["aidge_cmp"]:
             self.include_list.append("network/utils.hpp")   # aidge_cmp function
-            self.include_list.append("data/aidge_outputs/" + node.name() + ".hpp")
\ No newline at end of file
+            self.include_list.append("data/aidge_outputs/" + node.name() + ".hpp")
+
+@ExportLibCpp.register_metaop("QMul", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
+class QMul(Quantizer):
+    def __init__(self, node, mem_info):
+        super().__init__(node, mem_info)