diff --git a/aidge_core/aidge_export_aidge/operator_export/add.py b/aidge_core/aidge_export_aidge/operator_export/add.py
new file mode 100644
index 0000000000000000000000000000000000000000..4eb7c3e37f0d63388a5bfe8600f184b9da2ffc49
--- /dev/null
+++ b/aidge_core/aidge_export_aidge/operator_export/add.py
@@ -0,0 +1,17 @@
+from aidge_core.aidge_export_aidge.registry import ExportSerialize
+from aidge_core.aidge_export_aidge import ROOT_EXPORT
+from aidge_core.export_utils import ExportNodeCpp
+from aidge_core import ImplSpec, IOSpec, dtype
+
+@ExportSerialize.register("Add", ImplSpec(IOSpec(dtype.any)))
+class Add(ExportNodeCpp):
+    def __init__(self, node, mem_info):
+        super().__init__(node, mem_info)
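+        # No configuration file template is needed for the Add export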
+        self.config_template = ""
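+        # Jinja template that emits the Aidge::Add() graph-constructor call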
+        self.forward_template = str(
+            ROOT_EXPORT / "templates/graph_ctor/add.jinja")
+        self.include_list = ["aidge/operator/Add.hpp"]
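+        # No kernel source files to copy for this operator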
+        self.kernels_to_copy = []
diff --git a/aidge_core/aidge_export_aidge/templates/graph_ctor/add.jinja b/aidge_core/aidge_export_aidge/templates/graph_ctor/add.jinja
new file mode 100644
index 0000000000000000000000000000000000000000..2bfaf93646fc24f6a44ac170a8c2c932f5daf0fc
--- /dev/null
+++ b/aidge_core/aidge_export_aidge/templates/graph_ctor/add.jinja
@@ -0,0 +1,9 @@
+{% filter indent(width=4, first=False) %}
+/*** {{name|upper}} ***/
+std::shared_ptr<Aidge::Node> {{name}} =
+        Aidge::Add(
+            "{{name}}"
+        );
+{% include "./_set_input.jinja" %}
+graph->add({{name}});
+{% endfilter %}
diff --git a/aidge_core/unit_tests/test_recipes.py b/aidge_core/unit_tests/test_recipes.py
index c8dd4c727fbaf8224e8d04111a5054caeb5e5c99..f4dd0220ecdc5950e1b1dcef0d8bf2d4782216bf 100644
--- a/aidge_core/unit_tests/test_recipes.py
+++ b/aidge_core/unit_tests/test_recipes.py
@@ -46,9 +46,9 @@ class test_recipes(unittest.TestCase):
 
     def test_fuse_matmul_add(self):
         matmul0 = aidge_core.MatMul(name="MatMul0")
-        add0 = aidge_core.Add(2, name="Add0")
+        add0 = aidge_core.Add(name="Add0")
         matmul1 = aidge_core.MatMul(name="MatMul1")
-        add1 = aidge_core.Add(2, name="Add1")
+        add1 = aidge_core.Add(name="Add1")
         w0 = aidge_core.Producer([1, 1], name="W0")
         w0.add_child(matmul0, 0, 0)
         b0 = aidge_core.Producer([1], name="B0")
diff --git a/aidge_core/unit_tests/test_topological_order.py b/aidge_core/unit_tests/test_topological_order.py
index 8e7f2e2d9b9770c2fae1e5c2812ba33113589134..01a69409e86c486ec2fb8c8bdb2a18ab0e3d9c1c 100644
--- a/aidge_core/unit_tests/test_topological_order.py
+++ b/aidge_core/unit_tests/test_topological_order.py
@@ -29,7 +29,7 @@ class test_topological_order(unittest.TestCase):
         loop0.get_operator().set_back_edges({1})
         assert not loop0.get_operator().is_back_edge(0)
         assert loop0.get_operator().is_back_edge(1)
-        add0 = aidge_core.Add(2, "add0")
+        add0 = aidge_core.Add("add0")
 
         loop0.add_child(add0, 0, 1)
         add0.add_child(loop0, 0, 1)
@@ -50,7 +50,7 @@ class test_topological_order(unittest.TestCase):
         loop0.get_operator().set_back_edges({0})
         assert not loop0.get_operator().is_back_edge(1)
         assert loop0.get_operator().is_back_edge(0)
-        add0 = aidge_core.Add(2, "add0")
+        add0 = aidge_core.Add("add0")
 
         loop0.add_child(add0, 0, 1)
         add0.add_child(loop0, 0, 0)
diff --git a/include/aidge/operator/Add.hpp b/include/aidge/operator/Add.hpp
index f96996079b9e89f80c78b8e409830369480705a8..827fc0c2732695364aa2393692d7040b8b1a0e9f 100644
--- a/include/aidge/operator/Add.hpp
+++ b/include/aidge/operator/Add.hpp
@@ -29,7 +29,7 @@ class Add_Op : public OperatorTensor,
 public:
     static const std::string Type;
 
-    Add_Op(const IOIndex_t nbIn);
+    Add_Op();
 
     /**
      * @brief Copy-constructor. Copy the operator attributes and its output tensor(s), but not its input tensors (the new operator has no input associated).
@@ -66,7 +66,7 @@ public:
     }
 };
 
-std::shared_ptr<Node> Add(const IOIndex_t nbIn, const std::string& name = "");
+std::shared_ptr<Node> Add(const std::string& name = "");
 }
 
 #endif /* AIDGE_CORE_OPERATOR_ADD_H_ */
diff --git a/python_binding/operator/pybind_Add.cpp b/python_binding/operator/pybind_Add.cpp
index 8a00a1cb4a419f1125411b5b1c823bf91570d62e..f8adfd5f4becb7677b3a59791f8549bb114fbbc4 100644
--- a/python_binding/operator/pybind_Add.cpp
+++ b/python_binding/operator/pybind_Add.cpp
@@ -22,14 +22,14 @@ namespace Aidge {
 
 void declare_Add(py::module &m) {
   py::class_<Add_Op, std::shared_ptr<Add_Op>, OperatorTensor>(m, "AddOp", py::multiple_inheritance())
-    .def(py::init<const IOIndex_t>(), py::arg("nb_inputs"))
+    .def(py::init<>())
     .def_static("get_inputs_name", &Add_Op::getInputsName)
     .def_static("get_outputs_name", &Add_Op::getOutputsName)
     .def_readonly_static("Type", &Add_Op::Type);
 
   declare_registrable<Add_Op>(m, "AddOp");
 
-  m.def("Add", &Add, py::arg("nb_inputs"), py::arg("name") = "");
+  m.def("Add", &Add, py::arg("name") = "");
 }
 
 void init_Add(py::module &m) {
diff --git a/src/data/Tensor.cpp b/src/data/Tensor.cpp
index abfc91c6cdf9fd4f6eb46100074b22083514d82e..6f60d2f15ce0e561c32d7bc5a7561c2f8d507588 100644
--- a/src/data/Tensor.cpp
+++ b/src/data/Tensor.cpp
@@ -35,7 +35,7 @@ Aidge::Tensor Aidge::Tensor::operator+(const Aidge::Tensor& other) const {
     AIDGE_ASSERT(mImpl->backend() == other.mImpl->backend(), "Tensors must have the same backend");
     AIDGE_ASSERT(dataType() == other.dataType(), "Tensors must have the same data type");
     AIDGE_ASSERT(dataFormat() == other.dataFormat(), "Tensors must have the same data format");
-    auto add_ = Add_Op(2);
+    auto add_ = Add_Op();
     add_.associateInput(0, std::make_shared<Tensor>(*this));
     add_.associateInput(1, std::make_shared<Tensor>(other));
     add_.setDataType(dataType());
diff --git a/src/operator/Add.cpp b/src/operator/Add.cpp
index 033c476c8a9e865fdf9d5670e295c3e4fb6101b3..f6fd0cd9fc647e29402d36f1f6838642e099ae6c 100644
--- a/src/operator/Add.cpp
+++ b/src/operator/Add.cpp
@@ -22,12 +22,10 @@
 
 const std::string Aidge::Add_Op::Type = "Add";
 
-Aidge::Add_Op::Add_Op(const IOIndex_t nbIn)
-    : OperatorTensor(Type, std::vector<InputCategory>(nbIn, InputCategory::Data), 1)
+Aidge::Add_Op::Add_Op()
+    : OperatorTensor(Type, {InputCategory::Data, InputCategory::Data}, 1)
 {
-    if (nbIn == 0) {
-        AIDGE_THROW_OR_ABORT(std::runtime_error, "Add operator should have at least one input.");
-    }
+    // Add now has a fixed two-input signature, so no input-count check is needed
 }
 
 Aidge::Add_Op::Add_Op(const Add_Op& op)
@@ -89,6 +87,8 @@ std::set<std::string> Aidge::Add_Op::getAvailableBackends() const {
     return Registrar<Add_Op>::getKeys();
 }
 
-std::shared_ptr<Aidge::Node> Aidge::Add(const IOIndex_t nbIn, const std::string& name) {
-    return std::make_shared<Node>(std::make_shared<Add_Op>(nbIn), name);
+////////////////////////////////////////////////////////////////////////////////
+
+std::shared_ptr<Aidge::Node> Aidge::Add(const std::string& name) {
+    return std::make_shared<Node>(std::make_shared<Add_Op>(), name);
 }
\ No newline at end of file
diff --git a/src/operator/MetaOperatorDefs/LSTM.cpp b/src/operator/MetaOperatorDefs/LSTM.cpp
index 9620f040472aed984afb99018cde5476ec5f60d3..2ed548805010a6cc87950c4d1f7b89edbea4f75c 100644
--- a/src/operator/MetaOperatorDefs/LSTM.cpp
+++ b/src/operator/MetaOperatorDefs/LSTM.cpp
@@ -35,14 +35,14 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
     auto input = Identity((!name.empty()) ? name + "_input" : "");
     auto hiddenState = Memorize(seqLength, (!name.empty()) ? name + "_hidden_state" : "");
     auto cellState = Memorize(seqLength, (!name.empty()) ? name + "_cell_state" : "");
-    auto add = Add(2, (!name.empty()) ? name + "_add" : "");
+    auto add = Add((!name.empty()) ? name + "_add" : "");
 
     // Forget gate
     auto forgetGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), (!name.empty()) ? name + "_forgetGateX" : "");
     input->addChild(forgetGateX, 0, 0);
     auto forgetGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), (!name.empty()) ? name + "_forgetGateH" : "");
     hiddenState->addChild(forgetGateH, 1, 0);
-    auto forgetGate = Add(2, (!name.empty()) ? name + "_forgetGate" : "");
+    auto forgetGate = Add((!name.empty()) ? name + "_forgetGate" : "");
     forgetGateX->addChild(forgetGate, 0, 0);
     forgetGateH->addChild(forgetGate, 0, 1);
     auto forgetGateAct = Sigmoid((!name.empty()) ? name + "_forgetGateAct" : "");
@@ -57,7 +57,7 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
     input->addChild(inputGateX, 0, 0);
     auto inputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), (!name.empty()) ? name + "_inputGateH" : "");
     hiddenState->addChild(inputGateH, 1, 0);
-    auto inputGate = Add(2, (!name.empty()) ? name + "_inputGate" : "");
+    auto inputGate = Add((!name.empty()) ? name + "_inputGate" : "");
     inputGateX->addChild(inputGate, 0, 0);
     inputGateH->addChild(inputGate, 0, 1);
     auto inputGateAct = Sigmoid((!name.empty()) ? name + "_inputGateAct" : "");
@@ -71,7 +71,7 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
     input->addChild(cellCandidateX, 0, 0);
     auto cellCandidateH = std::make_shared<Node>(std::make_shared<FC_Op>(), (!name.empty()) ? name + "_cellCandidateH" : "");
     hiddenState->addChild(cellCandidateH, 1, 0);
-    auto cellCandidate = Add(2, (!name.empty()) ? name + "_cellCandidate" : "");
+    auto cellCandidate = Add((!name.empty()) ? name + "_cellCandidate" : "");
     cellCandidateX->addChild(cellCandidate, 0, 0);
     cellCandidateH->addChild(cellCandidate, 0, 1);
     auto cellCandidateAct = Tanh((!name.empty()) ? name + "_cellCandidateAct" : "");
@@ -83,7 +83,7 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
     input->addChild(outputGateX, 0, 0);
     auto outputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), (!name.empty()) ? name + "_outputGateH" : "");
     hiddenState->addChild(outputGateH, 1, 0);
-    auto outputGate = Add(2, (!name.empty()) ? name + "_outputGate" : "");
+    auto outputGate = Add((!name.empty()) ? name + "_outputGate" : "");
     outputGateX->addChild(outputGate, 0, 0);
     outputGateH->addChild(outputGate, 0, 1);
     auto outputGateAct = Sigmoid((!name.empty()) ? name + "_outputGateAct" : "");
@@ -143,14 +143,14 @@ std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
     auto input = Identity("");
     auto hiddenState = Memorize(seqLength, "");
     auto cellState = Memorize(seqLength, "");
-    auto add = Add(2, "");
+    auto add = Add("");
 
     // Forget gate
     auto forgetGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
     input->addChild(forgetGateX, 0, 0);
     auto forgetGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
     hiddenState->addChild(forgetGateH, 1, 0);
-    auto forgetGate = Add(2, "");
+    auto forgetGate = Add("");
     forgetGateX->addChild(forgetGate, 0, 0);
     forgetGateH->addChild(forgetGate, 0, 1);
     auto forgetGateAct = Sigmoid("");
@@ -165,7 +165,7 @@ std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
     input->addChild(inputGateX, 0, 0);
     auto inputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
     hiddenState->addChild(inputGateH, 1, 0);
-    auto inputGate = Add(2, "");
+    auto inputGate = Add("");
     inputGateX->addChild(inputGate, 0, 0);
     inputGateH->addChild(inputGate, 0, 1);
     auto inputGateAct = Sigmoid("");
@@ -179,7 +179,7 @@ std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
     input->addChild(cellCandidateX, 0, 0);
     auto cellCandidateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
     hiddenState->addChild(cellCandidateH, 1, 0);
-    auto cellCandidate = Add(2, "");
+    auto cellCandidate = Add("");
     cellCandidateX->addChild(cellCandidate, 0, 0);
     cellCandidateH->addChild(cellCandidate, 0, 1);
     auto cellCandidateAct = Tanh("");
@@ -191,7 +191,7 @@ std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
     input->addChild(outputGateX, 0, 0);
     auto outputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
     hiddenState->addChild(outputGateH, 1, 0);
-    auto outputGate = Add(2,"");
+    auto outputGate = Add("");
     outputGateX->addChild(outputGate, 0, 0);
     outputGateH->addChild(outputGate, 0, 1);
     auto outputGateAct = Sigmoid("");
diff --git a/src/recipes/ConvToMatMul.cpp b/src/recipes/ConvToMatMul.cpp
index 31462861e1bbe29cb467ad719576ec86c2d46f7f..70be33932295aab49653bdc2853f4411ded919b4 100644
--- a/src/recipes/ConvToMatMul.cpp
+++ b/src/recipes/ConvToMatMul.cpp
@@ -75,7 +75,7 @@ size_t Aidge::convToMatMul(std::shared_ptr<GraphView> graphView) {
 
         // Handle bias
         if (convOp->getInput(2) && !convOp->getInput(2)->empty()) {
-            auto add = Add(2, (!convNode->name().empty()) ? convNode->name() + "_add" : "");
+            auto add = Add((!convNode->name().empty()) ? convNode->name() + "_add" : "");
             auto bReshapeProd = Producer(std::make_shared<Tensor>(Vector<int64_t>{{1, static_cast<int64_t>(convOp->getInput(2)->size()), 1, 1}}),
                 (!convNode->name().empty()) ? convNode->name() + "_b_reshape_shape_prod" : "",
                 true);
diff --git a/unit_tests/graph/Test_GraphView.cpp b/unit_tests/graph/Test_GraphView.cpp
index a08808ee5e6c2657a76213dcff80cec53b23e7ee..2fa06cf23b3b681211208a3e5bbea9226f0930b8 100644
--- a/unit_tests/graph/Test_GraphView.cpp
+++ b/unit_tests/graph/Test_GraphView.cpp
@@ -447,10 +447,10 @@ TEST_CASE("[core/graph] GraphView(getOrderedNodes)", "[GraphView][getOrderedNode
     auto data1 = Producer({2}, "data1");
     auto data2 = Producer({2}, "data2");
     auto data3 = Producer({2}, "data3");
-    auto add1 = Add(2, "add1");
-    auto add2 = Add(2, "add2");
+    auto add1 = Add("add1");
+    auto add2 = Add("add2");
     auto split1 = Split(2, 0, {1, 1}, "split1");
-    auto add3 = Add(3, "add3");
+    auto add3 = Add("add3");
     auto g = std::make_shared<GraphView>("TestGraph");
     data1->addChild(add1);
     data2->addChild(add1);
@@ -508,9 +508,9 @@ TEST_CASE("[core/graph] GraphView(getOrderedNodes)", "[GraphView][getOrderedNode
 TEST_CASE("[core/graph] GraphView(getOrderedNodes) cyclic", "[GraphView][getOrderedNodes]") {
     auto data1 = Producer({2}, "data1");
     auto data2 = Producer({2}, "data2");
-    auto add1 = Add(2, "add1");
+    auto add1 = Add("add1");
     auto mem1 = Memorize(1, "mem1");
-    auto add2 = Add(2, "add2");
+    auto add2 = Add("add2");
     auto g = std::make_shared<GraphView>("TestGraph");
     data1->addChild(add1);
     data2->addChild(add1);
diff --git a/unit_tests/graph/Test_Matching.cpp b/unit_tests/graph/Test_Matching.cpp
index d63b1e754a254e7ba69089ba465eb0226922f352..d6d98d4701cba900548d127879c9b3940cf1d739 100644
--- a/unit_tests/graph/Test_Matching.cpp
+++ b/unit_tests/graph/Test_Matching.cpp
@@ -51,10 +51,10 @@ TEST_CASE("[core/graph] Matching") {
         PaddedConv(8, 16, {3, 3}, "conv3", {1, 1}, {2, 2, 2, 2}),
         ReLU("relu3"),
         PaddedConv(8, 16, {5, 5}, "conv4", {1, 1}, {2, 2, 2, 2}),
-        Add(2, "add"),
+        Add("add"),
         PaddedConv(8, 16, {5, 5}, "conv5", {1, 1}, {2, 2, 2, 2}),
         ReLU("relu5"),
-        Add(2, "add2")
+        Add("add2")
     });
 
     g1->getNode("relu3")->addChild(g1->getNode("add"), 0, 1);
diff --git a/unit_tests/operator/Test_Operator.cpp b/unit_tests/operator/Test_Operator.cpp
index a050bbc4021b0c70a0d8faf6478eb2bd13ebdb58..6bd12c51ef367ad1cf1859afc56af8a21a706237 100644
--- a/unit_tests/operator/Test_Operator.cpp
+++ b/unit_tests/operator/Test_Operator.cpp
@@ -26,7 +26,7 @@ namespace Aidge {
 // TEST_CASE("[core/operator] Operator(computeReceptiveField)", "[Operator][computeReceptiveFiled]") {
 //     auto dataProvider1 = Producer({16, 3, 224, 224}, "dataProvider1");
 //     auto dataProvider2 = Producer({16, 3, 224, 224}, "dataProvider2");
-//     auto gen1 = Add(2);
+//     auto gen1 = Add();
 //     auto gen2 = ReLU();
 
 //     auto g = std::make_shared<GraphView>("TestGraph");
diff --git a/unit_tests/recipes/Test_MatMulToFC.cpp b/unit_tests/recipes/Test_MatMulToFC.cpp
index 2adf882ca69e0d5ca5f050d1b89cfb09d81b536b..28eae0be17297467a29eab4e868e074c336d4a12 100644
--- a/unit_tests/recipes/Test_MatMulToFC.cpp
+++ b/unit_tests/recipes/Test_MatMulToFC.cpp
@@ -27,9 +27,9 @@ TEST_CASE("[cpu/recipes] MatMulToFC", "[MatMulToFC][recipes]") {
     SECTION("with Add") {
         // generate the original GraphView
         auto matmul0 = MatMul("matmul0");
-        auto add0 = Add(2, "add0");
+        auto add0 = Add("add0");
         auto matmul1 = MatMul("matmul1");
-        auto add1 = Add(2, "add1");
+        auto add1 = Add("add1");
 
         auto b0 = Producer({5}, "B0");
         auto w0 = Producer({5, 5}, "W0");
@@ -76,7 +76,7 @@ TEST_CASE("[cpu/recipes] MatMulToFC", "[MatMulToFC][recipes]") {
         // generate the original GraphView
         auto matmul0 = MatMul("matmul0");
         auto matmul1 = MatMul("matmul1");
-        auto add1 = Add(2, "add1");
+        auto add1 = Add("add1");
 
         auto w0 = Producer({5, 5}, "W0");
         auto b1 = Producer({5}, "B1");