From 3b3f7548be30cee23711123da00ce6f6d966ced6 Mon Sep 17 00:00:00 2001
From: Jerome Hue <jerome.hue@cea.fr>
Date: Wed, 27 Nov 2024 15:46:28 +0100
Subject: [PATCH] feat: Add basic leaky operator declaration
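
Leaky() builds a "Leaky" MetaOperator from elementary operators
(Producer, Mul, Add, Sub, Heaviside, Memorize, Identity) implementing a
leaky integrate-and-fire step:

    U[T] = Input[T] + beta * U[T-1] - S[T-1] * U_th
    S[T] = 1 if U[T] - U_th > 0, 0 otherwise

The decay rate beta, the threshold U_th and the sequence length are
hardcoded for now, and only the soft-reset path is wired up (hard reset
throws). LeakyOp() is declared but not implemented yet.

Usage sketch (see the added unit test):

    auto myLeaky = Leaky();
    // 3 ordered inputs: data, potential Memorize init, spike Memorize init
    // 2 ordered outputs: accumulated potential and spike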

---
 include/aidge/operator/MetaOperatorDefs.hpp |   3 +
 src/operator/MetaOperatorDefs/LSTM.cpp      |   2 -
 src/operator/MetaOperatorDefs/Leaky.cpp     | 124 ++++++++++++++++++++
 unit_tests/operator/Test_MetaOperator.cpp   |  14 +++
 4 files changed, 141 insertions(+), 2 deletions(-)
 create mode 100644 src/operator/MetaOperatorDefs/Leaky.cpp

diff --git a/include/aidge/operator/MetaOperatorDefs.hpp b/include/aidge/operator/MetaOperatorDefs.hpp
index 481a7795e..02237d1d5 100644
--- a/include/aidge/operator/MetaOperatorDefs.hpp
+++ b/include/aidge/operator/MetaOperatorDefs.hpp
@@ -165,6 +165,9 @@ std::shared_ptr<Node> LSTM(DimSize_t in_channels,
 
 std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length);
 
+std::shared_ptr<MetaOperator_Op> LeakyOp();
+std::shared_ptr<Node> Leaky(const std::string& name = "");
+
 }  // namespace Aidge
 
 #endif /* AIDGE_CORE_OPERATOR_METAOPERATORDEFS_H_ */
diff --git a/src/operator/MetaOperatorDefs/LSTM.cpp b/src/operator/MetaOperatorDefs/LSTM.cpp
index 2ed548805..22c0469b3 100644
--- a/src/operator/MetaOperatorDefs/LSTM.cpp
+++ b/src/operator/MetaOperatorDefs/LSTM.cpp
@@ -11,7 +11,6 @@
 
 #include "aidge/operator/MetaOperatorDefs.hpp"
 
-#include <array>
 #include <memory>
 #include <string>
 
@@ -20,7 +19,6 @@
 #include "aidge/operator/Mul.hpp"
 #include "aidge/operator/FC.hpp"
 #include "aidge/operator/Identity.hpp"
-#include "aidge/operator/Concat.hpp"
 #include "aidge/operator/Tanh.hpp"
 
 namespace Aidge {
diff --git a/src/operator/MetaOperatorDefs/Leaky.cpp b/src/operator/MetaOperatorDefs/Leaky.cpp
new file mode 100644
index 000000000..98927e19b
--- /dev/null
+++ b/src/operator/MetaOperatorDefs/Leaky.cpp
@@ -0,0 +1,124 @@
+#include "aidge/filler/Filler.hpp"
+#include "aidge/operator/Add.hpp"
+#include "aidge/operator/Heaviside.hpp"
+#include "aidge/operator/Identity.hpp"
+#include "aidge/operator/Memorize.hpp"
+#include "aidge/operator/MetaOperatorDefs.hpp"
+#include "aidge/operator/Mul.hpp"
+#include "aidge/operator/Producer.hpp"
+#include "aidge/operator/Sub.hpp"
+
+namespace Aidge {
+
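+// Memorize output indices, as used below: 0 is the data output, 1 is the
+// recurrent (delayed) data output fed back at the next time step.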
+constexpr auto memorizeOpDataOutputRecIndex = 1;
+constexpr auto memorizeOpDataOutputIndex = 0;
+
+std::shared_ptr<Node> Leaky(const std::string &name) {
+
+    Log::warn("! Lots of parameters are hardcoded");
+    const auto softReset = true;
+    const auto beta = 0.9;
+    const auto thresholdValue = 1.0;
+    const auto seqLength = 2;
+
+    auto microGraph = std::make_shared<GraphView>();
+
+    auto inputNode = Identity((!name.empty()) ? name + "_input" : "");
+    auto addNode = Add(!name.empty() ? name + "_add" : "");
+    auto mulNode = Mul(!name.empty() ? name + "_mul" : "");
+    auto subNode = Sub(!name.empty() ? name + "_sub" : "");
+    auto hsNode = Heaviside(0, !name.empty() ? name + "_hs" : "");
+    auto subNode2 = Sub(!name.empty() ? name + "_threshold" : "");
+    auto reset = Mul(!name.empty() ? name + "_reset" : "");
+
+    // FIXME: use beta instead of a fixed value here, and use the real input
+    // dimensions instead of the hardcoded 3x2 placeholder.
+    auto betaTensor = std::make_shared<Tensor>(Array2D<float, 3, 2>{});
+    auto uthTensor =
+        std::make_shared<Tensor>(static_cast<float>(thresholdValue));
+    uniformFiller<float>(betaTensor, beta, beta);
+    uniformFiller<float>(uthTensor, thresholdValue, thresholdValue);
+
+    auto decayRate = Producer(betaTensor, "leaky_beta", true);
+    auto uth = Producer(uthTensor, "leaky_uth", true);
+
+    auto potentialMem =
+        Memorize(seqLength, (!name.empty()) ? name + "_potential" : "");
+    auto spikeMem =
+        Memorize(seqLength, (!name.empty()) ? name + "_spike" : "");
+
+    // U[T] = Input[T] + beta * U[T-1] - S[T-1] * U_th
+    // with S[T] = | 1, if U[T] - U_th > 0
+    //             | 0 otherwise
+
+    // beta * U[T-1]
+    decayRate->addChild(/*otherNode=*/mulNode, /*outId=*/0, /*otherInId=*/1);
+    potentialMem->addChild(mulNode, 1, 0);
+
+    // Input[T] + beta * U[T-1]
+    mulNode->addChild(/*otherNode=*/addNode, /*outId=*/0, /*otherInId=*/1);
+    inputNode->addChild(/*otherNode=*/addNode, /*outId=*/0, /*otherInId=*/0);
+
+    // S[T-1] * Uth
+    spikeMem->addChild(reset,
+                       /*outId=*/memorizeOpDataOutputRecIndex,
+                       /*otherInId=*/0);
+    if (softReset) {
+        uth->addChild(reset, 0, 1);
+    } else {
+        // addNode->addChild(reset, 0, 1);
+        AIDGE_THROW_OR_ABORT(std::runtime_error,
+                             "Hard reset not implemented yet.");
+    }
+
+    // Input[T] + beta * U[T-1] - S[T-1] * Uth
+    addNode->addChild(subNode, 0, 0);
+    reset->addChild(subNode, 0, 1);
+
+    // U[T] = Input[T] + beta * U[T-1] - S[T-1] * U_th
+    subNode->addChild(potentialMem, 0, 0);
+
+    // U[T] - U_th
+    subNode->addChild(subNode2, 0, 0);
+    uth->addChild(subNode2, 0, 1);
+
+    // with S[T] = | 1, if U[T] - U_th > 0
+    subNode2->addChild(hsNode, 0, 0);
+    hsNode->addChild(spikeMem, 0, 0);
+
+    microGraph->add(inputNode);
+    microGraph->add({addNode,
+                     mulNode,
+                     potentialMem,
+                     decayRate,
+                     uth,
+                     spikeMem,
+                     hsNode,
+                     subNode,
+                     subNode2,
+                     reset},
+                    false);
+
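+    // Ordered inputs: 0 = data, 1 = potential Memorize init, 2 = spike Memorize init.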
+    microGraph->setOrderedInputs(
+        {{inputNode, 0}, {potentialMem, 1}, {spikeMem, 1}});
+
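+    // Ordered outputs: 0 = accumulated potential (addNode), 1 = spike (hsNode).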
+    microGraph->setOrderedOutputs({//{potentialMem, memorizeOpDataOutputIndex},
+                                   //{spikeMem, memorizeOpDataOutputIndex}
+                                   {addNode, 0},
+                                   {hsNode, 0}});
+
+    auto metaOp = MetaOperator(/*type=*/"Leaky",
+                               /*graph=*/microGraph,
+                               /*forcedInputsCategory=*/{},
+                               /*name=*/name);
+
+    // addProducer(metaOp, 1, {1,2}, "memorizeInit1");
+    // addProducer(metaOp, 2, {1,2}, "memorizeInit2");
+    return metaOp;
+}
+
+std::shared_ptr<MetaOperator_Op> LeakyOp() {
+    AIDGE_THROW_OR_ABORT(std::runtime_error, "Not implemented yet");
+}
+} // namespace Aidge
diff --git a/unit_tests/operator/Test_MetaOperator.cpp b/unit_tests/operator/Test_MetaOperator.cpp
index 6711e1524..97aea2414 100644
--- a/unit_tests/operator/Test_MetaOperator.cpp
+++ b/unit_tests/operator/Test_MetaOperator.cpp
@@ -23,6 +23,7 @@
 #include "aidge/graph/GraphView.hpp"
 #include "aidge/graph/Testing.hpp"
 #include "aidge/recipes/Recipes.hpp"
+#include "aidge/utils/ErrorHandling.hpp"
 
 using namespace Aidge;
 
@@ -145,4 +146,17 @@ TEST_CASE("[core/operators] MetaOperator", "[Operator][MetaOperator]") {
 
         REQUIRE(g->getNodes().size() == 33);
     }
+
+    SECTION("Leaky") {
+        auto myLeaky = Leaky();
+        auto op = std::static_pointer_cast<OperatorTensor>(myLeaky->getOperator());
+
+        // 3 inputs:
+        // - 1 for the actual input data,
+        // - 2 for the Memorize init tensors (potential and spike).
+        REQUIRE(myLeaky->nbInputs() == 3);
+    }
 }
-- 
GitLab