Skip to content
Snippets Groups Projects
Commit 3b3f7548 authored by Jerome Hue's avatar Jerome Hue Committed by Olivier BICHLER
Browse files

feat: Add basic leaky operator declaration

parent e82fd3fc
No related branches found
No related tags found
2 merge requests!318[Upd] release version 0.5.0,!283[Add] Operator Leaky_MetaOperator
......@@ -165,6 +165,9 @@ std::shared_ptr<Node> LSTM(DimSize_t in_channels,
/// @brief Creates the LSTM meta-operator (operator only, no wrapping Node).
/// @param seq_length Number of time steps the recurrence is unrolled for.
std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length);
/// @brief Creates the Leaky meta-operator (operator only, no wrapping Node).
/// NOTE(review): the .cpp implementation currently aborts with
/// "Not implemented yet" — use Leaky() instead.
std::shared_ptr<MetaOperator_Op> LeakyOp();
/// @brief Creates a Leaky (leaky integrate-and-fire style) node built as a
/// meta-operator micro-graph. See the implementation for the update equation.
/// @param name Optional base name; sub-node names are derived from it.
std::shared_ptr<Node> Leaky(const std::string& name = "");
} // namespace Aidge
#endif /* AIDGE_CORE_OPERATOR_METAOPERATORDEFS_H_ */
......@@ -11,7 +11,6 @@
#include "aidge/operator/MetaOperatorDefs.hpp"
#include <array>
#include <memory>
#include <string>
......@@ -20,7 +19,6 @@
#include "aidge/operator/Mul.hpp"
#include "aidge/operator/FC.hpp"
#include "aidge/operator/Identity.hpp"
#include "aidge/operator/Concat.hpp"
#include "aidge/operator/Tanh.hpp"
namespace Aidge {
......
#include "aidge/filler/Filler.hpp"
#include "aidge/operator/Add.hpp"
#include "aidge/operator/Heaviside.hpp"
#include "aidge/operator/Identity.hpp"
#include "aidge/operator/Memorize.hpp"
#include "aidge/operator/MetaOperatorDefs.hpp"
#include "aidge/operator/Mul.hpp"
#include "aidge/operator/Producer.hpp"
#include "aidge/operator/Sub.hpp"
namespace Aidge {
// Output indices of the Memorize operator, as used when wiring the
// micro-graph below: index 1 appears to be the recurrent (previous-step)
// data output, index 0 the current-step data output.
// NOTE(review): assumed from usage in Leaky(); confirm against Memorize_Op.
constexpr auto memorizeOpDataOutputRecIndex = 1;
constexpr auto memorizeOpDataOutputIndex = 0;
/// @brief Builds a Leaky (leaky integrate-and-fire style) node as a
/// meta-operator micro-graph implementing, with soft reset:
///   U[T] = Input[T] + beta * U[T-1] - S[T-1] * U_th
///   S[T] = 1 if U[T] - U_th > 0, else 0 (Heaviside)
/// @param name Base name; sub-nodes are suffixed from it (empty -> anonymous).
/// @return The wrapping meta-operator Node. Ordered inputs: data, potential
///         Memorize init, spike Memorize init. Ordered outputs: U[T], S[T].
std::shared_ptr<Node> Leaky(const std::string &name) {

    Log::warn("! Lots of parameters are hardcoded");

    // FIXME: hyper-parameters are hardcoded; expose them as function
    // parameters once the operator stabilizes.
    const auto softReset = true;
    const auto beta = 0.9;
    const auto thresholdValue = 1.0;
    const auto seqLength = 2;

    auto microGraph = std::make_shared<GraphView>();

    auto inputNode = Identity((!name.empty()) ? name + "_input" : "");
    auto addNode = Add(!name.empty() ? name + "_add" : "");
    auto mulNode = Mul(!name.empty() ? name + "_mul" : "");
    auto subNode = Sub(!name.empty() ? name + "_sub" : "");
    auto hsNode = Heaviside(0, !name.empty() ? name + "_hs" : "");
    auto subNode2 = Sub(!name.empty() ? name + "_threshold" : "");
    auto reset = Mul(!name.empty() ? name + "_reset" : "");

    // FIXME: Use real input dimensions here instead of the fixed 3x2 shape.
    auto betaTensor = std::make_shared<Tensor>(Array2D<float, 3, 2>{});
    auto uthTensor =
        std::make_shared<Tensor>(static_cast<float>(thresholdValue));
    // Constant tensors: fill every element with the (uniform) value.
    uniformFiller<float>(betaTensor, beta, beta);
    uniformFiller<float>(uthTensor, thresholdValue, thresholdValue);

    auto decayRate = Producer(betaTensor, "leaky_beta", true);
    auto uth = Producer(uthTensor, "leaky_uth", true);

    // Recurrent state holders for U (potential) and S (spike) across steps.
    auto potentialMem =
        Memorize(seqLength, (!name.empty()) ? name + "_potential" : "");
    auto spikeMem =
        Memorize(seqLength, (!name.empty()) ? name + "_spike" : "");

    // U[T] = Input[T] + beta * U[T-1] - S[T-1] * U_th
    // with S[T] = | 1, if U[T] - U_th > 0
    //             | 0 otherwise

    // beta * U[T-1]
    decayRate->addChild(/*otherNode=*/mulNode, /*outId=*/0, /*otherInId=*/1);
    potentialMem->addChild(mulNode, 1, 0);

    // Input[T] + beta * U[T-1]
    mulNode->addChild(/*otherNode=*/addNode, /*outId=*/0, /*otherInId=*/1);
    inputNode->addChild(/*otherNode=*/addNode, /*outId=*/0, /*otherInId=*/0);

    // S[T-1] * U_th
    spikeMem->addChild(reset,
                       /*outId=*/memorizeOpDataOutputRecIndex,
                       /*otherInId=*/0);
    // BUGFIX: uth was previously connected to `reset` twice (once
    // unconditionally and once in the softReset branch); connect it once.
    if (softReset) {
        uth->addChild(reset, 0, 1);
    } else {
        // addNode->addChild(reset, 0, 1);
        AIDGE_THROW_OR_ABORT(std::runtime_error,
                             "Hard reset not implemented yet.");
    }

    // Input[T] + beta * U[T-1] - S[T-1] * U_th
    addNode->addChild(subNode, 0, 0);
    reset->addChild(subNode, 0, 1);

    // U[T] feeds the potential memory for the next time step.
    subNode->addChild(potentialMem, 0, 0);

    // U[T] - U_th
    subNode->addChild(subNode2, 0, 0);
    uth->addChild(subNode2, 0, 1);

    // S[T] = Heaviside(U[T] - U_th), stored for the next time step.
    subNode2->addChild(hsNode, 0, 0);
    hsNode->addChild(spikeMem, 0, 0);

    microGraph->add(inputNode);
    microGraph->add({addNode,
                     mulNode,
                     potentialMem,
                     decayRate,
                     uth,
                     spikeMem,
                     hsNode,
                     subNode,
                     subNode2,
                     reset},
                    false);

    // Outputs are the raw potential (before reset subtraction) and the spike.
    microGraph->setOrderedInputs(
        {{inputNode, 0}, {potentialMem, 1}, {spikeMem, 1}});
    microGraph->setOrderedOutputs({{addNode, 0}, {hsNode, 0}});

    // BUGFIX: honor the caller-supplied name (it was previously ignored and
    // every Leaky node was hardcoded to "leaky", causing name collisions).
    // Keep "leaky" as the fallback for the default empty name.
    auto metaOp = MetaOperator(/*type*/ "Leaky",
                               /*graph*/ microGraph,
                               /*forcedInputsCategory=*/{},
                               /*name*/ !name.empty() ? name : "leaky");

    return metaOp;
}
// Placeholder for the operator-only (un-wrapped) variant of Leaky.
// Always aborts; use Leaky() to get a usable node in the meantime.
std::shared_ptr<MetaOperator_Op> LeakyOp() {
    AIDGE_THROW_OR_ABORT(std::runtime_error, "Not implemented yet");
}
} // namespace Aidge
......@@ -23,6 +23,7 @@
#include "aidge/graph/GraphView.hpp"
#include "aidge/graph/Testing.hpp"
#include "aidge/recipes/Recipes.hpp"
#include "aidge/utils/ErrorHandling.hpp"
using namespace Aidge;
......@@ -145,4 +146,17 @@ TEST_CASE("[core/operators] MetaOperator", "[Operator][MetaOperator]") {
REQUIRE(g->getNodes().size() == 33);
}
SECTION("Leaky") {
    auto myLeaky = Leaky();

    // 3 inputs: the actual data input, plus one init tensor for each of
    // the two Memorize nodes (membrane potential and spike state).
    // (Previous version's comment said "2 inputs" while asserting 3, and
    // declared unused locals `op`/`inputs` plus a no-op REQUIRE(true).)
    REQUIRE(myLeaky->nbInputs() == 3);
}
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment