Commit 1d89fb1d authored by Marwa ABDELOUINISSE

Merge branch aidge_core:main into feat_183_add_dropout

parents cb53ae29 4e3bce3f
# Version 0.4.0 (February 2025)
# Version 0.5.1 (February 13, 2025)
# Version 0.5.0 (January 31, 2025)
# Version 0.4.0 (December 2024)
......
......@@ -136,7 +136,8 @@ class StaticAnalysisExt(aidge_core.StaticAnalysis):
bot += serie
else:
plt.bar(names_only, values)
ax.yaxis.minorticks_on()
if callable(getattr(ax.yaxis, 'minorticks_on', None)):
ax.yaxis.minorticks_on() # introduced in matplotlib 3.9.x
plt.grid(axis='y', which='major', linestyle='--', color='gray')
plt.grid(axis='y', which='minor', linestyle=':', color='lightgray')
formatter0 = matplotlib.ticker.EngFormatter(unit='')
......@@ -171,7 +172,8 @@ class StaticAnalysisExt(aidge_core.StaticAnalysis):
left += serie
else:
plt.barh(names_only, values)
ax.xaxis.minorticks_on()
if callable(getattr(ax.xaxis, 'minorticks_on', None)):
ax.xaxis.minorticks_on() # introduced in matplotlib 3.9.x
plt.grid(axis='x', which='major', linestyle='--', color='gray')
plt.grid(axis='x', which='minor', linestyle=':', color='lightgray')
formatter0 = matplotlib.ticker.EngFormatter(unit='')
......
......@@ -69,10 +69,7 @@ public:
*
* @param op The operator to copy.
*/
MetaOperator_Op(const MetaOperator_Op& op)
: OperatorTensor(op),
mGraph(op.mGraph->clone()) // Clone the micro-graph for isolation
{}
MetaOperator_Op(const MetaOperator_Op& op);
/**
* @brief Set the node for scheduling.
......
......@@ -260,6 +260,17 @@ inline std::shared_ptr<Node> PaddedMaxPooling(
return PaddedMaxPooling(to_array(kernel_dims), name, stride_dims, padding_dims, ceil_mode);
}
/**
* @brief Creates an LSTM (Long Short-Term Memory) operation as a MetaOperator.
*
* This function creates an LSTM operation as a MetaOperator for use in graph-based computation.
*
* @param[in] seq_length The length of the input sequence.
* @param[in] name Optional name of the operator (empty by default).
* @return A shared pointer to the MetaOperator_Op representing the LSTM operation.
*/
std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length,
const std::string &name = "");
/**
* @brief Creates an LSTM (Long Short-Term Memory) operator.
*
......@@ -278,16 +289,6 @@ std::shared_ptr<Node> LSTM(DimSize_t in_channels,
bool noBias = false,
const std::string &name = "");
/**
* @brief Creates an LSTM (Long Short-Term Memory) operation as a MetaOperator.
*
* This function creates an LSTM operation as a MetaOperator for use in graph-based computation.
*
* @param[in] seq_length The length of the input sequence.
* @return A shared pointer to the MetaOperator_Op representing the LSTM operation.
*/
std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length);
std::shared_ptr<MetaOperator_Op> LeakyOp();
std::shared_ptr<Node> Leaky(const int nbTimeSteps,
const float beta,
......
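For reference, a minimal usage sketch of the relocated LSTM_Op factory, mirroring what the LSTM() helper in this commit does internally (the sequence length and the "lstm0" name are illustrative, and the MetaOperatorDefs header is assumed to be included):

// Hedged sketch: build the LSTM meta-operator directly and wrap it in a Node.
std::shared_ptr<Aidge::MetaOperator_Op> op = Aidge::LSTM_Op(16, "lstm0");
auto node = std::make_shared<Aidge::Node>(op, "lstm0");
op->setUpperNode(node);  // ties the operator back to its wrapping node, as LSTM() does below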
......@@ -118,12 +118,12 @@ public:
*/
Operator(const Operator& op):
std::enable_shared_from_this<Operator>(),
mType(op.mType),
mOperatorType(op.mOperatorType),
mInputsCategory(op.mInputsCategory),
mNbOut(op.mNbOut),
mBackEdges(op.mBackEdges)
{
mType = op.mType;
mImpl = nullptr;
// Implementation is never cloned. It is up to the non-abstract Operator copy-constructor to create a new implementation matching the copied Operator implementation.
// See https://gitlab.eclipse.org/eclipse/aidge/aidge_core/-/merge_requests/8#note_1214050 for the discussion.
......
......@@ -176,7 +176,8 @@ void declare_LSTMOp(py::module &m) {
py::arg("nobias") = false,
py::arg("name") = "");
m.def("LSTMOp", &LSTM_Op,
py::arg("seq_length"));
py::arg("seq_length"),
py::arg("name") = "");
}
void declare_LeakyOp(py::module &m) {
......
......@@ -74,13 +74,6 @@ Aidge::ImplSpec Aidge::OperatorImpl::getRequiredSpec() const {
requiredSpec.outputs.push_back({opTensor.getOutput(i)->dataType(), opTensor.getOutput(i)->dataFormat(), dims});
}
// Attributes
if (!mOp.isAtomic()) {
requiredSpec.attrs.setAttr("type:!", mOp.type()); // :! mandatory qualifier
}
else {
requiredSpec.attrs.setAttr("type", mOp.type());
}
const auto& inhAttrs = mOp.inheritedAttributes();
if (inhAttrs) {
......
......@@ -266,7 +266,12 @@ void Aidge::GraphView::logOutputs(const std::string& dirName) const {
AIDGE_THROW_OR_ABORT(std::runtime_error,
"Could not create graph view log file: {}", inputPath);
}
fmt::print(fp.get(), "{}\n", nodePtr->getOperator()->getRawOutput(outIdx)->toString().c_str());
auto oTensor = std::static_pointer_cast<OperatorTensor>(nodePtr->getOperator())->getOutput(outIdx);
std::shared_ptr<Tensor> fallback;
const Tensor& localTensor = oTensor->refFrom(fallback, "cpu");
fmt::print(fp.get(), "{}\n", localTensor.toString().c_str());
}
}
}
......
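The refFrom() call above is the interesting part: it returns a CPU-resident view of the output tensor, using the fallback tensor as storage when the data lives on another backend. A hedged sketch of the same pattern outside logOutputs (someOutputTensor is a placeholder name, and fmt/core.h is assumed to be available):

// Hedged sketch: print any output tensor from the CPU, whatever backend produced it.
std::shared_ptr<Aidge::Tensor> fallback;
const Aidge::Tensor& local = someOutputTensor->refFrom(fallback, "cpu");  // copies only if needed
fmt::print("{}\n", local.toString());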
......@@ -67,7 +67,7 @@ Aidge::Connector Aidge::Node::operator()(const std::vector<Connector>& ctors) {
}
// Skip to next possible input idx
for (; idx < nbInputs() && (inputCategory(idx) != InputCategory::Data && inputCategory(idx) != InputCategory::OptionalData); ++idx) {}
for (; idx < nbInputs() && (inputCategory(idx) != InputCategory::Data); ++idx) {}
AIDGE_ASSERT(idx == nbInputs(), "Missing an input connector for Data input#{}", idx);
return Connector(shared_from_this());
......
......@@ -45,7 +45,7 @@ Aidge::GenericOperator_Op::GenericOperator_Op(const std::string& type,
Aidge::GenericOperator_Op::GenericOperator_Op(const Aidge::GenericOperator_Op& op)
: OperatorTensor(op),
mForwardDims(op.mForwardDims),
mAttributes(op.attributes() ? op.mAttributes : std::make_shared<DynamicAttributes>())
mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes))
{
mImpl = std::make_shared<OperatorImpl>(*this, op.backend());
}
......
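The change above makes the copied GenericOperator_Op own a deep copy of its attributes instead of sharing (or recreating empty) the original DynamicAttributes. A small sketch of the behaviour this enables; the attribute name and the addAttr/setAttr/getAttr calls are assumptions about the DynamicAttributes API, not part of this commit:

// Hedged sketch: a deep-copied attribute store is independent of the original.
auto attrs = std::make_shared<Aidge::DynamicAttributes>();
attrs->addAttr("alpha", 1.0f);
auto copy = std::make_shared<Aidge::DynamicAttributes>(*attrs);  // same deep copy as in the constructor above
copy->setAttr("alpha", 2.0f);
// attrs->getAttr<float>("alpha") is still 1.0f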
......@@ -54,8 +54,31 @@ Aidge::MetaOperator_Op::MetaOperator_Op(const std::string& type, const std::shar
}
}
Aidge::MetaOperator_Op::MetaOperator_Op(const MetaOperator_Op& op)
: OperatorTensor(op),
mGraph(op.mGraph->clone()), // Clone the micro-graph for isolation
mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes)) // Clone attributes
{
// Associate outputs to micro-graph outputs for custom implementation
for (size_t outputIdx = 0; outputIdx < mOutputs.size(); ++outputIdx) {
const auto& outputOp = mGraph->getOrderedOutputs()[outputIdx];
if (outputOp.first) {
mOutputs[outputIdx] = std::dynamic_pointer_cast<Tensor>(outputOp.first->getOperator()->getRawOutput(outputOp.second));
}
}
// Attributes are already cloned.
}
std::shared_ptr<Aidge::Operator> Aidge::MetaOperator_Op::clone() const {
return std::make_shared<MetaOperator_Op>(type(), mGraph->clone());
auto metaOp = std::make_shared<MetaOperator_Op>(*this);
if (mImpl) {
// Only call setBackend() if mImpl is not nullptr.
// The inner-graph backend is already set in the MetaOperator_Op copy
// constructor, when the graph is cloned.
metaOp->setBackend(mImpl->backend());
}
return metaOp;
}
void Aidge::MetaOperator_Op::associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) {
......
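Taken together, the copy constructor and clone() above yield a fully independent meta-operator: the micro-graph and attributes are deep-cloned, and the backend is re-applied when the source operator had an implementation. A hedged usage sketch (it assumes a "cpu" backend plugin is registered and that getMicroGraph() exposes the inner graph):

// Hedged sketch: cloning a meta-operator duplicates its micro-graph and re-binds the backend.
std::shared_ptr<Aidge::MetaOperator_Op> lstm = Aidge::LSTM_Op(16, "lstm0");
lstm->setBackend("cpu");  // assumption: an aidge_backend_cpu-style plugin is available
auto copy = std::static_pointer_cast<Aidge::MetaOperator_Op>(lstm->clone());
// copy->getMicroGraph() is a distinct GraphView clone, so editing it leaves lstm untouched,
// and copy already has its backend set because lstm had an implementation.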
......@@ -23,11 +23,8 @@
namespace Aidge {
std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
const DimSize_t hiddenChannel,
const DimSize_t seqLength,
bool noBias,
const std::string& name)
std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength,
const std::string& name)
{
// Construct micro-graph
auto input = Identity((!name.empty()) ? name + "_input" : "");
......@@ -113,7 +110,18 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
{hiddenState, 1}, {cellState, 1}});
microGraph->setOrderedOutputs({{hiddenState, 0}, {cellState, 0}});
auto metaOp = MetaOperator("LSTM", microGraph, {}, name);
return std::make_shared<MetaOperator_Op>("LSTM", microGraph);
}
std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
const DimSize_t hiddenChannel,
const DimSize_t seqLength,
bool noBias,
const std::string& name)
{
auto op = LSTM_Op(seqLength, name);
auto metaOp = std::make_shared<Node>(op, name);
op->setUpperNode(metaOp);
addProducer(metaOp, 1, {hiddenChannel, inChannel}, "wi");
addProducer(metaOp, 2, {hiddenChannel, inChannel}, "wo");
addProducer(metaOp, 3, {hiddenChannel, inChannel}, "wf");
......@@ -135,93 +143,4 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
return metaOp;
}
std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
{
// Construct micro-graph
auto input = Identity("");
auto hiddenState = Memorize(seqLength, "");
auto cellState = Memorize(seqLength, "");
auto add = Add("");
// Forget gate
auto forgetGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
input->addChild(forgetGateX, 0, 0);
auto forgetGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
hiddenState->addChild(forgetGateH, 1, 0);
auto forgetGate = Add("");
forgetGateX->addChild(forgetGate, 0, 0);
forgetGateH->addChild(forgetGate, 0, 1);
auto forgetGateAct = Sigmoid("");
auto forgetGateMul = Mul("");
forgetGate->addChild(forgetGateAct, 0, 0);
forgetGateAct->addChild(forgetGateMul, 0, 0);
forgetGateMul->addChild(add, 0, 0);
cellState->addChild(forgetGateMul, 1, 1);
// Input gate
auto inputGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
input->addChild(inputGateX, 0, 0);
auto inputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
hiddenState->addChild(inputGateH, 1, 0);
auto inputGate = Add("");
inputGateX->addChild(inputGate, 0, 0);
inputGateH->addChild(inputGate, 0, 1);
auto inputGateAct = Sigmoid("");
auto inputGateMul = Mul("");
inputGate->addChild(inputGateAct, 0, 0);
inputGateAct->addChild(inputGateMul, 0, 0);
inputGateMul->addChild(add, 0, 1);
// Candidate for cell update
auto cellCandidateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
input->addChild(cellCandidateX, 0, 0);
auto cellCandidateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
hiddenState->addChild(cellCandidateH, 1, 0);
auto cellCandidate = Add("");
cellCandidateX->addChild(cellCandidate, 0, 0);
cellCandidateH->addChild(cellCandidate, 0, 1);
auto cellCandidateAct = Tanh("");
cellCandidate->addChild(cellCandidateAct, 0, 0);
cellCandidateAct->addChild(inputGateMul, 0, 1);
// Output gate
auto outputGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
input->addChild(outputGateX, 0, 0);
auto outputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
hiddenState->addChild(outputGateH, 1, 0);
auto outputGate = Add("");
outputGateX->addChild(outputGate, 0, 0);
outputGateH->addChild(outputGate, 0, 1);
auto outputGateAct = Sigmoid("");
auto outputGateMul = Mul("");
outputGate->addChild(outputGateAct, 0, 0);
outputGateAct->addChild(outputGateMul, 0, 0);
// Updated cell state to help determine new hidden state
auto cellUpdatedAct = Tanh("");
add->addChild(cellUpdatedAct, 0, 0);
cellUpdatedAct->addChild(outputGateMul, 0, 1);
outputGateMul->addChild(hiddenState, 0, 0);
add->addChild(cellState, 0, 0);
std::shared_ptr<GraphView> microGraph = std::make_shared<GraphView>();
microGraph->add(input);
microGraph->add({hiddenState, cellState, add,
forgetGateX, forgetGateH, forgetGate, forgetGateAct, forgetGateMul,
inputGateX, inputGateH, inputGate, inputGateAct, inputGateMul,
cellCandidateX, cellCandidateH, cellCandidate, cellCandidateAct,
outputGateX, outputGateH, outputGate, outputGateAct, outputGateMul,
cellUpdatedAct}, false);
microGraph->setOrderedInputs({{input, 0},
{inputGateX, 1}, {outputGateX, 1}, {forgetGateX, 1}, {cellCandidateX, 1},
{inputGateH, 1}, {outputGateH, 1}, {forgetGateH, 1}, {cellCandidateH, 1},
{inputGateX, 2}, {outputGateX, 2}, {forgetGateX, 2}, {cellCandidateX, 2},
{inputGateH, 2}, {outputGateH, 2}, {forgetGateH, 2}, {cellCandidateH, 2},
{hiddenState, 1}, {cellState, 1}});
microGraph->setOrderedOutputs({{hiddenState, 0}, {cellState, 0}});
return std::make_shared<MetaOperator_Op>("LSTM", microGraph);
}
} // namespace Aidge
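With this refactoring, LSTM() becomes a thin wrapper around LSTM_Op(): it builds the meta-operator, wraps it in a Node and attaches the weight, recurrent-weight and bias producers. A usage sketch of the node-level helper (channel sizes, sequence length and name are illustrative values):

// Hedged sketch: create an LSTM node ready to be inserted into a GraphView.
auto lstm = Aidge::LSTM(/*in_channels=*/32,
                        /*hidden_channels=*/64,
                        /*seq_length=*/16,
                        /*noBias=*/false,
                        "lstm0");
// lstm is a std::shared_ptr<Aidge::Node>; its parameters were attached with addProducer() as above.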
......@@ -24,6 +24,8 @@ namespace Aidge {
* @brief Initialize console log level from environment. If compile mode is
* DEBUG, then the default level is Log::Level::Debug, else it is
* Log::Level::Notice.
*
* WARNING: Do not use this variable directly, use getConsoleLevel() instead.
*/
Log::Level Log::mConsoleLevel = []() {
#ifndef NDEBUG
......@@ -58,7 +60,7 @@ bool Log::mConsoleColor = []() {
*/
Log::Level Log::mFileLevel = []() {
#ifndef NDEBUG
constexpr Level defaultLevel = Level::Debug;
constexpr Log::Level defaultLevel = Level::Debug;
#else
constexpr Log::Level defaultLevel = Level::Notice;
#endif
......@@ -102,8 +104,13 @@ void Log::log(Level level, const std::string& msg) {
while (start < text.size()) {
std::size_t lineWidth = 0;
std::size_t current = start;
while (current < text.size() && lineWidth < width) {
bool inPath = false;
while (current < text.size() && (lineWidth < width || inPath)) {
if (inPath){
if (text[current] == ' ' || text[current] == '\n'){
inPath = false;
}
}
if (text[current] == '\033') {
// Found ANSI escape sequence, skip until 'm'
std::size_t ansiEnd = text.find('m', current);
......@@ -119,6 +126,9 @@ void Log::log(Level level, const std::string& msg) {
// Handle explicit line break
break;
} else {
if(!inPath && (text[current] == '/' || text[current] == '\\')) {
inPath = true;
}
// Normal character, increase line width
++lineWidth;
++current;
......@@ -162,9 +172,9 @@ void Log::log(Level level, const std::string& msg) {
// Get the string representation of the log level
const auto levelStr = EnumStrings<Level>::data[static_cast<std::size_t>(level)];
const std::size_t levelIndentSizes[6] = {10, 9, 11, 12, 10, 10};
const std::size_t width = 80 - levelIndentSizes[static_cast<std::size_t>(level)];
const std::size_t width = 100 - levelIndentSizes[static_cast<std::size_t>(level)];
if (level >= mConsoleLevel) {
if (level >= getConsoleLevel()) {
for (const auto& context : mContext) {
fmt::println("Context: {}", context);
}
......
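Two behavioural effects of the Log changes above: console messages now wrap on a 100-column budget instead of 80, and a file path is never broken across wrapped lines. A minimal sketch of exercising this, assuming Log::setConsoleLevel() and Log::info() from aidge_core's logging API:

// Hedged sketch: the long path below stays on a single wrapped line.
Aidge::Log::setConsoleLevel(Aidge::Log::Level::Debug);  // assumption: setter matching getConsoleLevel()
Aidge::Log::info("Exporting graph to {}", "/home/user/projects/aidge/export/model_quantized_int8.onnx");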
0.5.0
0.5.1