From 2dcc7e76d85c79fd4d25167595748763d43c87f0 Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Wed, 5 Feb 2025 11:50:41 +0100
Subject: [PATCH 01/11] Fix issue eclipse/aidge/aidge#243

---
 aidge_core/static_analysis.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/aidge_core/static_analysis.py b/aidge_core/static_analysis.py
index c65a102a1..b4a82a4fb 100644
--- a/aidge_core/static_analysis.py
+++ b/aidge_core/static_analysis.py
@@ -136,7 +136,8 @@ class StaticAnalysisExt(aidge_core.StaticAnalysis):
                 bot += serie
         else:
             plt.bar(names_only, values)
-        ax.yaxis.minorticks_on()
+        if callable(getattr(ax.yaxis, 'minorticks_on', None)):
+            ax.yaxis.minorticks_on() # introduced in matplotlib 3.9.x
         plt.grid(axis='y', which='major', linestyle='--', color='gray')
         plt.grid(axis='y', which='minor', linestyle=':', color='lightgray')
         formatter0 = matplotlib.ticker.EngFormatter(unit='')
@@ -171,7 +172,8 @@ class StaticAnalysisExt(aidge_core.StaticAnalysis):
                 left += serie
         else:
             plt.barh(names_only, values)
-        ax.xaxis.minorticks_on()
+        if callable(getattr(ax.xaxis, 'minorticks_on', None)):
+            ax.xaxis.minorticks_on() # introduced in matplotlib 3.9.x
         plt.grid(axis='x', which='major', linestyle='--', color='gray')
         plt.grid(axis='x', which='minor', linestyle=':', color='lightgray')
         formatter0 = matplotlib.ticker.EngFormatter(unit='')
-- 
GitLab


From 451ac9e90b7b999cce5e3dacc3cb1a273992524e Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Wed, 5 Feb 2025 12:01:33 +0100
Subject: [PATCH 02/11] Fix bug #231

---
 src/graph/GraphView.cpp | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index e1a520865..fab9be915 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -266,7 +266,12 @@ void Aidge::GraphView::logOutputs(const std::string& dirName) const {
         AIDGE_THROW_OR_ABORT(std::runtime_error,
             "Could not create graph view log file: {}", inputPath);
       }
-      fmt::print(fp.get(), "{}\n", nodePtr->getOperator()->getRawOutput(outIdx)->toString().c_str());
+
+      auto oTensor = std::static_pointer_cast<OperatorTensor>(nodePtr->getOperator())->getOutput(outIdx);
+      std::shared_ptr<Tensor> fallback;
+      const Tensor& localTensor = oTensor->refFrom(fallback, "cpu");
+
+      fmt::print(fp.get(), "{}\n", localTensor.toString().c_str());
     }
   }
 }
-- 
GitLab

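Patch 02 replaces a direct getRawOutput()->toString() call with a reference obtained through refFrom(fallback, "cpu"), presumably so that outputs held on a non-CPU backend can still be serialized. Below is a minimal standalone sketch of that fallback-reference idiom; the FakeTensor type is a hypothetical stand-in, not the real Aidge Tensor API.

    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for a tensor that knows which backend holds its
    // data (illustrative only, not the real Aidge Tensor API).
    struct FakeTensor {
        std::string backend;
        std::vector<float> data;

        // Same idiom as Tensor::refFrom(fallback, "cpu") in the patch: return
        // *this if the data is already on the requested backend, otherwise put
        // a copy on that backend into the caller-provided fallback and return it.
        const FakeTensor& refFrom(std::shared_ptr<FakeTensor>& fallback,
                                  const std::string& requested) const {
            if (backend == requested) {
                return *this;
            }
            fallback = std::make_shared<FakeTensor>(FakeTensor{requested, data});
            return *fallback;
        }
    };

    int main() {
        FakeTensor deviceTensor{"cuda", {1.f, 2.f, 3.f}};
        std::shared_ptr<FakeTensor> fallback;
        const FakeTensor& local = deviceTensor.refFrom(fallback, "cpu");
        std::cout << local.backend << " holds " << local.data.size() << " values\n";
        return 0;
    }

The caller keeps the fallback copy alive through the shared_ptr, so the returned reference remains valid while the value is printed.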

From d0ee078d3ff117f4a4a66fc13780c5ffa0f444ef Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Thu, 6 Feb 2025 12:23:37 +0100
Subject: [PATCH 03/11] Fixed attributes not properly cloned in GenericOperator
 copy constructor

---
 src/operator/GenericOperator.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/operator/GenericOperator.cpp b/src/operator/GenericOperator.cpp
index 1e28cf289..e0f7cf34a 100644
--- a/src/operator/GenericOperator.cpp
+++ b/src/operator/GenericOperator.cpp
@@ -45,7 +45,7 @@ Aidge::GenericOperator_Op::GenericOperator_Op(const std::string& type,
 Aidge::GenericOperator_Op::GenericOperator_Op(const Aidge::GenericOperator_Op& op)
     : OperatorTensor(op),
         mForwardDims(op.mForwardDims),
-        mAttributes(op.attributes() ? op.mAttributes : std::make_shared<DynamicAttributes>())
+        mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes))
 {
     mImpl = std::make_shared<OperatorImpl>(*this, op.backend());
 }
-- 
GitLab

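Patch 03 switches the GenericOperator_Op copy constructor from reusing the original's mAttributes pointer to deep-copying the attributes object. A standalone C++ sketch of the difference, with a hypothetical Attrs struct standing in for the real DynamicAttributes class:

    #include <cassert>
    #include <map>
    #include <memory>
    #include <string>

    // Hypothetical stand-in for an attribute dictionary (not the real
    // DynamicAttributes class).
    struct Attrs {
        std::map<std::string, int> values;
    };

    struct Op {
        std::shared_ptr<Attrs> mAttributes = std::make_shared<Attrs>();

        Op() = default;

        // Deep copy, as in the patch: the clone gets its own attribute storage.
        // The pre-patch behaviour amounted to mAttributes(other.mAttributes),
        // i.e. both operators sharing the same Attrs instance.
        Op(const Op& other)
            : mAttributes(std::make_shared<Attrs>(*other.mAttributes)) {}
    };

    int main() {
        Op original;
        original.mAttributes->values["seq_length"] = 10;

        Op clone(original);                            // copy-construct (clone)
        clone.mAttributes->values["seq_length"] = 20;  // modify the clone only

        assert(original.mAttributes->values["seq_length"] == 10);  // unaffected
        return 0;
    }

With the pre-patch shallow copy, modifying an attribute on a cloned operator would silently modify the original as well.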

From fe598818afc6bcd56d97bdc1bb54ed2e52efae31 Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Thu, 6 Feb 2025 12:24:11 +0100
Subject: [PATCH 04/11] Coding style

---
 include/aidge/operator/Operator.hpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/include/aidge/operator/Operator.hpp b/include/aidge/operator/Operator.hpp
index 40899ffa7..dd59af175 100644
--- a/include/aidge/operator/Operator.hpp
+++ b/include/aidge/operator/Operator.hpp
@@ -118,12 +118,12 @@ public:
      */
     Operator(const Operator& op):
         std::enable_shared_from_this<Operator>(),
+        mType(op.mType),
         mOperatorType(op.mOperatorType),
         mInputsCategory(op.mInputsCategory),
         mNbOut(op.mNbOut),
         mBackEdges(op.mBackEdges)
     {
-        mType = op.mType;
         mImpl = nullptr;
         // Implementation is never cloned. It is up to the non-abstract Operator copy-constructor to create a new implementation matching the copied Operator implementation.
         // See https://gitlab.eclipse.org/eclipse/aidge/aidge_core/-/merge_requests/8#note_1214050 for the discussion.
-- 
GitLab

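Patch 04 moves the mType copy from an assignment in the constructor body into the member initializer list. A generic illustration of the style being enforced (names are illustrative, not Aidge code):

    #include <string>

    struct Example {
        std::string mType;
        int mNbOut;

        Example() : mType("none"), mNbOut(0) {}

        // Preferred: initialize every member in the initializer list, so each
        // is constructed exactly once.
        Example(const Example& other)
            : mType(other.mType),
              mNbOut(other.mNbOut) {}

        // Discouraged equivalent: mType would be default-constructed first and
        // then assigned in the body, which is redundant work and does not
        // extend to const or reference members:
        //   Example(const Example& other) { mType = other.mType; mNbOut = other.mNbOut; }
    };

    int main() {
        Example a;
        Example b(a);   // uses the initializer-list copy constructor
        return b.mNbOut;
    }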

From dc82c0f57596c71b2fef8521db0c9aa17b51809a Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Thu, 6 Feb 2025 12:24:51 +0100
Subject: [PATCH 05/11] Fixed incorrect MetaOperator copy constructor and
 clone() method

---
 include/aidge/operator/MetaOperator.hpp |  5 +----
 src/operator/MetaOperator.cpp           | 22 +++++++++++++++++++++-
 2 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/include/aidge/operator/MetaOperator.hpp b/include/aidge/operator/MetaOperator.hpp
index f7f1cdfd5..c6ab45290 100644
--- a/include/aidge/operator/MetaOperator.hpp
+++ b/include/aidge/operator/MetaOperator.hpp
@@ -69,10 +69,7 @@ public:
      * 
      * @param op The operator to copy.
      */
-    MetaOperator_Op(const MetaOperator_Op& op)
-        : OperatorTensor(op),
-          mGraph(op.mGraph->clone()) // Clone the micro-graph for isolation
-    {}
+    MetaOperator_Op(const MetaOperator_Op& op);
 
     /**
      * @brief Set the node for scheduling.
diff --git a/src/operator/MetaOperator.cpp b/src/operator/MetaOperator.cpp
index ae3c3ed6c..9a8a943fc 100644
--- a/src/operator/MetaOperator.cpp
+++ b/src/operator/MetaOperator.cpp
@@ -54,8 +54,28 @@ Aidge::MetaOperator_Op::MetaOperator_Op(const std::string& type, const std::shar
     }
 }
 
+Aidge::MetaOperator_Op::MetaOperator_Op(const MetaOperator_Op& op)
+    : OperatorTensor(op),
+        mGraph(op.mGraph->clone()), // Clone the micro-graph for isolation
+        mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes)) // Clone attributes
+{
+    // Associate outputs to micro-graph outputs for custom implementation
+    for (size_t outputIdx = 0; outputIdx < mOutputs.size(); ++outputIdx) {
+        const auto& outputOp = mGraph->getOrderedOutputs()[outputIdx];
+        if (outputOp.first) {
+            mOutputs[outputIdx] = std::dynamic_pointer_cast<Tensor>(outputOp.first->getOperator()->getRawOutput(outputOp.second));
+        }
+    }
+
+    // Attributes are already cloned.
+}
+
 std::shared_ptr<Aidge::Operator> Aidge::MetaOperator_Op::clone() const {
-    return std::make_shared<MetaOperator_Op>(type(), mGraph->clone());
+    auto metaOp = std::make_shared<MetaOperator_Op>(*this);
+    if (mImpl) {
+        metaOp->setBackend(mImpl->backend());
+    }
+    return metaOp;
 }
 
 void Aidge::MetaOperator_Op::associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) {
-- 
GitLab


From cab9965199049b2c0e4d45798c51f3226f6e2c10 Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Thu, 6 Feb 2025 12:36:17 +0100
Subject: [PATCH 06/11] Added doc comment

---
 src/operator/MetaOperator.cpp | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/operator/MetaOperator.cpp b/src/operator/MetaOperator.cpp
index 9a8a943fc..96c5b219a 100644
--- a/src/operator/MetaOperator.cpp
+++ b/src/operator/MetaOperator.cpp
@@ -73,6 +73,9 @@ Aidge::MetaOperator_Op::MetaOperator_Op(const MetaOperator_Op& op)
 std::shared_ptr<Aidge::Operator> Aidge::MetaOperator_Op::clone() const {
     auto metaOp = std::make_shared<MetaOperator_Op>(*this);
     if (mImpl) {
+        // Only call setBackend() if mImpl is not nullptr.
+        // The inner-graph backend is already set in the MetaOperator_Op copy
+        // constructor, when the graph is cloned.
         metaOp->setBackend(mImpl->backend());
     }
     return metaOp;
-- 
GitLab


From 3ce55d06a09ae0f9e95d95918b61a4ff51841dba Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Thu, 6 Feb 2025 13:55:47 +0100
Subject: [PATCH 07/11] Removed mandatory type attribute for Meta op, which is
 redundant with Meta op impl registry

---
 src/backend/OperatorImpl.cpp | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/src/backend/OperatorImpl.cpp b/src/backend/OperatorImpl.cpp
index 71f4f04b2..08f5fe671 100644
--- a/src/backend/OperatorImpl.cpp
+++ b/src/backend/OperatorImpl.cpp
@@ -74,13 +74,6 @@ Aidge::ImplSpec Aidge::OperatorImpl::getRequiredSpec() const {
 
         requiredSpec.outputs.push_back({opTensor.getOutput(i)->dataType(), opTensor.getOutput(i)->dataFormat(), dims});
     }
-    // Attributes
-    if (!mOp.isAtomic()) {
-        requiredSpec.attrs.setAttr("type:!", mOp.type()); // :! mandatory qualifier
-    }
-    else {
-        requiredSpec.attrs.setAttr("type", mOp.type());
-    }
 
     const auto& inhAttrs = mOp.inheritedAttributes();
     if (inhAttrs) {
-- 
GitLab


From c5a91f3b427055feef8718ac766a7bb2add1c5cf Mon Sep 17 00:00:00 2001
From: Maxence Naud <maxence.naud@cea.fr>
Date: Tue, 11 Feb 2025 10:00:31 +0000
Subject: [PATCH 08/11] Merge branch 'FixLog' into 'dev'

Fix log

See merge request eclipse/aidge/aidge_core!326

(cherry picked from commit e068a93fdc9828fc88de513350a4609d78b69f0e)

9b70101b Fix https://gitlab.eclipse.org/eclipse/aidge/aidge_core/-/issues/228

Co-authored-by: Maxence Naud <maxence.naud@cea.fr>
---
 src/utils/Log.cpp | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/utils/Log.cpp b/src/utils/Log.cpp
index b4c64d527..9755aa61d 100644
--- a/src/utils/Log.cpp
+++ b/src/utils/Log.cpp
@@ -24,6 +24,8 @@ namespace Aidge {
  * @brief Initialize console log level from environment. If compile mode is
  * DEBUG, then the default level is Log::Level::Debug, else it is
  * Log::Level::Notice.
+ *
+ * WARNING: Do not use this variable directly, use getConsoleLevel() instead.
  */
 Log::Level Log::mConsoleLevel = []() {
 #ifndef NDEBUG
@@ -58,7 +60,7 @@ bool Log::mConsoleColor = []() {
  */
 Log::Level Log::mFileLevel = []() {
 #ifndef NDEBUG
-    constexpr Level defaultLevel = Level::Debug;
+    constexpr Log::Level defaultLevel = Level::Debug;
 #else
     constexpr Log::Level defaultLevel = Level::Notice;
 #endif
@@ -164,7 +166,7 @@ void Log::log(Level level, const std::string& msg) {
     const std::size_t levelIndentSizes[6] = {10, 9, 11, 12, 10, 10};
     const std::size_t width = 80 - levelIndentSizes[static_cast<std::size_t>(level)];
 
-    if (level >= mConsoleLevel) {
+    if (level >= getConsoleLevel()) {
         for (const auto& context : mContext) {
             fmt::println("Context: {}", context);
         }
-- 
GitLab

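Patch 08 also documents that the console level must be read through getConsoleLevel() rather than via the mConsoleLevel member directly. For context, here is a minimal sketch of the initialization pattern visible in Log.cpp, where a static member gets its default from the build type through an immediately-invoked lambda; the class name and environment variable below are illustrative, not the actual Aidge ones.

    #include <cstdlib>
    #include <string>

    struct Logger {
        enum class Level { Debug, Notice };

        // Initialized once by the immediately-invoked lambda below; read it
        // through the accessor rather than touching the member directly.
        static Level sConsoleLevel;
        static Level consoleLevel() { return sConsoleLevel; }
    };

    Logger::Level Logger::sConsoleLevel = []() {
    #ifndef NDEBUG
        Logger::Level level = Logger::Level::Debug;   // debug builds default to Debug
    #else
        Logger::Level level = Logger::Level::Notice;  // release builds default to Notice
    #endif
        // Optional override from the environment (variable name is illustrative).
        if (const char* env = std::getenv("EXAMPLE_LOGLEVEL")) {
            if (std::string(env) == "notice") {
                level = Logger::Level::Notice;
            }
        }
        return level;
    }();

    int main() {
        return Logger::consoleLevel() == Logger::Level::Debug ? 0 : 1;
    }

Routing every read through the accessor keeps a single point where the level is resolved, which is what the patch's change from `level >= mConsoleLevel` to `level >= getConsoleLevel()` relies on.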

From 4e75c3ea10915b3f05c0760b3356c110fcba9443 Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Thu, 6 Feb 2025 15:06:45 +0100
Subject: [PATCH 09/11] Removed code redundancy

---
 include/aidge/operator/MetaOperatorDefs.hpp   |  21 ++--
 .../operator/pybind_MetaOperatorDefs.cpp      |   3 +-
 src/operator/MetaOperatorDefs/LSTM.cpp        | 109 +++---------------
 3 files changed, 27 insertions(+), 106 deletions(-)

diff --git a/include/aidge/operator/MetaOperatorDefs.hpp b/include/aidge/operator/MetaOperatorDefs.hpp
index 5bb184b80..9597b533c 100644
--- a/include/aidge/operator/MetaOperatorDefs.hpp
+++ b/include/aidge/operator/MetaOperatorDefs.hpp
@@ -260,6 +260,17 @@ inline std::shared_ptr<Node> PaddedMaxPooling(
     return PaddedMaxPooling(to_array(kernel_dims), name, stride_dims, padding_dims, ceil_mode);
 }
 
+/**
+ * @brief Creates an LSTM (Long Short-Term Memory) operation as a MetaOperator.
+ *
+ * This function creates an LSTM operation as a MetaOperator for use in graph-based computation.
+ *
+ * @param[in] seq_length The length of the input sequence.
+ * @return A shared pointer to the MetaOperator_Op representing the LSTM operation.
+ */
+std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length,
+                                         const std::string &name = "");
+
 /**
  * @brief Creates an LSTM (Long Short-Term Memory) operator.
  *
@@ -278,16 +289,6 @@ std::shared_ptr<Node> LSTM(DimSize_t in_channels,
                            bool noBias = false,
                            const std::string &name = "");
 
-/**
- * @brief Creates an LSTM (Long Short-Term Memory) operation as a MetaOperator.
- *
- * This function creates an LSTM operation as a MetaOperator for use in graph-based computation.
- *
- * @param[in] seq_length The length of the input sequence.
- * @return A shared pointer to the MetaOperator_Op representing the LSTM operation.
- */
-std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length);
-
 std::shared_ptr<MetaOperator_Op> LeakyOp();
 std::shared_ptr<Node> Leaky(const int nbTimeSteps,
                             const float beta,
diff --git a/python_binding/operator/pybind_MetaOperatorDefs.cpp b/python_binding/operator/pybind_MetaOperatorDefs.cpp
index b2811fbaa..35f3d2134 100644
--- a/python_binding/operator/pybind_MetaOperatorDefs.cpp
+++ b/python_binding/operator/pybind_MetaOperatorDefs.cpp
@@ -176,7 +176,8 @@ void declare_LSTMOp(py::module &m) {
        py::arg("nobias") = false,
        py::arg("name") = "");
   m.def("LSTMOp", &LSTM_Op,
-       py::arg("seq_length"));
+       py::arg("seq_length"),
+       py::arg("name") = "");
 }
 
 void declare_LeakyOp(py::module &m) {
diff --git a/src/operator/MetaOperatorDefs/LSTM.cpp b/src/operator/MetaOperatorDefs/LSTM.cpp
index 22c0469b3..c7fbe8a16 100644
--- a/src/operator/MetaOperatorDefs/LSTM.cpp
+++ b/src/operator/MetaOperatorDefs/LSTM.cpp
@@ -23,11 +23,8 @@
 
 namespace Aidge {
 
-std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
-                           const DimSize_t hiddenChannel,
-                           const DimSize_t seqLength,
-                           bool noBias,
-                           const std::string& name)
+std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength,
+                                         const std::string& name)
 {
     // Construct micro-graph
     auto input = Identity((!name.empty()) ? name + "_input" : "");
@@ -113,7 +110,18 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
         {hiddenState, 1}, {cellState, 1}});
     microGraph->setOrderedOutputs({{hiddenState, 0}, {cellState, 0}});
 
-    auto metaOp = MetaOperator("LSTM", microGraph, {}, name);
+    return std::make_shared<MetaOperator_Op>("LSTM", microGraph);
+}
+
+std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
+                           const DimSize_t hiddenChannel,
+                           const DimSize_t seqLength,
+                           bool noBias,
+                           const std::string& name)
+{
+    auto op = LSTM_Op(seqLength, name);
+    auto metaOp = std::make_shared<Node>(op, name);
+    op->setUpperNode(metaOp);
     addProducer(metaOp, 1, {hiddenChannel, inChannel}, "wi");
     addProducer(metaOp, 2, {hiddenChannel, inChannel}, "wo");
     addProducer(metaOp, 3, {hiddenChannel, inChannel}, "wf");
@@ -135,93 +143,4 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
     return metaOp;
 }
 
-std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
-{
-    // Construct micro-graph
-    auto input = Identity("");
-    auto hiddenState = Memorize(seqLength, "");
-    auto cellState = Memorize(seqLength, "");
-    auto add = Add("");
-
-    // Forget gate
-    auto forgetGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(forgetGateX, 0, 0);
-    auto forgetGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(forgetGateH, 1, 0);
-    auto forgetGate = Add("");
-    forgetGateX->addChild(forgetGate, 0, 0);
-    forgetGateH->addChild(forgetGate, 0, 1);
-    auto forgetGateAct = Sigmoid("");
-    auto forgetGateMul = Mul("");
-    forgetGate->addChild(forgetGateAct, 0, 0);
-    forgetGateAct->addChild(forgetGateMul, 0, 0);
-    forgetGateMul->addChild(add, 0, 0);
-    cellState->addChild(forgetGateMul, 1, 1);
-
-    // Input gate
-    auto inputGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(inputGateX, 0, 0);
-    auto inputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(inputGateH, 1, 0);
-    auto inputGate = Add("");
-    inputGateX->addChild(inputGate, 0, 0);
-    inputGateH->addChild(inputGate, 0, 1);
-    auto inputGateAct = Sigmoid("");
-    auto inputGateMul = Mul("");
-    inputGate->addChild(inputGateAct, 0, 0);
-    inputGateAct->addChild(inputGateMul, 0, 0);
-    inputGateMul->addChild(add, 0, 1);
-
-    // Candidate for cell update
-    auto cellCandidateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(cellCandidateX, 0, 0);
-    auto cellCandidateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(cellCandidateH, 1, 0);
-    auto cellCandidate = Add("");
-    cellCandidateX->addChild(cellCandidate, 0, 0);
-    cellCandidateH->addChild(cellCandidate, 0, 1);
-    auto cellCandidateAct = Tanh("");
-    cellCandidate->addChild(cellCandidateAct, 0, 0);
-    cellCandidateAct->addChild(inputGateMul, 0, 1);
-
-    // Output gate
-    auto outputGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(outputGateX, 0, 0);
-    auto outputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(outputGateH, 1, 0);
-    auto outputGate = Add("");
-    outputGateX->addChild(outputGate, 0, 0);
-    outputGateH->addChild(outputGate, 0, 1);
-    auto outputGateAct = Sigmoid("");
-    auto outputGateMul = Mul("");
-    outputGate->addChild(outputGateAct, 0, 0);
-    outputGateAct->addChild(outputGateMul, 0, 0);
-
-    // Updated cell state to help determine new hidden state
-    auto cellUpdatedAct = Tanh("");
-    add->addChild(cellUpdatedAct, 0, 0);
-    cellUpdatedAct->addChild(outputGateMul, 0, 1);
-    outputGateMul->addChild(hiddenState, 0, 0);
-    add->addChild(cellState, 0, 0);
-
-    std::shared_ptr<GraphView> microGraph = std::make_shared<GraphView>();
-    microGraph->add(input);
-    microGraph->add({hiddenState, cellState, add,
-        forgetGateX, forgetGateH, forgetGate, forgetGateAct, forgetGateMul,
-        inputGateX, inputGateH, inputGate, inputGateAct, inputGateMul,
-        cellCandidateX, cellCandidateH, cellCandidate, cellCandidateAct,
-        outputGateX, outputGateH, outputGate, outputGateAct, outputGateMul,
-        cellUpdatedAct}, false);
-
-    microGraph->setOrderedInputs({{input, 0},
-        {inputGateX, 1}, {outputGateX, 1}, {forgetGateX, 1}, {cellCandidateX, 1},
-        {inputGateH, 1}, {outputGateH, 1}, {forgetGateH, 1}, {cellCandidateH, 1},
-        {inputGateX, 2}, {outputGateX, 2}, {forgetGateX, 2}, {cellCandidateX, 2},
-        {inputGateH, 2}, {outputGateH, 2}, {forgetGateH, 2}, {cellCandidateH, 2},
-        {hiddenState, 1}, {cellState, 1}});
-    microGraph->setOrderedOutputs({{hiddenState, 0}, {cellState, 0}});
-
-    return std::make_shared<MetaOperator_Op>("LSTM", microGraph);
-}
-
 } // namespace Aidge
-- 
GitLab


From f887893ae3e46dea6c2bbefa42f9e227f9afb08c Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Wed, 12 Feb 2025 07:46:57 +0000
Subject: [PATCH 10/11] Fix
 https://gitlab.eclipse.org/eclipse/aidge/aidge_core/-/issues/232

---
 src/utils/Log.cpp | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/src/utils/Log.cpp b/src/utils/Log.cpp
index 9755aa61d..f4bc32e8b 100644
--- a/src/utils/Log.cpp
+++ b/src/utils/Log.cpp
@@ -104,8 +104,13 @@ void Log::log(Level level, const std::string& msg) {
         while (start < text.size()) {
             std::size_t lineWidth = 0;
             std::size_t current = start;
-
-            while (current < text.size() && lineWidth < width) {
+            bool inPath = false;
+            while (current < text.size() && (lineWidth < width || inPath)) {
+                if (inPath){
+                    if (text[current] == ' ' || text[current] == '\n'){
+                        inPath = false;
+                    }
+                }
                 if (text[current] == '\033') {
                     // Found ANSI escape sequence, skip until 'm'
                     std::size_t ansiEnd = text.find('m', current);
@@ -121,6 +126,9 @@ void Log::log(Level level, const std::string& msg) {
                     // Handle explicit line break
                     break;
                 } else {
+                    if(!inPath && (text[current] == '/' || text[current] == '\\')) {
+                        inPath = true;
+                    }
                     // Normal character, increase line width
                     ++lineWidth;
                     ++current;
@@ -164,7 +172,7 @@ void Log::log(Level level, const std::string& msg) {
     // Get the string representation of the log level
     const auto levelStr = EnumStrings<Level>::data[static_cast<std::size_t>(level)];
     const std::size_t levelIndentSizes[6] = {10, 9, 11, 12, 10, 10};
-    const std::size_t width = 80 - levelIndentSizes[static_cast<std::size_t>(level)];
+    const std::size_t width = 100 - levelIndentSizes[static_cast<std::size_t>(level)];
 
     if (level >= getConsoleLevel()) {
         for (const auto& context : mContext) {
-- 
GitLab


From bb7052a5f0d64032231d11dc4cd3df3c30b8e12e Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Wed, 12 Feb 2025 13:39:54 +0000
Subject: [PATCH 11/11] Fix error when using connector and not providing
 optional data.

---
 src/graph/Node.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/graph/Node.cpp b/src/graph/Node.cpp
index 692806dc7..edc530179 100644
--- a/src/graph/Node.cpp
+++ b/src/graph/Node.cpp
@@ -67,7 +67,7 @@ Aidge::Connector Aidge::Node::operator()(const std::vector<Connector>& ctors) {
     }
 
     // Skip to next possible input idx
-    for (; idx < nbInputs() && (inputCategory(idx) != InputCategory::Data && inputCategory(idx) != InputCategory::OptionalData); ++idx) {}
+    for (; idx < nbInputs() && (inputCategory(idx) != InputCategory::Data); ++idx) {}
     AIDGE_ASSERT(idx == nbInputs(), "Missing an input connector for Data input#{}", idx);
 
     return Connector(shared_from_this());
-- 
GitLab