diff --git a/aidge_core/static_analysis.py b/aidge_core/static_analysis.py
index c65a102a10601605cd2ca988a2ad3cf2cbd00e6e..b4a82a4fbd9ef5205ce39dc5a519f44305bc455d 100644
--- a/aidge_core/static_analysis.py
+++ b/aidge_core/static_analysis.py
@@ -136,7 +136,8 @@ class StaticAnalysisExt(aidge_core.StaticAnalysis):
                 bot += serie
         else:
             plt.bar(names_only, values)
-        ax.yaxis.minorticks_on()
+        if callable(getattr(ax.yaxis, 'minorticks_on', None)):
+            ax.yaxis.minorticks_on()  # Axis.minorticks_on() was added in matplotlib 3.9
         plt.grid(axis='y', which='major', linestyle='--', color='gray')
         plt.grid(axis='y', which='minor', linestyle=':', color='lightgray')
         formatter0 = matplotlib.ticker.EngFormatter(unit='')
@@ -171,7 +172,8 @@ class StaticAnalysisExt(aidge_core.StaticAnalysis):
                 left += serie
         else:
             plt.barh(names_only, values)
-        ax.xaxis.minorticks_on()
+        if callable(getattr(ax.xaxis, 'minorticks_on', None)):
+            ax.xaxis.minorticks_on()  # Axis.minorticks_on() was added in matplotlib 3.9
         plt.grid(axis='x', which='major', linestyle='--', color='gray')
         plt.grid(axis='x', which='minor', linestyle=':', color='lightgray')
         formatter0 = matplotlib.ticker.EngFormatter(unit='')
diff --git a/include/aidge/operator/MetaOperator.hpp b/include/aidge/operator/MetaOperator.hpp
index f7f1cdfd5bc0d799adc87bd7b7e1be999363627f..c6ab45290265617850c01bb00abd7f972cd3525c 100644
--- a/include/aidge/operator/MetaOperator.hpp
+++ b/include/aidge/operator/MetaOperator.hpp
@@ -69,10 +69,7 @@ public:
      * 
      * @param op The operator to copy.
      */
-    MetaOperator_Op(const MetaOperator_Op& op)
-        : OperatorTensor(op),
-          mGraph(op.mGraph->clone()) // Clone the micro-graph for isolation
-    {}
+    MetaOperator_Op(const MetaOperator_Op& op);
 
     /**
      * @brief Set the node for scheduling.
diff --git a/include/aidge/operator/MetaOperatorDefs.hpp b/include/aidge/operator/MetaOperatorDefs.hpp
index 5bb184b808e0a9d685879e53554ff3be500f5717..9597b533c14b27d282985b13cd8e1199ed5360a8 100644
--- a/include/aidge/operator/MetaOperatorDefs.hpp
+++ b/include/aidge/operator/MetaOperatorDefs.hpp
@@ -260,6 +260,18 @@ inline std::shared_ptr<Node> PaddedMaxPooling(
     return PaddedMaxPooling(to_array(kernel_dims), name, stride_dims, padding_dims, ceil_mode);
 }
 
+/**
+ * @brief Creates an LSTM (Long Short-Term Memory) operation as a MetaOperator.
+ *
+ * Unlike LSTM(), this returns the raw operator: no wrapping Node and no weight/bias producers attached.
+ *
+ * @param[in] seq_length The length of the input sequence.
+ * @param[in] name Optional name, propagated to the nodes of the generated micro-graph.
+ * @return A shared pointer to the MetaOperator_Op representing the LSTM operation.
+ */
+std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length,
+                                         const std::string &name = "");
+
 /**
  * @brief Creates an LSTM (Long Short-Term Memory) operator.
  *
@@ -278,16 +290,6 @@ std::shared_ptr<Node> LSTM(DimSize_t in_channels,
                            bool noBias = false,
                            const std::string &name = "");
 
-/**
- * @brief Creates an LSTM (Long Short-Term Memory) operation as a MetaOperator.
- *
- * This function creates an LSTM operation as a MetaOperator for use in graph-based computation.
- *
- * @param[in] seq_length The length of the input sequence.
- * @return A shared pointer to the MetaOperator_Op representing the LSTM operation.
- */
-std::shared_ptr<MetaOperator_Op> LSTM_Op(DimSize_t seq_length);
-
 std::shared_ptr<MetaOperator_Op> LeakyOp();
 std::shared_ptr<Node> Leaky(const int nbTimeSteps,
                             const float beta,
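
Usage sketch for the relocated LSTM_Op declaration (a minimal sketch with illustrative dimensions; LSTM_Op returns the bare operator, while LSTM returns it wrapped in a Node with weight and bias producers attached):

    #include "aidge/operator/MetaOperatorDefs.hpp"

    void lstmSketch() {
        // Both entry points now accept an optional name.
        std::shared_ptr<Aidge::MetaOperator_Op> op = Aidge::LSTM_Op(16, "lstm");
        std::shared_ptr<Aidge::Node> node =
            Aidge::LSTM(32, 64, 16, /*noBias=*/false, "lstm");
    }
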
diff --git a/include/aidge/operator/Operator.hpp b/include/aidge/operator/Operator.hpp
index 40899ffa7668298f3e90e09b9a30ed9f438d89b2..dd59af175231acb274126d7f396cdd502046b004 100644
--- a/include/aidge/operator/Operator.hpp
+++ b/include/aidge/operator/Operator.hpp
@@ -118,12 +118,12 @@ public:
      */
     Operator(const Operator& op):
         std::enable_shared_from_this<Operator>(),
+        mType(op.mType),
         mOperatorType(op.mOperatorType),
         mInputsCategory(op.mInputsCategory),
         mNbOut(op.mNbOut),
         mBackEdges(op.mBackEdges)
     {
-        mType = op.mType;
         mImpl = nullptr;
         // Implementation is never cloned. It is up to the non-abstract Operator copy-constructor to create a new implementation matching the copied Operator implementation.
         // See https://gitlab.eclipse.org/eclipse/aidge/aidge_core/-/merge_requests/8#note_1214050 for the discussion.
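
For context on the mType change, a generic C++ sketch (not Aidge code): members are initialized in declaration order, and list-initializing a std::string member copies it directly instead of default-constructing it and then assigning to it in the body:

    struct Example {
        std::string type;
        Example(const Example& o) : type(o.type) {}  // direct-initialized, not assigned
    };
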
diff --git a/python_binding/operator/pybind_MetaOperatorDefs.cpp b/python_binding/operator/pybind_MetaOperatorDefs.cpp
index b2811fbaab2b6cd33dc2b105f0044cd8a5edbbc7..35f3d21341fbb529d692a71e597c3b2b76c8426e 100644
--- a/python_binding/operator/pybind_MetaOperatorDefs.cpp
+++ b/python_binding/operator/pybind_MetaOperatorDefs.cpp
@@ -176,7 +176,8 @@ void declare_LSTMOp(py::module &m) {
        py::arg("nobias") = false,
        py::arg("name") = "");
   m.def("LSTMOp", &LSTM_Op,
-       py::arg("seq_length"));
+       py::arg("seq_length"),
+       py::arg("name") = "");
 }
 
 void declare_LeakyOp(py::module &m) {
diff --git a/src/backend/OperatorImpl.cpp b/src/backend/OperatorImpl.cpp
index 71f4f04b2d73e1501a2d428dd91dc8f85dd17649..08f5fe671c7502a6c5fe01dbdfb7ae4c9b95ac81 100644
--- a/src/backend/OperatorImpl.cpp
+++ b/src/backend/OperatorImpl.cpp
@@ -74,13 +74,6 @@ Aidge::ImplSpec Aidge::OperatorImpl::getRequiredSpec() const {
 
         requiredSpec.outputs.push_back({opTensor.getOutput(i)->dataType(), opTensor.getOutput(i)->dataFormat(), dims});
     }
-    // Attributes
-    if (!mOp.isAtomic()) {
-        requiredSpec.attrs.setAttr("type:!", mOp.type()); // :! mandatory qualifier
-    }
-    else {
-        requiredSpec.attrs.setAttr("type", mOp.type());
-    }
 
     const auto& inhAttrs = mOp.inheritedAttributes();
     if (inhAttrs) {
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index e1a520865e38b50bbae268b72d420f947ba6885f..fab9be91556c5ffc0bd446edcbc5abb80e99a1bb 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -266,7 +266,12 @@ void Aidge::GraphView::logOutputs(const std::string& dirName) const {
         AIDGE_THROW_OR_ABORT(std::runtime_error,
             "Could not create graph view log file: {}", inputPath);
       }
-      fmt::print(fp.get(), "{}\n", nodePtr->getOperator()->getRawOutput(outIdx)->toString().c_str());
+
+      auto oTensor = std::static_pointer_cast<OperatorTensor>(nodePtr->getOperator())->getOutput(outIdx);
+      std::shared_ptr<Tensor> fallback;
+      const Tensor& localTensor = oTensor->refFrom(fallback, "cpu");
+
+      fmt::print(fp.get(), "{}\n", localTensor.toString().c_str());
     }
   }
 }
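
The pattern introduced above, in isolation (a sketch; someTensor is a placeholder): refFrom() returns a reference to the tensor itself when it already lives on the requested backend, and otherwise copies it into fallback, so outputs can be logged even when they live on a non-CPU backend:

    std::shared_ptr<Aidge::Tensor> fallback;
    const Aidge::Tensor& local = someTensor->refFrom(fallback, "cpu");  // copies only if needed
    fmt::print("{}\n", local.toString());
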
diff --git a/src/graph/Node.cpp b/src/graph/Node.cpp
index 692806dc7c85e5512e9318008d7277881a006232..edc530179265fc1c0ea99d7fd7aafabb2e39b99d 100644
--- a/src/graph/Node.cpp
+++ b/src/graph/Node.cpp
@@ -67,7 +67,7 @@ Aidge::Connector Aidge::Node::operator()(const std::vector<Connector>& ctors) {
     }
 
     // Skip to next possible input idx
-    for (; idx < nbInputs() && (inputCategory(idx) != InputCategory::Data && inputCategory(idx) != InputCategory::OptionalData); ++idx) {}
+    for (; idx < nbInputs() && (inputCategory(idx) != InputCategory::Data); ++idx) {} // OptionalData inputs may now remain unconnected
     AIDGE_ASSERT(idx == nbInputs(), "Missing an input connector for Data input#{}", idx);
 
     return Connector(shared_from_this());
diff --git a/src/operator/GenericOperator.cpp b/src/operator/GenericOperator.cpp
index 1e28cf289960dee280457cd6ea119fcc9477cf9f..e0f7cf34a91268c33395dfc94d20c25b4cb0e3d1 100644
--- a/src/operator/GenericOperator.cpp
+++ b/src/operator/GenericOperator.cpp
@@ -45,7 +45,7 @@ Aidge::GenericOperator_Op::GenericOperator_Op(const std::string& type,
 Aidge::GenericOperator_Op::GenericOperator_Op(const Aidge::GenericOperator_Op& op)
     : OperatorTensor(op),
         mForwardDims(op.mForwardDims),
-        mAttributes(op.attributes() ? op.mAttributes : std::make_shared<DynamicAttributes>())
+        mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes)) // deep copy: the clone must not share the original's attributes
 {
     mImpl = std::make_shared<OperatorImpl>(*this, op.backend());
 }
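
Why the unconditional deep copy matters (a sketch; the attribute name "x" is arbitrary): the old code let a clone share the original's DynamicAttributes whenever they existed, so mutating one was visible through the other:

    auto original = std::make_shared<Aidge::DynamicAttributes>();
    auto aliased  = original;                                              // old behavior: shared
    auto deep     = std::make_shared<Aidge::DynamicAttributes>(*original); // new behavior: copied
    aliased->setAttr("x", 1);  // visible through 'original'
    deep->setAttr("x", 2);     // 'original' is unaffected
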
diff --git a/src/operator/MetaOperator.cpp b/src/operator/MetaOperator.cpp
index ae3c3ed6ca85c059204c524f467f5387f656e30b..96c5b219a35a32fb9574eda1a36a8fa4ee502cc4 100644
--- a/src/operator/MetaOperator.cpp
+++ b/src/operator/MetaOperator.cpp
@@ -54,8 +54,31 @@ Aidge::MetaOperator_Op::MetaOperator_Op(const std::string& type, const std::shar
     }
 }
 
+Aidge::MetaOperator_Op::MetaOperator_Op(const MetaOperator_Op& op)
+    : OperatorTensor(op),
+        mGraph(op.mGraph->clone()), // Clone the micro-graph for isolation
+        mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes)) // Clone attributes
+{
+    // Re-associate this operator's outputs with the cloned micro-graph's outputs (needed by custom implementations)
+    for (size_t outputIdx = 0; outputIdx < mOutputs.size(); ++outputIdx) {
+        const auto& outputOp = mGraph->getOrderedOutputs()[outputIdx];
+        if (outputOp.first) {
+            mOutputs[outputIdx] = std::dynamic_pointer_cast<Tensor>(outputOp.first->getOperator()->getRawOutput(outputOp.second));
+        }
+    }
+
+    // Attributes are already cloned.
+}
+
 std::shared_ptr<Aidge::Operator> Aidge::MetaOperator_Op::clone() const {
-    return std::make_shared<MetaOperator_Op>(type(), mGraph->clone());
+    auto metaOp = std::make_shared<MetaOperator_Op>(*this);
+    if (mImpl) {
+        // Only call setBackend() if mImpl is not nullptr.
+        // The inner-graph backend is already set in the MetaOperator_Op copy
+        // constructor, when the graph is cloned.
+        metaOp->setBackend(mImpl->backend());
+    }
+    return metaOp;
 }
 
 void Aidge::MetaOperator_Op::associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) {
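
What the new clone() guarantees, as a sketch (assumes the usual getMicroGraph() accessor): the clone owns an isolated micro-graph whose outputs back the clone's own output tensors, and the backend is re-propagated only when the source operator actually had an implementation:

    auto copy = std::static_pointer_cast<Aidge::MetaOperator_Op>(metaOp->clone());
    assert(copy->getMicroGraph() != metaOp->getMicroGraph());  // deep copy, not shared
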
diff --git a/src/operator/MetaOperatorDefs/LSTM.cpp b/src/operator/MetaOperatorDefs/LSTM.cpp
index 22c0469b34b52670a910f63604d02f3f8bf6eab7..c7fbe8a16aa727782b4d1b8ecb0b6d8a29c50a86 100644
--- a/src/operator/MetaOperatorDefs/LSTM.cpp
+++ b/src/operator/MetaOperatorDefs/LSTM.cpp
@@ -23,11 +23,8 @@
 
 namespace Aidge {
 
-std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
-                           const DimSize_t hiddenChannel,
-                           const DimSize_t seqLength,
-                           bool noBias,
-                           const std::string& name)
+std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength,
+                                         const std::string& name)
 {
     // Construct micro-graph
     auto input = Identity((!name.empty()) ? name + "_input" : "");
@@ -113,7 +110,18 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
         {hiddenState, 1}, {cellState, 1}});
     microGraph->setOrderedOutputs({{hiddenState, 0}, {cellState, 0}});
 
-    auto metaOp = MetaOperator("LSTM", microGraph, {}, name);
+    return std::make_shared<MetaOperator_Op>("LSTM", microGraph);
+}
+
+std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
+                           const DimSize_t hiddenChannel,
+                           const DimSize_t seqLength,
+                           bool noBias,
+                           const std::string& name)
+{
+    auto op = LSTM_Op(seqLength, name);
+    auto metaOp = std::make_shared<Node>(op, name);
+    op->setUpperNode(metaOp);
     addProducer(metaOp, 1, {hiddenChannel, inChannel}, "wi");
     addProducer(metaOp, 2, {hiddenChannel, inChannel}, "wo");
     addProducer(metaOp, 3, {hiddenChannel, inChannel}, "wf");
@@ -135,93 +143,4 @@ std::shared_ptr<Node> LSTM(const DimSize_t inChannel,
     return metaOp;
 }
 
-std::shared_ptr<MetaOperator_Op> LSTM_Op(const DimSize_t seqLength)
-{
-    // Construct micro-graph
-    auto input = Identity("");
-    auto hiddenState = Memorize(seqLength, "");
-    auto cellState = Memorize(seqLength, "");
-    auto add = Add("");
-
-    // Forget gate
-    auto forgetGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(forgetGateX, 0, 0);
-    auto forgetGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(forgetGateH, 1, 0);
-    auto forgetGate = Add("");
-    forgetGateX->addChild(forgetGate, 0, 0);
-    forgetGateH->addChild(forgetGate, 0, 1);
-    auto forgetGateAct = Sigmoid("");
-    auto forgetGateMul = Mul("");
-    forgetGate->addChild(forgetGateAct, 0, 0);
-    forgetGateAct->addChild(forgetGateMul, 0, 0);
-    forgetGateMul->addChild(add, 0, 0);
-    cellState->addChild(forgetGateMul, 1, 1);
-
-    // Input gate
-    auto inputGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(inputGateX, 0, 0);
-    auto inputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(inputGateH, 1, 0);
-    auto inputGate = Add("");
-    inputGateX->addChild(inputGate, 0, 0);
-    inputGateH->addChild(inputGate, 0, 1);
-    auto inputGateAct = Sigmoid("");
-    auto inputGateMul = Mul("");
-    inputGate->addChild(inputGateAct, 0, 0);
-    inputGateAct->addChild(inputGateMul, 0, 0);
-    inputGateMul->addChild(add, 0, 1);
-
-    // Candidate for cell update
-    auto cellCandidateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(cellCandidateX, 0, 0);
-    auto cellCandidateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(cellCandidateH, 1, 0);
-    auto cellCandidate = Add("");
-    cellCandidateX->addChild(cellCandidate, 0, 0);
-    cellCandidateH->addChild(cellCandidate, 0, 1);
-    auto cellCandidateAct = Tanh("");
-    cellCandidate->addChild(cellCandidateAct, 0, 0);
-    cellCandidateAct->addChild(inputGateMul, 0, 1);
-
-    // Output gate
-    auto outputGateX = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    input->addChild(outputGateX, 0, 0);
-    auto outputGateH = std::make_shared<Node>(std::make_shared<FC_Op>(), "");
-    hiddenState->addChild(outputGateH, 1, 0);
-    auto outputGate = Add("");
-    outputGateX->addChild(outputGate, 0, 0);
-    outputGateH->addChild(outputGate, 0, 1);
-    auto outputGateAct = Sigmoid("");
-    auto outputGateMul = Mul("");
-    outputGate->addChild(outputGateAct, 0, 0);
-    outputGateAct->addChild(outputGateMul, 0, 0);
-
-    // Updated cell state to help determine new hidden state
-    auto cellUpdatedAct = Tanh("");
-    add->addChild(cellUpdatedAct, 0, 0);
-    cellUpdatedAct->addChild(outputGateMul, 0, 1);
-    outputGateMul->addChild(hiddenState, 0, 0);
-    add->addChild(cellState, 0, 0);
-
-    std::shared_ptr<GraphView> microGraph = std::make_shared<GraphView>();
-    microGraph->add(input);
-    microGraph->add({hiddenState, cellState, add,
-        forgetGateX, forgetGateH, forgetGate, forgetGateAct, forgetGateMul,
-        inputGateX, inputGateH, inputGate, inputGateAct, inputGateMul,
-        cellCandidateX, cellCandidateH, cellCandidate, cellCandidateAct,
-        outputGateX, outputGateH, outputGate, outputGateAct, outputGateMul,
-        cellUpdatedAct}, false);
-
-    microGraph->setOrderedInputs({{input, 0},
-        {inputGateX, 1}, {outputGateX, 1}, {forgetGateX, 1}, {cellCandidateX, 1},
-        {inputGateH, 1}, {outputGateH, 1}, {forgetGateH, 1}, {cellCandidateH, 1},
-        {inputGateX, 2}, {outputGateX, 2}, {forgetGateX, 2}, {cellCandidateX, 2},
-        {inputGateH, 2}, {outputGateH, 2}, {forgetGateH, 2}, {cellCandidateH, 2},
-        {hiddenState, 1}, {cellState, 1}});
-    microGraph->setOrderedOutputs({{hiddenState, 0}, {cellState, 0}});
-
-    return std::make_shared<MetaOperator_Op>("LSTM", microGraph);
-}
-
 } // namespace Aidge
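
Net effect of the refactor: LSTM() no longer duplicates the ~90-line micro-graph construction and instead delegates to LSTM_Op(), so the two construction paths below are equivalent up to the producers that LSTM() attaches afterwards (sketch):

    auto viaHelper = Aidge::LSTM(32, 64, 16, /*noBias=*/false, "lstm");

    auto op      = Aidge::LSTM_Op(16, "lstm");
    auto viaNode = std::make_shared<Aidge::Node>(op, "lstm");
    op->setUpperNode(viaNode);  // same wiring LSTM() performs before adding producers
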
diff --git a/src/utils/Log.cpp b/src/utils/Log.cpp
index b4c64d527d361db31e46c6b6dbe6d3b4ebf6765b..f4bc32e8b70637738b14c684603b71a036e74fb6 100644
--- a/src/utils/Log.cpp
+++ b/src/utils/Log.cpp
@@ -24,6 +24,8 @@ namespace Aidge {
  * @brief Initialize console log level from environment. If compile mode is
  * DEBUG, then the default level is Log::Level::Debug, else it is
  * Log::Level::Notice.
+ *
+ * WARNING: Do not use this variable directly; use getConsoleLevel() instead.
  */
 Log::Level Log::mConsoleLevel = []() {
 #ifndef NDEBUG
@@ -58,7 +60,7 @@ bool Log::mConsoleColor = []() {
  */
 Log::Level Log::mFileLevel = []() {
 #ifndef NDEBUG
-    constexpr Level defaultLevel = Level::Debug;
+    constexpr Log::Level defaultLevel = Level::Debug;
 #else
     constexpr Log::Level defaultLevel = Level::Notice;
 #endif
@@ -102,8 +104,13 @@ void Log::log(Level level, const std::string& msg) {
         while (start < text.size()) {
             std::size_t lineWidth = 0;
             std::size_t current = start;
-
-            while (current < text.size() && lineWidth < width) {
+            bool inPath = false; // while true, exceed 'width' rather than wrap inside a path
+            while (current < text.size() && (lineWidth < width || inPath)) {
+                if (inPath) {
+                    if (text[current] == ' ' || text[current] == '\n') {
+                        inPath = false; // whitespace ends the path: resume normal wrapping
+                    }
+                }
                 if (text[current] == '\033') {
                     // Found ANSI escape sequence, skip until 'm'
                     std::size_t ansiEnd = text.find('m', current);
@@ -119,6 +126,9 @@ void Log::log(Level level, const std::string& msg) {
                     // Handle explicit line break
                     break;
                 } else {
+                    if (!inPath && (text[current] == '/' || text[current] == '\\')) {
+                        inPath = true; // a path separator starts a path: suspend wrapping
+                    }
                     // Normal character, increase line width
                     ++lineWidth;
                     ++current;
@@ -162,9 +172,9 @@ void Log::log(Level level, const std::string& msg) {
     // Get the string representation of the log level
     const auto levelStr = EnumStrings<Level>::data[static_cast<std::size_t>(level)];
     const std::size_t levelIndentSizes[6] = {10, 9, 11, 12, 10, 10};
-    const std::size_t width = 80 - levelIndentSizes[static_cast<std::size_t>(level)];
+    const std::size_t width = 100 - levelIndentSizes[static_cast<std::size_t>(level)];
 
-    if (level >= mConsoleLevel) {
+    if (level >= getConsoleLevel()) {
         for (const auto& context : mContext) {
             fmt::println("Context: {}", context);
         }