diff --git a/include/aidge/scheduler/ProdConso.hpp b/include/aidge/scheduler/ProdConso.hpp
index cfc83cbf91cb7eeef2a3bbb0a4c5017a2480fe9b..f30e00afa082658fce1eca8b4cb885e1b23fb7c7 100644
--- a/include/aidge/scheduler/ProdConso.hpp
+++ b/include/aidge/scheduler/ProdConso.hpp
@@ -12,6 +12,7 @@
 #ifndef AIDGE_SCHEDULER_PRODCONSO_H_
 #define AIDGE_SCHEDULER_PRODCONSO_H_
 
+#include <memory>
 #include <string>
 #include <vector>
 
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index f7390facd0020bfe2708fd46c858263787acbe89..6220f5558d073f077db632960cff8be10b027bb0 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -438,23 +438,32 @@ void Aidge::GraphView::compile(const std::string& backend, const Aidge::DataType
 }
 
 bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_t>>& dims, bool allowDataDependency) {
+    Log::debug("Starting dimension forward propagation for GraphView");
     // remove current Data connections and use dummy inputs to propagate dimensions
     // setInputs
     // Link every tensor to the right pointer
     // following parent - children information
     if (!dims.empty()){
-      Log::debug("forwardDims(): setting graph input dims ({} dims provided).", dims.size());
+      auto msg = fmt::format("Manually setting GraphView input dims with provided parameters:");
+      for (std::size_t i = 0; i < dims.size(); ++i)
+        msg = fmt::format("{}\n\t* input#{} {}", msg, i, dims[i]);
+      Log::info("{}", msg);
 
+      Log::debug("Validating input dimensions against existing graph inputs");
       std::size_t i = 0;
       for (auto& input : mInputNodes) {
         const auto& currentTensorPtr =
             std::dynamic_pointer_cast<OperatorTensor>(input.first->getOperator())->getInput(input.second);
         if (i < dims.size() && !dims[i].empty()) {
           if (currentTensorPtr) { // tensor detected
-              AIDGE_ASSERT(currentTensorPtr->dims() == dims[i],
-                "forwardDims(): mismatch between existing and provided size for graph input#{} (existing size: {}, provided size: {})",
-                i, currentTensorPtr->dims(), dims[i])
+              if (currentTensorPtr->dims() != dims[i]) {
+                Log::error("Dimension mismatch for input#{} - Expected: {}, Provided: {}",
+                  i, currentTensorPtr->dims(), dims[i]);
+                return false;
+              }
+              Log::debug("Input#{} dimensions match existing tensor", i);
           } else {
+              Log::debug("Creating new tensor for input#{} with dims {}", i, dims[i]);
               auto tensor = std::make_shared<Tensor>(dims[i]);
               input.first->getOperator()->setInput(input.second, tensor);
           }
@@ -464,12 +473,12 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
             || input.first->inputCategory(input.second) == InputCategory::OptionalParam);
 
           if (currentTensorPtr) {
-            Log::debug("forwardDims(): existing dims are {} for graph input#{} for input#{} of node {} (of type {})",
-              i, input.second, input.first->name(), input.first->type(), currentTensorPtr->dims());
+            Log::debug("Using existing dimensions {} for graph input#{} (matching input#{} of node [\033[1m\033[3m{}\033[0m] - [\033[1m\033[3m{}\033[0m])",
+                    currentTensorPtr->dims(), i, input.second, input.first->name(), input.first->type());
           }
           else if (!optional) {
-            Log::warn("forwardDims(): did not specify dims for mandatory graph input#{} for input#{} of node {} (of type {})",
-              i, input.second, input.first->name(), input.first->type());
+            Log::warn("Missing dimensions for mandatory graph input#{} (matching input#{} of node [\033[1m\033[3m{}\033[0m] - [\033[1m\033[3m{}\033[0m])",
+                    i, input.second, input.first->name(), input.first->type());
           }
         }
         ++i;
@@ -477,29 +486,35 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
     }
 
     // Ensure every node in the graph is correctly connected
+    Log::debug("Verifying graph connections and tensor validity");
     for (std::shared_ptr<Node> nodePtr : getNodes()) {
         for (IOIndex_t i = 0; i < nodePtr->nbInputs(); ++i) {
             std::pair<std::shared_ptr<Node>, IOIndex_t> inputI = nodePtr->input(i);
             if (inputI.first) {
-                // Check that associated Data are properly connected...
-                AIDGE_ASSERT(nodePtr->getOperator()->getRawInput(i) == inputI.first->getOperator()->getRawOutput(inputI.second),
-                  "Input#{} for node {} ({}) is not properly connected to output#{} of node {} ({}): Data or Tensor mismatch!",
-                    i, nodePtr->name(), nodePtr->type(), inputI.second, inputI.first->name(), inputI.first->type());
-            } else if (nodePtr->inputCategory(i) != InputCategory::OptionalData && nodePtr->inputCategory(i) != InputCategory::OptionalParam) {
-                // Input is missing
-                AIDGE_ASSERT(nodePtr->getOperator()->getRawInput(i),
-                  "Missing input#{} for node {} ({})", i, nodePtr->name(), nodePtr->type());
-                AIDGE_ASSERT(!std::static_pointer_cast<Tensor>(nodePtr->getOperator()->getRawInput(i))->undefined(),
-                  "Undefined input#{} for node {} ({})", i, nodePtr->name(), nodePtr->type());
+                if (nodePtr->getOperator()->getRawInput(i) != inputI.first->getOperator()->getRawOutput(inputI.second)) {
+                    Log::error("Connection mismatch: Input#{} of node [\033[1m\033[3m{}\033[0m (\033[1m\033[3m{}\033[0m)] -> Output#{} of node [\033[1m\033[3m{}\033[0m - (\033[1m\033[3m{}\033[0m)]",
+                        i, nodePtr->name(), nodePtr->type(), inputI.second, inputI.first->name(), inputI.first->type());
+                    return false;
+                }
+            } else if (nodePtr->inputCategory(i) != InputCategory::OptionalData &&
+                    nodePtr->inputCategory(i) != InputCategory::OptionalParam) {
+                if (!nodePtr->getOperator()->getRawInput(i)) {
+                    Log::error("Missing mandatory input#{} for node [\033[1m\033[3m{}\033[0m - (\033[1m\033[3m{}\033[0m)]",
+                        i, nodePtr->name(), nodePtr->type());
+                    return false;
+                }
+                if (std::static_pointer_cast<Tensor>(nodePtr->getOperator()->getRawInput(i))->undefined()) {
+                    Log::error("Undefined mandatory input#{} for node [\033[1m\033[3m{}\033[0m - (\033[1m\033[3m{}\033[0m)]",
+                        i, nodePtr->name(), nodePtr->type());
+                    return false;
+                }
             }
-
         }
     }
 
-    // List of nodes that are already dims forwarded
-    std::set<std::shared_ptr<Node>> dimsForwarded;
-    // Establish initial list of dims forwardable nodes:
-    // input nodes and childs from Producers
+    Log::debug("Initializing dimension propagation");
+    // Establish initial list of dims forwardable nodes: graph input node + Producers childs
+    std::set<std::shared_ptr<Node>> dimsForwarded; ///< List of nodes that are already dims forwarded
     std::set<std::shared_ptr<Node>> listNodes = inputNodes();
     for (const auto& nodePtr : getNodes()) {
         if (nodePtr->type() == Producer_Op::Type) {
diff --git a/src/graph/Matching.cpp b/src/graph/Matching.cpp
index cc13308282eba562ee894637d73d1ad790a246c6..282ed20207f3c91637b8778c77ed1c97d11b0ee9 100644
--- a/src/graph/Matching.cpp
+++ b/src/graph/Matching.cpp
@@ -24,6 +24,7 @@
 
 #include "aidge/graph/GraphView.hpp"
 #include "aidge/graph/Node.hpp"
+#include "aidge/utils/Log.hpp"
 
 static void removeLeadingWhitespace(std::string& str) {
     str.erase(str.begin(),
@@ -84,7 +85,7 @@ std::set<Aidge::SinglePassGraphMatching::MatchingResult> Aidge::SinglePassGraphM
     if (disjoint) {
         matches = filterLonguestDisjoint(matches);
     }
-
+    Log::info("Graph matching complete.\nFound {} matches for the query", matches.size());
     return matches;
 }
 
diff --git a/src/recipes/FuseBatchNorm.cpp b/src/recipes/FuseBatchNorm.cpp
index 55be9636f4596b0deeb81d0174b717a91ff76644..0fd9d7b4429412c52cfbfd57c581eed19f242169 100644
--- a/src/recipes/FuseBatchNorm.cpp
+++ b/src/recipes/FuseBatchNorm.cpp
@@ -193,8 +193,8 @@ void Aidge::fuseBatchNorm(std::shared_ptr<Aidge::GraphView> graphView) {
     auto matches = SinglePassGraphMatching(graphView).match("(Conv2D|ConvDepthWise2D|PaddedConv2D|PaddedConvDepthWise2D)->BatchNorm2D");
 
     for (auto match : matches) {
-        fmt::println("Match !");
         auto rootNode = match.graph->rootNode();
         fuseBatchNorm(rootNode, *rootNode->getChildren().begin());
     }
+    Log::info("[\033[1m\033[3mFuseBatchNorm\033[0m] recipe completed.");
 }
diff --git a/src/scheduler/ProdConso.cpp b/src/scheduler/ProdConso.cpp
index a3bff53c3643a5da361dec5944f47a27f148a995..0e20796febb86f7e0e795f905222aa621609cc56 100644
--- a/src/scheduler/ProdConso.cpp
+++ b/src/scheduler/ProdConso.cpp
@@ -9,13 +9,19 @@
  *
  ********************************************************************************/
 
-#include <cassert>
-#include <string>
-
 #include "aidge/scheduler/ProdConso.hpp"
+
+#include <algorithm>  // std::fill
+#include <cstddef>  // std::size_t
+#include <memory>
+#include <vector>
+
+#include "aidge/data/Elts.hpp"
 #include "aidge/operator/Operator.hpp"
+#include "aidge/data/Data.hpp"
 #include "aidge/data/Tensor.hpp"
 #include "aidge/utils/ErrorHandling.hpp"
+#include "aidge/utils/Types.h"
 
 Aidge::ProdConso::ProdConso(const Operator& op, bool inPlace):
     mOp(op),