From cfc1e933ac185418acc120781c4541ce471c721f Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 10 Jan 2025 16:15:38 +0000
Subject: [PATCH 01/14] Update ConstantFolding recipes with arg to consider
 Shape as constant.

---
 include/aidge/recipes/Recipes.hpp         |  8 +++++++-
 python_binding/recipes/pybind_Recipes.cpp |  8 ++++++++
 src/recipes/ConstantFolding.cpp           | 22 ++++++++++++----------
 3 files changed, 27 insertions(+), 11 deletions(-)

diff --git a/include/aidge/recipes/Recipes.hpp b/include/aidge/recipes/Recipes.hpp
index b0bc6dcef..f019eb51c 100644
--- a/include/aidge/recipes/Recipes.hpp
+++ b/include/aidge/recipes/Recipes.hpp
@@ -22,7 +22,13 @@
 
 namespace Aidge {
 
-void constantFolding(std::shared_ptr<GraphView> graph);
+/**
+ * @brief Retrieve part of the graph that can be pre-computed and replace them by a Producer.
+ *
+ * @param graph Graph to fold the constant
+ * @param constant_shape If true Shape operators are considered to be constant
+ */
+void constantFolding(std::shared_ptr<GraphView> graph, bool constantShape=false);
 
 // FUSE MATMUL + ADD -> FC
 
diff --git a/python_binding/recipes/pybind_Recipes.cpp b/python_binding/recipes/pybind_Recipes.cpp
index 500367cb8..06f98ad4f 100644
--- a/python_binding/recipes/pybind_Recipes.cpp
+++ b/python_binding/recipes/pybind_Recipes.cpp
@@ -25,6 +25,14 @@ namespace Aidge {
 void init_Recipes(py::module &m)
 {
 
+  m.def("constant_folding", static_cast<void(*)(std::shared_ptr<GraphView>, bool)>(constantFolding), py::arg("graph_view"), py::arg("constant_shape") = false, R"mydelimiter(
+    Retrieve part of the graph that can be pre-computed and replace them by a Producer.
+
+    :param graph_view: Graph view on which we want to apply the recipe
+    :type graph_view: :py:class:`aidge_core.GraphView`
+    :param constant_shape: If true, ``Shape`` operator are considered constant, default=False
+    :type constant_shape: bool, optional
+    )mydelimiter");
 
   m.def("matmul_to_fc", static_cast<void(*)(std::shared_ptr<GraphView>)>(matMulToFC), py::arg("graph_view"), R"mydelimiter(
     Recipe to Fuse MatMul and Add operators into an :py:class:`aidge_core.FC` operator.
diff --git a/src/recipes/ConstantFolding.cpp b/src/recipes/ConstantFolding.cpp
index 613393756..031e448cf 100644
--- a/src/recipes/ConstantFolding.cpp
+++ b/src/recipes/ConstantFolding.cpp
@@ -17,17 +17,18 @@
 #include "aidge/graph/GraphView.hpp"
 #include "aidge/graph/Node.hpp"
 #include "aidge/operator/Producer.hpp"
+#include "aidge/operator/Shape.hpp"
 #include "aidge/recipes/Recipes.hpp"
 #include "aidge/utils/ErrorHandling.hpp"
 #include "aidge/utils/Types.h"
 
-void Aidge::constantFolding(std::shared_ptr<GraphView> graph) {
+void Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape) {
     bool folded;
     do {
         folded = false;
         std::set<std::shared_ptr<Node>> candidates;
         for (const std::shared_ptr<Node>& nodePtr : graph->getNodes()) {
-            if (nodePtr->type() == Producer_Op::Type) {
+            if (nodePtr->type() == Producer_Op::Type || (constantShape && (nodePtr->type() != Shape_Op::Type))) {
                 const auto& childs = nodePtr->getChildren();
                 candidates.insert(childs.begin(), childs.end());
             }
@@ -39,17 +40,18 @@ void Aidge::constantFolding(std::shared_ptr<GraphView> graph) {
             size_t i = 0;
             for (const auto& input : node->inputs()) {
                 if (input.first) {
-                    if (input.first->type() != Producer_Op::Type) {
+                    if (input.first->type() != Producer_Op::Type || (constantShape && (input.first->type() != Shape_Op::Type))) {
                         foldable = false;
                         break;
                     }
-
-                    const auto& producer = std::static_pointer_cast<Producer_Op>(input.first->getOperator());
-                    if (!producer->constant()) {
-                        Log::info("Node {} (of type {}) not foldable because Producer input {} not Constant",
-                            node->name(), node->type(), input.first->name());
-                        foldable = false;
-                        break;
+                    if (input.first->type() == Producer_Op::Type){
+                        const auto& producer = std::static_pointer_cast<Producer_Op>(input.first->getOperator());
+                        if (!producer->constant()) {
+                            Log::info("Node {} (of type {}) not foldable because Producer input {} not Constant",
+                                node->name(), node->type(), input.first->name());
+                            foldable = false;
+                            break;
+                        }
                     }
 
                     replaceGraph->add(input.first, false);
-- 
GitLab


From d20e0aca9195c796e3d8c17d9f6f51b48ba432fb Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Thu, 27 Feb 2025 09:12:42 +0000
Subject: [PATCH 02/14] Constant folding now returns true if graph has been
 modified + fix behavior when constantShape is True.

---
 include/aidge/recipes/Recipes.hpp         |  3 ++-
 python_binding/recipes/pybind_Recipes.cpp |  4 +++-
 src/recipes/ConstantFolding.cpp           | 25 ++++++++++++++++++++---
 3 files changed, 27 insertions(+), 5 deletions(-)

diff --git a/include/aidge/recipes/Recipes.hpp b/include/aidge/recipes/Recipes.hpp
index f019eb51c..cf428e558 100644
--- a/include/aidge/recipes/Recipes.hpp
+++ b/include/aidge/recipes/Recipes.hpp
@@ -27,8 +27,9 @@ namespace Aidge {
  *
  * @param graph Graph to fold the constant
  * @param constant_shape If true Shape operators are considered to be constant
+ * @return bool True if the graph has been modified
  */
-void constantFolding(std::shared_ptr<GraphView> graph, bool constantShape=false);
+bool constantFolding(std::shared_ptr<GraphView> graph, bool constantShape=false);
 
 // FUSE MATMUL + ADD -> FC
 
diff --git a/python_binding/recipes/pybind_Recipes.cpp b/python_binding/recipes/pybind_Recipes.cpp
index 06f98ad4f..0c9f86fe7 100644
--- a/python_binding/recipes/pybind_Recipes.cpp
+++ b/python_binding/recipes/pybind_Recipes.cpp
@@ -25,13 +25,15 @@ namespace Aidge {
 void init_Recipes(py::module &m)
 {
 
-  m.def("constant_folding", static_cast<void(*)(std::shared_ptr<GraphView>, bool)>(constantFolding), py::arg("graph_view"), py::arg("constant_shape") = false, R"mydelimiter(
+  m.def("constant_folding", static_cast<bool(*)(std::shared_ptr<GraphView>, bool)>(constantFolding), py::arg("graph_view"), py::arg("constant_shape") = false, R"mydelimiter(
     Retrieve part of the graph that can be pre-computed and replace them by a Producer.
 
     :param graph_view: Graph view on which we want to apply the recipe
     :type graph_view: :py:class:`aidge_core.GraphView`
     :param constant_shape: If true, ``Shape`` operator are considered constant, default=False
     :type constant_shape: bool, optional
+    :return: True if the graph has been modified
+    :rtype: bool
     )mydelimiter");
 
   m.def("matmul_to_fc", static_cast<void(*)(std::shared_ptr<GraphView>)>(matMulToFC), py::arg("graph_view"), R"mydelimiter(
diff --git a/src/recipes/ConstantFolding.cpp b/src/recipes/ConstantFolding.cpp
index 031e448cf..05c92afb3 100644
--- a/src/recipes/ConstantFolding.cpp
+++ b/src/recipes/ConstantFolding.cpp
@@ -22,7 +22,9 @@
 #include "aidge/utils/ErrorHandling.hpp"
 #include "aidge/utils/Types.h"
 
-void Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape) {
+bool Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape) {
+    bool modified = false;
+    Log::info("Running constant folding on graph {}", graph->name());
     bool folded;
     do {
         folded = false;
@@ -35,19 +37,32 @@ void Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
         }
 
         for (const auto& node : candidates) {
+            Log::debug("Checking if node {} (of type {}) is foldable", node->name(), node->type());
             bool foldable = true;
             auto replaceGraph = std::make_shared<GraphView>();
             size_t i = 0;
             for (const auto& input : node->inputs()) {
                 if (input.first) {
-                    if (input.first->type() != Producer_Op::Type || (constantShape && (input.first->type() != Shape_Op::Type))) {
+                    if (!(input.first->type() == Producer_Op::Type || (constantShape && (input.first->type() == Shape_Op::Type)))) {
+                        Log::debug("Input {} of node {} (of type {}) not foldable, because {} (of type {}) is not a constant. With constant = {}",
+                            i, node->name(), node->type(), input.first->name(), input.first->type(), constantShape);
                         foldable = false;
                         break;
                     }
+
+                    if (constantShape && (input.first->type() == Shape_Op::Type)){
+                        if (!std::static_pointer_cast<OperatorTensor>(input.first->getOperator())->dimsForwarded()){
+                            Log::debug("Node {} (of type {}) not foldable because Shape input [{}] {} dims has not been forwarded",
+                                node->name(), node->type(), i, input.first->name());
+                            foldable = false;
+                            break;
+                        }
+                    }
+
                     if (input.first->type() == Producer_Op::Type){
                         const auto& producer = std::static_pointer_cast<Producer_Op>(input.first->getOperator());
                         if (!producer->constant()) {
-                            Log::info("Node {} (of type {}) not foldable because Producer input {} not Constant",
+                            Log::debug("Node {} (of type {}) not foldable because Producer input {} not Constant",
                                 node->name(), node->type(), input.first->name());
                             foldable = false;
                             break;
@@ -59,6 +74,8 @@ void Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
                 else if (node->inputCategory(i) != InputCategory::OptionalData
                     && node->inputCategory(i) != InputCategory::OptionalParam)
                 {
+                    Log::debug("Input {} of node {} (of type {}) is mandatory but not set, cannot fold.",
+                        i, node->name(), node->type());
                     foldable = false;
                     break;
                 }
@@ -84,6 +101,7 @@ void Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
 
                 if (GraphView::replace(replaceGraph, prodGraph)) {
                     folded = true;
+                    modified = true;
                 }
                 else {
                     Log::warn("Error with replace when folding node {} (of type {})",
@@ -93,4 +111,5 @@ void Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
         }
     }
     while (folded);
+    return modified;
 }
-- 
GitLab


From fd8534670497869ca3e611dc713afe1e5d144663 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Thu, 27 Feb 2025 09:24:24 +0000
Subject: [PATCH 03/14] Adapt forwardDims to also fold shape if
 shapeAsConstant=True.

---
 include/aidge/graph/GraphView.hpp         |  3 +-
 python_binding/graph/pybind_GraphView.cpp |  4 +-
 src/graph/GraphView.cpp                   | 53 ++++++++++++++++++++---
 3 files changed, 51 insertions(+), 9 deletions(-)

diff --git a/include/aidge/graph/GraphView.hpp b/include/aidge/graph/GraphView.hpp
index 8a78d8bfc..8ac05b5b8 100644
--- a/include/aidge/graph/GraphView.hpp
+++ b/include/aidge/graph/GraphView.hpp
@@ -291,9 +291,10 @@ public:
      *
      * @param dims Vector of dimension vectors for graph inputs. Empty by default.
      * @param allowDataDependency Whether to allow data-dependent dimension computation. False by default.
+     * @param shapeAsConstant If true treat shape as constant and fold the graph, this implies that the graph may change if part of the graph are foldable. False by default.
      * @return true if dimension propagation succeeded, false otherwise.
      */
-    bool forwardDims(const std::vector<std::vector<DimSize_t>>& dims = {}, bool allowDataDependency = false);
+    bool forwardDims(const std::vector<std::vector<DimSize_t>>& dims = {}, bool allowDataDependency = false, bool shapeAsConstant = false);
 
     /** @brief Set the same backend for each Operator of the GraphView object's Nodes. */
     void setBackend(const std::string& backend, const DeviceIdx_t device = 0) const;
diff --git a/python_binding/graph/pybind_GraphView.cpp b/python_binding/graph/pybind_GraphView.cpp
index 31e3a0099..ec4119e32 100644
--- a/python_binding/graph/pybind_GraphView.cpp
+++ b/python_binding/graph/pybind_GraphView.cpp
@@ -128,7 +128,7 @@ void init_GraphView(py::module& m) {
           .def("clone", &GraphView::clone)
           .def("get_nodes", &GraphView::getNodes)
           .def("get_node", &GraphView::getNode, py::arg("node_name"))
-          .def("forward_dims", &GraphView::forwardDims, py::arg("dims")=std::vector<std::vector<DimSize_t>>(), py::arg("allow_data_dependency") = false,
+          .def("forward_dims", &GraphView::forwardDims, py::arg("dims")=std::vector<std::vector<DimSize_t>>(), py::arg("allow_data_dependency") = false, py::arg("shape_as_constant") = false,
           R"mydelimiter(
             Compute and propagate Tensor dimensions through the GraphView.
 
@@ -167,6 +167,8 @@ void init_GraphView(py::module& m) {
                 Vector of dimension vectors for graph inputs. Empty by default.
             allow_data_dependency : bool, optional
                 Whether to allow data-dependent dimension computation, by default False
+            shape_as_constant : bool, optional
+                If True, treat shape as constant and fold the graph. This implies that the graph may change if part of the graph are foldable, by default False.
 
             Returns
             -------
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index b372fd392..0e7951fe8 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -28,10 +28,12 @@
 
 #include "aidge/data/Tensor.hpp"
 #include "aidge/operator/GenericOperator.hpp"
+#include "aidge/operator/Memorize.hpp"
 #include "aidge/operator/MetaOperator.hpp"
 #include "aidge/operator/OperatorTensor.hpp"
 #include "aidge/operator/Producer.hpp"
-#include "aidge/operator/Memorize.hpp"
+#include "aidge/operator/Shape.hpp"
+#include "aidge/recipes/Recipes.hpp" // constantFolding
 #include "aidge/utils/Directories.hpp"
 #include "aidge/utils/FileManagement.hpp"
 #include "aidge/utils/ErrorHandling.hpp"
@@ -443,7 +445,7 @@ void Aidge::GraphView::compile(const std::string& backend, const Aidge::DataType
     forwardDims(dims);
 }
 
-bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_t>>& dims, bool allowDataDependency) {
+bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_t>>& dims, bool allowDataDependency, bool shapeAsConstant) {
     Log::debug("Starting dimension forward propagation for GraphView");
     // remove current Data connections and use dummy inputs to propagate dimensions
     // setInputs
@@ -521,7 +523,7 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
     Log::debug("Initializing dimension propagation");
     // Establish initial list of dims forwardable nodes: graph input node + Producers childs
     std::set<std::shared_ptr<Node>> dimsForwarded; ///< List of nodes that are already dims forwarded
-    std::set<std::shared_ptr<Node>> listNodes = inputNodes();
+    std::set<std::shared_ptr<Node>> listNodes = inputNodes(); // list of node to forward dims
     for (const auto& nodePtr : getNodes()) {
         if (nodePtr->type() == Producer_Op::Type) {
             // Producers are already dims forwarded!
@@ -534,13 +536,17 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
             }
         }
     }
-
     do {
-        std::set<std::shared_ptr<Node>> nextList;
+        Log::debug("List of node to forward dimensions:");
+        for(auto node : listNodes){
+            Log::debug("\t- Node {} (of type {})", node->name(), node->type());
+        }
+        std::set<std::shared_ptr<Node>> nextList; // future listNodes
         for (const auto& nodePtr : listNodes) {
+            Log::debug("Trying to forward dims of node {} (of type {})", nodePtr->name(), nodePtr->type());
+
             if (nodePtr->getOperator()->operatorType() == OperatorType::Tensor) {
                 const auto op = std::static_pointer_cast<OperatorTensor>(nodePtr->getOperator());
-
                 bool anyParent = false;
                 bool parentsForwarded = true;
                 for (const auto& parent : nodePtr->getParents()) {
@@ -564,6 +570,34 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
                     Log::debug("Dimensions forwarded for node {} (of type {})",
                         nodePtr->name(), nodePtr->type());
 
+                    if (shapeAsConstant && (nodePtr->type() == Shape_Op::Type)) {
+                        Log::debug("Trying to constant fold the graph");
+                        // Shape are folded if we don't find back the node in the graph
+                        if(constantFolding(shared_from_this(), true)){
+                            Log::notice("Shape node {} (of type {}) was folded.", nodePtr->name(), nodePtr->type());
+                            Log::debug("Resetting list of nodes to graph inputs");
+                            // Graph was modified during constant folding
+                            // We re-propagate dims starting from the entry of the graph
+                            nextList = inputNodes();
+                            for (const auto& currentNodePtr : getNodes()) {
+                                if (currentNodePtr->type() == Producer_Op::Type) {
+                                    // Producers are already dims forwarded!
+                                    dimsForwarded.insert(currentNodePtr);
+                                    // Producers childs are dims forwardable
+                                    for (const auto& child : currentNodePtr->getChildren()) {
+                                        if (inView(child)) {
+                                            nextList.insert(child);
+                                        }
+                                    }
+                                }
+                            }
+
+                            Log::debug("Breaking loop to restart from the beginning");
+                            break;
+                        }else{
+                            Log::debug("Shape node {} (of type {}) was not folded.", nodePtr->name(), nodePtr->type());
+                        }
+                    }
                     // Recompute every time, even if it was already computed in a
                     // previous call of forwardDims(), as the graph may have changed!
                     dimsForwarded.insert(nodePtr);
@@ -577,12 +611,17 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
                     if (parentsForwarded) {
                         Log::debug("Unable to forward dimensions for node {} (of type {})", nodePtr->name(), nodePtr->type());
                     }
+                    Log::debug("Adding back node {} (of type {}) to the list of nodes to forward dimensions", nodePtr->name(), nodePtr->type());
                     nextList.insert(nodePtr);
                 }
             }
+            else {
+                AIDGE_THROW_OR_ABORT(std::runtime_error, "Cannot forward dims of node {} (of type {}) as it is not an OperatorTensor. ForwardDims is currently only supported for OperatorTensor.", nodePtr->name(), nodePtr->type());
+            }
+            Log::debug("- - - - -");
         }
 
-        Log::debug("********************");
+        Log::debug("Finished treating current list of nodes ...");
 
         // Internal check to make sure we won't enter in an infinite loop!
         if (nextList == listNodes) {
-- 
GitLab


From 02c2276f23e1785063a3b4eddd8e9c4013e0a161 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 28 Feb 2025 15:17:27 +0000
Subject: [PATCH 04/14] Update some log messages and fix small typo in Node.hpp

---
 include/aidge/graph/Node.hpp | 6 +++++-
 src/graph/GraphView.cpp      | 5 ++---
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/include/aidge/graph/Node.hpp b/include/aidge/graph/Node.hpp
index 9b16f76d5..15d8aaf4e 100644
--- a/include/aidge/graph/Node.hpp
+++ b/include/aidge/graph/Node.hpp
@@ -326,7 +326,7 @@ public:
    * @param otherInId ID of the other Node input to connect to the current Node.
    * Default to the first available data input.
    *
-   * @note otherNode shared_ptr is passed by refenrece in order to be able to detect
+   * @note otherNode shared_ptr is passed by reference in order to be able to detect
    * possible dangling connection situations in debug using ref counting.
    */
   void addChild(const NodePtr& otherNode,
@@ -507,7 +507,7 @@ private:
   * @param outId
   * @param otherInId
   *
-   * @note otherNode shared_ptr is passed by refenrece in order to be able to detect
+   * @note otherNode shared_ptr is passed by reference in order to be able to detect
    * possible dangling connection situations in debug using ref counting.
    */
   void addChildOp(const NodePtr& otherNode, const IOIndex_t outId,
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index 0e7951fe8..5d4df27a0 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -574,8 +574,7 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
                         Log::debug("Trying to constant fold the graph");
                         // Shape are folded if we don't find back the node in the graph
                         if(constantFolding(shared_from_this(), true)){
-                            Log::notice("Shape node {} (of type {}) was folded.", nodePtr->name(), nodePtr->type());
-                            Log::debug("Resetting list of nodes to graph inputs");
+                            Log::notice("Constant folding worked, resetting list of nodes to graph inputs.");
                             // Graph was modified during constant folding
                             // We re-propagate dims starting from the entry of the graph
                             nextList = inputNodes();
@@ -595,7 +594,7 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
                             Log::debug("Breaking loop to restart from the beginning");
                             break;
                         }else{
-                            Log::debug("Shape node {} (of type {}) was not folded.", nodePtr->name(), nodePtr->type());
+                            Log::debug("Constant folding failed to fold any nodes.");
                         }
                     }
                     // Recompute every time, even if it was already computed in a
-- 
GitLab


From d73de8080f43100db336d55cffbdd4ce17cd7f7e Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 28 Feb 2025 15:23:02 +0000
Subject: [PATCH 05/14] Fix constant folding with shape as constant.

---
 src/recipes/ConstantFolding.cpp | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/src/recipes/ConstantFolding.cpp b/src/recipes/ConstantFolding.cpp
index 05c92afb3..40cd30a7d 100644
--- a/src/recipes/ConstantFolding.cpp
+++ b/src/recipes/ConstantFolding.cpp
@@ -30,7 +30,7 @@ bool Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
         folded = false;
         std::set<std::shared_ptr<Node>> candidates;
         for (const std::shared_ptr<Node>& nodePtr : graph->getNodes()) {
-            if (nodePtr->type() == Producer_Op::Type || (constantShape && (nodePtr->type() != Shape_Op::Type))) {
+            if (nodePtr->type() == Producer_Op::Type || (constantShape && (nodePtr->type() == Shape_Op::Type))) {
                 const auto& childs = nodePtr->getChildren();
                 candidates.insert(childs.begin(), childs.end());
             }
@@ -44,8 +44,8 @@ bool Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
             for (const auto& input : node->inputs()) {
                 if (input.first) {
                     if (!(input.first->type() == Producer_Op::Type || (constantShape && (input.first->type() == Shape_Op::Type)))) {
-                        Log::debug("Input {} of node {} (of type {}) not foldable, because {} (of type {}) is not a constant. With constant = {}",
-                            i, node->name(), node->type(), input.first->name(), input.first->type(), constantShape);
+                        Log::debug("Input {} of node {} (of type {}) not foldable, because {} (of type {}) is not a constant.",
+                            i, node->name(), node->type(), input.first->name(), input.first->type());
                         foldable = false;
                         break;
                     }
@@ -98,7 +98,14 @@ bool Aidge::constantFolding(std::shared_ptr<GraphView> graph, bool constantShape
                     // Add output in right order
                     prodGraph->add(newProd);
                 }
-
+                Log::debug("Trying to replace:");
+                for(auto nodeToReplace: replaceGraph->getNodes()){
+                    Log::debug("\t- {} ({})", nodeToReplace->name(), nodeToReplace->type());
+                }
+                Log::debug("With:");
+                for(auto nodeReplacing: prodGraph->getNodes()){
+                    Log::debug("\t- {} ({})", nodeReplacing->name(), nodeReplacing->type());
+                }
                 if (GraphView::replace(replaceGraph, prodGraph)) {
                     folded = true;
                     modified = true;
-- 
GitLab


From 470d5bed2375951c4bbcb61b902a6ff9627cbba2 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 28 Feb 2025 15:25:11 +0000
Subject: [PATCH 06/14] [Replace] Add case when replacing a graph by a Producer
 + add debug log.

---
 src/graph/GraphView.cpp | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index 5d4df27a0..91cc0f889 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -1256,6 +1256,7 @@ bool Aidge::GraphView::replace(const std::shared_ptr<GraphView>& oldGraph, const
         for (const auto& nodePtr : newNodes) {
             nodePtr->removeView(newGraph);
         }
+        Log::warn("Discrepancy between the number of input/output of the graph to replace.\n\t- OLD NB INPUTS: {} - NEW NB INPUTS {}\n\t- OLD NB OUTPUTS: {} - NEW NB OUTPUTS {}", oldOIn.size(), newOIn.size(), oldOOut.size(), newOOut.size());
         return false;
     }
 
@@ -1325,14 +1326,27 @@ bool Aidge::GraphView::replace(const std::shared_ptr<GraphView>& oldGraph, const
                     newOOut[o].first -> addChild(child.first, newOOut[o].second, child.second);
                 }
             }
-        }
-        else {
+        } else if ( // for folding case
+          ((newOIn.size() == 0)) &&
+          ((oldOOut.size() == newOOut.size()) && (newOOut.size() == 1))
+        ) {
+            // Case 4
+            // Replace any nodes by a Producer
+            // No need to remove old inputs it is removed later on ...
+            for (std::size_t o = 0; o < oldOOut.size(); ++o) {
+                for (const auto& child : outputChildren[o]) {
+                    newOOut[o].first -> addChild(child.first, newOOut[o].second, child.second);
+                }
+            }
+
+        } else {
             for (const auto& nodePtr : oldNodes) {
                 nodePtr->removeView(oldGraph);
             }
             for (const auto& nodePtr : newNodes) {
                 nodePtr->removeView(newGraph);
             }
+            Log::warn("Could not replace");
             return false;
         }
     }
-- 
GitLab


From 430378f4101728fb3d78bc90d33f9d004580908c Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 28 Feb 2025 15:36:43 +0000
Subject: [PATCH 07/14] Update replace documentation according to changes.

---
 include/aidge/graph/GraphView.hpp |  3 +++
 src/graph/GraphView.cpp           | 12 ++++++------
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/include/aidge/graph/GraphView.hpp b/include/aidge/graph/GraphView.hpp
index 8ac05b5b8..f6297f790 100644
--- a/include/aidge/graph/GraphView.hpp
+++ b/include/aidge/graph/GraphView.hpp
@@ -523,6 +523,9 @@ public:
      *     - same number of input/output connections in oldNodes, parents and children are linked according
      *       to these connections IDs
      *     - different number of input/output connections in oldNodes => return false
+     * Case 4: newNodes set has no input and one output, oldNodes has any input and one output
+     *     - reconnect output
+     *     - all input are disconnected
      * @param oldNodes
      * @param newNodes
      * @return true replacement has been performed
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index 91cc0f889..533c9dc25 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -1157,12 +1157,12 @@ void Aidge::GraphView::insertParent(NodePtr childNode,
 
 /**
  * Inputs conditions:
- * |  old    \     new   | 1 node, 1 input  | >1 node, 1 input  | 1 node, >1 inputs  | >1 node, >1 inputs |
- * | ------------------- | ---------------- | ----------------- | ------------------ | ------------------ |
- * | 1 node, 1 input     |     trivial      |      trivial      |     broadcast      |    broadcast       |
- * | >1 node, 1 input    |     trivial      |      trivial      |     broadcast      |    broadcast       |
- * | 1 node, >1 inputs   |   (take first)   |   (take first)    |     same order     |       X            |
- * | >1 node, >1 inputs  |       X          |        X          |         X          |       X            |
+ * |  old    \     new   | 1 node, 1 input  | >1 node, 1 input  | 1 node, >1 inputs  | >1 node, >1 inputs | >=1 node, 0 inputs |
+ * | ------------------- | ---------------- | ----------------- | ------------------ | ------------------ | ------------------ |
+ * | 1 node, 1 input     |     trivial      |      trivial      |     broadcast      |    broadcast       |       trivial      |
+ * | >1 node, 1 input    |     trivial      |      trivial      |     broadcast      |    broadcast       |       trivial      |
+ * | 1 node, >1 inputs   |   (take first)   |   (take first)    |     same order     |       X            |       trivial      |
+ * | >1 node, >1 inputs  |       X          |        X          |         X          |       X            |       trivial      |
  *
  * Outputs conditions:
  * |  old    \     new   | 1 node, 1 output | >1 node, 1 output | 1 node, >1 outputs | >1 node, >1 outputs |
-- 
GitLab


From 52576737cc6666e0391f8e3a06b49c33d5b5aa0d Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 28 Feb 2025 15:40:06 +0000
Subject: [PATCH 08/14] Add unittest for forward dims with
 shape_as_constant=True

---
 .../test_forward_dims_constant_shape.py       | 104 ++++++++++++++++++
 1 file changed, 104 insertions(+)
 create mode 100644 aidge_core/unit_tests/test_forward_dims_constant_shape.py

diff --git a/aidge_core/unit_tests/test_forward_dims_constant_shape.py b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
new file mode 100644
index 000000000..5490ffdd2
--- /dev/null
+++ b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
@@ -0,0 +1,104 @@
+"""
+Copyright (c) 2023 CEA-List
+
+This program and the accompanying materials are made available under the
+terms of the Eclipse Public License 2.0 which is available at
+http://www.eclipse.org/legal/epl-2.0.
+
+SPDX-License-Identifier: EPL-2.0
+"""
+
+import unittest
+import aidge_core
+import numpy as np
+import aidge_backend_cpu
+
+# class DivImpl(aidge_core.OperatorImpl):
+#     """Div operator implementation to avoid dependency to backend_cpu"""
+
+#     def __init__(self, op: aidge_core.Operator):
+#         aidge_core.OperatorImpl.__init__(self, op, "div")
+#         self.op = op
+#         print("Creating divImpl")
+#     def forward(self):
+#         data_input_0 = np.array(self.op.get_input(0))
+#         data_input_1 = np.array(self.op.get_input(1))
+#         output =  (data_input_0 / data_input_1)
+#         print(output, " = ",  data_input_0, "/", data_input_1)
+#         self.op.set_output(0, aidge_core.Tensor(output)) # setting operator output
+
+# # Note: In this test, except Div, every operator are backend independent
+# aidge_core.register_DivOp("cpu", DivImpl)
+
+class test_forward_dims_constant_shape(unittest.TestCase):
+    """Test forwardDims with shapeAsConstant=True
+    """
+    def setUp(self):
+        # Declaring constant values
+        prod_two_a = aidge_core.Producer(aidge_core.Tensor(np.array(2, dtype=np.int64)), "two_a", constant=True)
+        prod_two_b = aidge_core.Producer(aidge_core.Tensor(np.array(2, dtype=np.int64)), "two_b", constant=True)
+
+        # Declaring operators
+        shape_op_1     = aidge_core.Shape(name="shape_op_1")
+        shape_op_2     = aidge_core.Shape(name="shape_op_2")
+        shape_op_3     = aidge_core.Shape(name="shape_op_3")
+        shape_op_4     = aidge_core.Shape(name="shape_op_4")
+        gather_op_1    = aidge_core.Gather(axis = 0, indices = [0], name="gather_op_1")
+        gather_op_2    = aidge_core.Gather(axis = 0, indices = [1], name="gather_op_2")
+        gather_op_3    = aidge_core.Gather(axis = 0, indices = [2], name="gather_op_3")
+        gather_op_4    = aidge_core.Gather(axis = 0, indices = [3], name="gather_op_4")
+        div_op         = aidge_core.Div(name="div_op")
+
+
+        u_op_1         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_1")
+        u_op_2         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_2")
+        u_op_3         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_3")
+        u_op_4         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_4")
+        u_op_5         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_5")
+        u_op_6         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_6")
+        u_op_7         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_7")
+        u_op_8         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_8")
+        u_op_9         = aidge_core.Unsqueeze(axes = [0], name="unsqueeze_op_9")
+        concat_op_1    = aidge_core.Concat(5, name="concat_op_1")
+        concat_op_2    = aidge_core.Concat(4, name="concat_op_2")
+        reshape_op_1   = aidge_core.Reshape(name="reshape_op_1")
+        reshape_op_2   = aidge_core.Reshape(name="reshape_op_2")
+        transpose_op_1 = aidge_core.Transpose([0, 2, 1, 3, 4], name="transpose_op_1")
+
+        # Declaring Connectors
+        x = aidge_core.Connector(aidge_core.Identity(f"Input"))
+        a = aidge_core.Connector(prod_two_a)
+        b = aidge_core.Connector(prod_two_b)
+
+        # Graph creation using functional declaration
+        x1 = shape_op_1(x)
+        x2 = shape_op_2(x)
+        x3 = shape_op_3(x)
+        x4 = shape_op_4(x)
+        n = gather_op_1(x1)
+        c = gather_op_2(x2)
+        h = gather_op_3(x3)
+        w = gather_op_4(x4)
+
+        shape_1 = concat_op_1(u_op_1(n), u_op_2(a), u_op_3(div_op(c, b)), u_op_4(h), u_op_5(w))
+        shape_2 = concat_op_2(u_op_6(n), u_op_7(c), u_op_8(h), u_op_9(w))
+
+        y = reshape_op_2(transpose_op_1(reshape_op_1(x, shape_1)), shape_2)
+
+        self.graph = aidge_core.generate_graph([y])
+
+
+    def tearDown(self):
+        pass
+
+    def test_attributes_name(self):
+        # Note: Except Div every operator are backend independent
+        self.graph.set_backend("cpu")
+        self.graph.set_datatype(aidge_core.dtype.float32)
+        self.assertTrue(self.graph.forward_dims([[5, 12, 24, 24]], allow_data_dependency = True, shape_as_constant = True),
+                        "Failed to forward dimensions.")
+        self.assertEqual(len(self.graph.get_nodes()), 6, "After forward dims with constant folding we don't have the expected number of nodes.")
+
+
+if __name__ == '__main__':
+    unittest.main()
-- 
GitLab


From 456aa6073bd0aab140b2ede564b8b861540e17bd Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 28 Feb 2025 16:45:37 +0000
Subject: [PATCH 09/14] Remove dependency on backend_cpu by providing a numpy
 implementation of the div operator.

---
 .../test_forward_dims_constant_shape.py       | 34 +++++++++----------
 1 file changed, 16 insertions(+), 18 deletions(-)

diff --git a/aidge_core/unit_tests/test_forward_dims_constant_shape.py b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
index 5490ffdd2..ecab2664a 100644
--- a/aidge_core/unit_tests/test_forward_dims_constant_shape.py
+++ b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
@@ -11,24 +11,22 @@ SPDX-License-Identifier: EPL-2.0
 import unittest
 import aidge_core
 import numpy as np
-import aidge_backend_cpu
-
-# class DivImpl(aidge_core.OperatorImpl):
-#     """Div operator implementation to avoid dependency to backend_cpu"""
-
-#     def __init__(self, op: aidge_core.Operator):
-#         aidge_core.OperatorImpl.__init__(self, op, "div")
-#         self.op = op
-#         print("Creating divImpl")
-#     def forward(self):
-#         data_input_0 = np.array(self.op.get_input(0))
-#         data_input_1 = np.array(self.op.get_input(1))
-#         output =  (data_input_0 / data_input_1)
-#         print(output, " = ",  data_input_0, "/", data_input_1)
-#         self.op.set_output(0, aidge_core.Tensor(output)) # setting operator output
-
-# # Note: In this test, except Div, every operator are backend independent
-# aidge_core.register_DivOp("cpu", DivImpl)
+
+class DivImpl(aidge_core.OperatorImpl):
+    """Div operator implementation to avoid dependency to backend_cpu"""
+
+    def __init__(self, op: aidge_core.Operator):
+        aidge_core.OperatorImpl.__init__(self, op, "div")
+        self.op = op
+        print("Creating divImpl")
+    def forward(self):
+        data_input_0 = np.array(self.op.get_input(0))
+        data_input_1 = np.array(self.op.get_input(1))
+        output =  (data_input_0 / data_input_1)
+        self.op.set_output(0, aidge_core.Tensor(output)) # setting operator output
+
+# Note: In this test, except Div, every operator are backend independent
+aidge_core.register_DivOp("cpu", DivImpl)
 
 class test_forward_dims_constant_shape(unittest.TestCase):
     """Test forwardDims with shapeAsConstant=True
-- 
GitLab


From c7da2a046684956b7cd73fa1420da12c9f476efe Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Mon, 3 Mar 2025 14:36:06 +0000
Subject: [PATCH 10/14] Remove constant shape folding from forwardDims and
 create a recipe.

---
 .../test_forward_dims_constant_shape.py       |  4 +-
 include/aidge/graph/GraphView.hpp             |  3 +-
 include/aidge/recipes/Recipes.hpp             |  8 ++++
 python_binding/graph/pybind_GraphView.cpp     |  4 +-
 python_binding/recipes/pybind_Recipes.cpp     | 11 +++++
 src/graph/GraphView.cpp                       | 29 +-------------
 src/recipes/ShapeFolding.cpp                  | 40 +++++++++++++++++++
 7 files changed, 64 insertions(+), 35 deletions(-)
 create mode 100644 src/recipes/ShapeFolding.cpp

diff --git a/aidge_core/unit_tests/test_forward_dims_constant_shape.py b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
index ecab2664a..aea0260c8 100644
--- a/aidge_core/unit_tests/test_forward_dims_constant_shape.py
+++ b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
@@ -93,8 +93,8 @@ class test_forward_dims_constant_shape(unittest.TestCase):
         # Note: Except Div every operator are backend independent
         self.graph.set_backend("cpu")
         self.graph.set_datatype(aidge_core.dtype.float32)
-        self.assertTrue(self.graph.forward_dims([[5, 12, 24, 24]], allow_data_dependency = True, shape_as_constant = True),
-                        "Failed to forward dimensions.")
+
+        aidge_core.constant_shape_folding(self.graph, [[5, 12, 24, 24]])
         self.assertEqual(len(self.graph.get_nodes()), 6, "After forward dims with constant folding we don't have the expected number of nodes.")
 
 
diff --git a/include/aidge/graph/GraphView.hpp b/include/aidge/graph/GraphView.hpp
index f6297f790..be325cb96 100644
--- a/include/aidge/graph/GraphView.hpp
+++ b/include/aidge/graph/GraphView.hpp
@@ -291,10 +291,9 @@ public:
      *
      * @param dims Vector of dimension vectors for graph inputs. Empty by default.
      * @param allowDataDependency Whether to allow data-dependent dimension computation. False by default.
-     * @param shapeAsConstant If true treat shape as constant and fold the graph, this implies that the graph may change if part of the graph are foldable. False by default.
      * @return true if dimension propagation succeeded, false otherwise.
      */
-    bool forwardDims(const std::vector<std::vector<DimSize_t>>& dims = {}, bool allowDataDependency = false, bool shapeAsConstant = false);
+    bool forwardDims(const std::vector<std::vector<DimSize_t>>& dims = {}, bool allowDataDependency = false);
 
     /** @brief Set the same backend for each Operator of the GraphView object's Nodes. */
     void setBackend(const std::string& backend, const DeviceIdx_t device = 0) const;
diff --git a/include/aidge/recipes/Recipes.hpp b/include/aidge/recipes/Recipes.hpp
index cf428e558..2d04fc426 100644
--- a/include/aidge/recipes/Recipes.hpp
+++ b/include/aidge/recipes/Recipes.hpp
@@ -31,6 +31,14 @@ namespace Aidge {
  */
 bool constantFolding(std::shared_ptr<GraphView> graph, bool constantShape=false);
 
+/**
+ * @brief Retrieve parts of the graph that can be pre-computed by treating Shape as constant and replace them with a Producer.
+ * @param graph Graph in which to fold constant shapes
+ * @param dims Optional dimensions of the graph inputs, forwarded before folding
+ * @return bool True if the graph has been modified
+ */
+bool constantShapeFolding(std::shared_ptr<GraphView> graph, const std::vector<std::vector<DimSize_t>>& dims = {});
+
 // FUSE MATMUL + ADD -> FC
 
 /**
diff --git a/python_binding/graph/pybind_GraphView.cpp b/python_binding/graph/pybind_GraphView.cpp
index ec4119e32..31e3a0099 100644
--- a/python_binding/graph/pybind_GraphView.cpp
+++ b/python_binding/graph/pybind_GraphView.cpp
@@ -128,7 +128,7 @@ void init_GraphView(py::module& m) {
           .def("clone", &GraphView::clone)
           .def("get_nodes", &GraphView::getNodes)
           .def("get_node", &GraphView::getNode, py::arg("node_name"))
-          .def("forward_dims", &GraphView::forwardDims, py::arg("dims")=std::vector<std::vector<DimSize_t>>(), py::arg("allow_data_dependency") = false, py::arg("shape_as_constant") = false,
+          .def("forward_dims", &GraphView::forwardDims, py::arg("dims")=std::vector<std::vector<DimSize_t>>(), py::arg("allow_data_dependency") = false,
           R"mydelimiter(
             Compute and propagate Tensor dimensions through the GraphView.
 
@@ -167,8 +167,6 @@ void init_GraphView(py::module& m) {
                 Vector of dimension vectors for graph inputs. Empty by default.
             allow_data_dependency : bool, optional
                 Whether to allow data-dependent dimension computation, by default False
-            shape_as_constant : bool, optional
-                If True, treat shape as constant and fold the graph. This implies that the graph may change if part of the graph are foldable, by default False.
 
             Returns
             -------
diff --git a/python_binding/recipes/pybind_Recipes.cpp b/python_binding/recipes/pybind_Recipes.cpp
index 0c9f86fe7..68ad81b8b 100644
--- a/python_binding/recipes/pybind_Recipes.cpp
+++ b/python_binding/recipes/pybind_Recipes.cpp
@@ -36,6 +36,17 @@ void init_Recipes(py::module &m)
     :rtype: bool
     )mydelimiter");
 
+  m.def("constant_shape_folding", static_cast<bool(*)(std::shared_ptr<GraphView>, const std::vector<std::vector<DimSize_t>>&)>(constantShapeFolding), py::arg("graph_view"), py::arg("dims") = std::vector<std::vector<DimSize_t>>(), R"mydelimiter(
+      Retrieve parts of the graph that can be pre-computed by treating Shape as constant and replace them with a Producer.
+
+      :param graph_view: Graph view on which we want to apply the recipe
+      :type graph_view: :py:class:`aidge_core.GraphView`
+      :param dims: Dimensions of the graph inputs, used to forward dimensions before folding, default=[]
+      :type dims: List[List[int]], optional
+      :return: True if the graph has been modified
+      :rtype: bool
+      )mydelimiter");
+
   m.def("matmul_to_fc", static_cast<void(*)(std::shared_ptr<GraphView>)>(matMulToFC), py::arg("graph_view"), R"mydelimiter(
     Recipe to Fuse MatMul and Add operators into an :py:class:`aidge_core.FC` operator.
 
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index 533c9dc25..2fbc264e4 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -445,7 +445,7 @@ void Aidge::GraphView::compile(const std::string& backend, const Aidge::DataType
     forwardDims(dims);
 }
 
-bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_t>>& dims, bool allowDataDependency, bool shapeAsConstant) {
+bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_t>>& dims, bool allowDataDependency) {
     Log::debug("Starting dimension forward propagation for GraphView");
     // remove current Data connections and use dummy inputs to propagate dimensions
     // setInputs
@@ -570,33 +570,6 @@ bool Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
                     Log::debug("Dimensions forwarded for node {} (of type {})",
                         nodePtr->name(), nodePtr->type());
 
-                    if (shapeAsConstant && (nodePtr->type() == Shape_Op::Type)) {
-                        Log::debug("Trying to constant fold the graph");
-                        // Shape are folded if we don't find back the node in the graph
-                        if(constantFolding(shared_from_this(), true)){
-                            Log::notice("Constant folding worked, resetting list of nodes to graph inputs.");
-                            // Graph was modified during constant folding
-                            // We re-propagate dims starting from the entry of the graph
-                            nextList = inputNodes();
-                            for (const auto& currentNodePtr : getNodes()) {
-                                if (currentNodePtr->type() == Producer_Op::Type) {
-                                    // Producers are already dims forwarded!
-                                    dimsForwarded.insert(currentNodePtr);
-                                    // Producers childs are dims forwardable
-                                    for (const auto& child : currentNodePtr->getChildren()) {
-                                        if (inView(child)) {
-                                            nextList.insert(child);
-                                        }
-                                    }
-                                }
-                            }
-
-                            Log::debug("Breaking loop to restart from the beginning");
-                            break;
-                        }else{
-                            Log::debug("Constant folding fail to fold any nodes.");
-                        }
-                    }
                     // Recompute every time, even if it was already computed in a
                     // previous call of forwardDims(), as the graph may have changed!
                     dimsForwarded.insert(nodePtr);
diff --git a/src/recipes/ShapeFolding.cpp b/src/recipes/ShapeFolding.cpp
new file mode 100644
index 000000000..f869e646b
--- /dev/null
+++ b/src/recipes/ShapeFolding.cpp
@@ -0,0 +1,40 @@
+/********************************************************************************
+ * Copyright (c) 2023 CEA-List
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License 2.0 which is available at
+ * http://www.eclipse.org/legal/epl-2.0.
+ *
+ * SPDX-License-Identifier: EPL-2.0
+ *
+ ********************************************************************************/
+// #include <cassert>
+#include <memory>
+#include <set>
+#include <string>
+
+#include "aidge/graph/GraphView.hpp"
+#include "aidge/graph/Node.hpp"
+#include "aidge/operator/Shape.hpp"
+#include "aidge/recipes/Recipes.hpp"
+#include "aidge/utils/Log.hpp"
+// #include "aidge/utils/Types.h"
+
+bool Aidge::constantShapeFolding(std::shared_ptr<GraphView> graph, const std::vector<std::vector<DimSize_t>>& dims) {
+    bool modified      = false;
+    bool forwarded     = false;
+    bool not_shape_present = true;
+    for (auto nodePtr: graph->getNodes())
+        not_shape_present &= (nodePtr->type() != Shape_Op::Type);
+    if (not_shape_present)
+        return false;
+    do{
+        forwarded = graph->forwardDims(dims, true);
+        modified = constantFolding(graph, true);
+    } while(modified);
+    if (!forwarded){
+        Log::warn("Failed to forward GraphView.");
+    }
+
+    return modified;
+}
-- 
GitLab


From 61163dbf6152f90ed08d2081b4d8dab56d00487e Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Tue, 4 Mar 2025 10:25:46 +0000
Subject: [PATCH 11/14] [GraphView.cpp] Remove unnecessary include.

---
 src/graph/GraphView.cpp | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index 2fbc264e4..e193a0af4 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -32,8 +32,6 @@
 #include "aidge/operator/MetaOperator.hpp"
 #include "aidge/operator/OperatorTensor.hpp"
 #include "aidge/operator/Producer.hpp"
-#include "aidge/operator/Shape.hpp"
-#include "aidge/recipes/Recipes.hpp" // constantFolding
 #include "aidge/utils/Directories.hpp"
 #include "aidge/utils/FileManagement.hpp"
 #include "aidge/utils/ErrorHandling.hpp"
@@ -829,7 +827,7 @@ bool Aidge::GraphView::add(std::set<std::shared_ptr<Node>> otherNodes, bool incl
     if (mNodeRegistry.find(node->name()) != mNodeRegistry.end()) {
         std::string newName = node->createUniqueName(node->name());
         while (mNodeRegistry.find(newName) != mNodeRegistry.end()) {
-            newName = node->createUniqueName(newName + "_1"); 
+            newName = node->createUniqueName(newName + "_1");
         }
         Log::notice("node name \"{}\" is a duplicate, renaming to {}.\n", node->name(), newName);
         node->setName(newName);
-- 
GitLab


From c82911a563a9cf6cabe0b725ebe8d548f7f3c27a Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Tue, 4 Mar 2025 15:50:03 +0000
Subject: [PATCH 12/14] Fix issue with return value of constantShapeFolding

---
 src/recipes/ShapeFolding.cpp | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/recipes/ShapeFolding.cpp b/src/recipes/ShapeFolding.cpp
index f869e646b..ca6ef5087 100644
--- a/src/recipes/ShapeFolding.cpp
+++ b/src/recipes/ShapeFolding.cpp
@@ -24,6 +24,7 @@ bool Aidge::constantShapeFolding(std::shared_ptr<GraphView> graph, const std::ve
     bool modified      = false;
     bool forwarded     = false;
     bool not_shape_present = true;
+    bool was_modified = false;
     for (auto nodePtr: graph->getNodes())
         not_shape_present &= (nodePtr->type() != Shape_Op::Type);
     if (not_shape_present)
@@ -31,10 +32,11 @@ bool Aidge::constantShapeFolding(std::shared_ptr<GraphView> graph, const std::ve
     do{
         forwarded = graph->forwardDims(dims, true);
         modified = constantFolding(graph, true);
+        was_modified = true;
     } while(modified);
     if (!forwarded){
         Log::warn("Failed to forward GraphView.");
     }
 
-    return modified;
+    return was_modified;
 }
-- 
GitLab


From 27aa855cafb671fdab264a2a54dae02a522b0b2c Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Tue, 4 Mar 2025 21:27:22 +0000
Subject: [PATCH 13/14] Fix merge conflict artifact.

---
 include/aidge/graph/Node.hpp | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/include/aidge/graph/Node.hpp b/include/aidge/graph/Node.hpp
index 15d8aaf4e..cd2ca38df 100644
--- a/include/aidge/graph/Node.hpp
+++ b/include/aidge/graph/Node.hpp
@@ -507,11 +507,7 @@ private:
    * @param outId
    * @param otherInId
    *
-<<<<<<< HEAD
-   * @note otherNode shared_ptr is passed by refenrece in order to be able to detect
-=======
    * @note otherNode shared_ptr is passed by reference in order to be able to detect
->>>>>>> 3a88aa7d (Update some log messages and fix small typo in Node.hpp)
    * possible dangling connection situations in debug using ref counting.
    */
   void addChildOp(const NodePtr& otherNode, const IOIndex_t outId,
-- 
GitLab


From 1fcaee4bfd3014c3cf86299064c49ed0a906a250 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Tue, 4 Mar 2025 21:58:32 +0000
Subject: [PATCH 14/14] Fix test for shapeFolding.

---
 aidge_core/unit_tests/test_forward_dims_constant_shape.py | 2 +-
 src/recipes/ShapeFolding.cpp                              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/aidge_core/unit_tests/test_forward_dims_constant_shape.py b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
index aea0260c8..fd3fee04b 100644
--- a/aidge_core/unit_tests/test_forward_dims_constant_shape.py
+++ b/aidge_core/unit_tests/test_forward_dims_constant_shape.py
@@ -89,7 +89,7 @@ class test_forward_dims_constant_shape(unittest.TestCase):
     def tearDown(self):
         pass
 
-    def test_attributes_name(self):
+    def test_constant_shape_folding(self):
         # Note: Except Div every operator are backend independent
         self.graph.set_backend("cpu")
         self.graph.set_datatype(aidge_core.dtype.float32)
diff --git a/src/recipes/ShapeFolding.cpp b/src/recipes/ShapeFolding.cpp
index ca6ef5087..72d13ab52 100644
--- a/src/recipes/ShapeFolding.cpp
+++ b/src/recipes/ShapeFolding.cpp
@@ -30,7 +30,7 @@ bool Aidge::constantShapeFolding(std::shared_ptr<GraphView> graph, const std::ve
     if (not_shape_present)
         return false;
     do{
-        forwarded = graph->forwardDims(dims, true);
+        forwarded = graph->forwardDims(dims, false);
         modified = constantFolding(graph, true);
-        was_modified = true;
+        was_modified |= modified;
     } while(modified);
-- 
GitLab