From 1e614f0a464f9543de9a47e2e78167b7d52f1e28 Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Wed, 6 Mar 2024 12:22:37 +0100
Subject: [PATCH] Improved error messages
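
Replaced bare assert()/fmt::print() diagnostics with AIDGE_ASSERT,
AIDGE_THROW_OR_ABORT and the Log API, so that failure messages carry
context (node name and type, indices, bounds). For illustration only
(this snippet simply mirrors the changes below, it is not itself part
of the diff), the intended message style is:

    // Before: the failure gave no hint about which node or index was wrong.
    //   assert((inId != gk_IODefaultIndex) && (inId < nbInputs()) && "Input index out of bound.");
    // After: fmt-style placeholders are filled with the failing context.
    AIDGE_ASSERT(inId != gk_IODefaultIndex && inId < nbInputs(),
        "Input index ({}) is out of bound ({}) for node {} (of type {})",
        inId, nbInputs(), name(), type());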

---
 include/aidge/graph/GraphView.hpp | 11 +++++------
 src/backend/OperatorImpl.cpp      | 20 ++++++++++++++-----
 src/graph/GraphView.cpp           | 32 +++++++++++++++----------------
 src/graph/Node.cpp                | 20 +++++++++++++------
 src/operator/Operator.cpp         |  5 ++++-
 src/recipes/FuseMulAdd.cpp        |  2 +-
 6 files changed, 54 insertions(+), 36 deletions(-)

diff --git a/include/aidge/graph/GraphView.hpp b/include/aidge/graph/GraphView.hpp
index 3311797d8..4194ed4d3 100644
--- a/include/aidge/graph/GraphView.hpp
+++ b/include/aidge/graph/GraphView.hpp
@@ -62,10 +62,10 @@ public:
         return mNodes == gv.mNodes;
     }
 
-    NodePtr operator[](const std::string& name)
+    NodePtr operator[](const std::string& nodeName)
     {
-        assert(mNodeRegistry.find(name) != mNodeRegistry.end() && "Could not find Node in the GraphView.");
-        return mNodeRegistry.at(name);
+        AIDGE_ASSERT(mNodeRegistry.find(nodeName) != mNodeRegistry.end(), "No node named {} in graph {}.", nodeName, name());
+        return mNodeRegistry.at(nodeName);
     }
 
 ///////////////////////////////////////////////////////
@@ -379,11 +379,10 @@ public:
      * @param toTensor Input Tensor ID of the new Node. Default to gk_IODefaultIndex, meaning
      * first available data input for the Node.
      */
-    inline void addChild(NodePtr toOtherNode, std::string fromOutNodeName,
+    inline void addChild(NodePtr toOtherNode, const std::string& fromOutNodeName,
                          const IOIndex_t fromTensor = IOIndex_t(0),
                          IOIndex_t toTensor = gk_IODefaultIndex) {
-        assert(mNodeRegistry.find(fromOutNodeName) != mNodeRegistry.end() &&
-               "No Node with this name found in the GraphView.");
+        AIDGE_ASSERT(mNodeRegistry.find(fromOutNodeName) != mNodeRegistry.end(), "No node named {} in graph {}.", fromOutNodeName, name());
         addChild(toOtherNode, mNodeRegistry.at(fromOutNodeName), fromTensor, toTensor);
     }
 
diff --git a/src/backend/OperatorImpl.cpp b/src/backend/OperatorImpl.cpp
index 1911da228..1439391b2 100644
--- a/src/backend/OperatorImpl.cpp
+++ b/src/backend/OperatorImpl.cpp
@@ -25,14 +25,18 @@ Aidge::OperatorImpl::OperatorImpl(const Operator& op):
 }
 
 Aidge::NbElts_t Aidge::OperatorImpl::getNbRequiredData(const Aidge::IOIndex_t inputIdx) const {
-    assert(mOp.getRawInput(inputIdx) && "requires valid input");
+    AIDGE_ASSERT(mOp.getRawInput(inputIdx),
+        "a valid input is required at index {} for operator type {}",
+        inputIdx, mOp.type());
 
     // Requires the whole tensor by default
     return std::static_pointer_cast<Tensor>(mOp.getRawInput(inputIdx))->size();
 }
 
 Aidge::NbElts_t Aidge::OperatorImpl::getNbRequiredProtected(IOIndex_t inputIdx) const {
-    assert(mOp.getRawInput(inputIdx) && "requires valid input");
+    AIDGE_ASSERT(mOp.getRawInput(inputIdx),
+        "a valid input is required at index {} for operator type {}",
+        inputIdx, mOp.type());
 
     // Protect the whole tensor by default
     return std::static_pointer_cast<Tensor>(mOp.getRawInput(inputIdx))->size();
@@ -40,19 +44,25 @@ Aidge::NbElts_t Aidge::OperatorImpl::getNbRequiredProtected(IOIndex_t inputIdx)
 
 Aidge::NbElts_t Aidge::OperatorImpl::getRequiredMemory(const Aidge::IOIndex_t outputIdx,
                                                          const std::vector<Aidge::DimSize_t> &/*inputsSize*/) const {
-    assert(mOp.getRawOutput(outputIdx) && "requires valid output");
+    AIDGE_ASSERT(mOp.getRawOutput(outputIdx),
+        "a valid output is required at index {} for operator type {}",
+        outputIdx, mOp.type());
 
     // Requires the whole tensor by default, regardless of available data on inputs
     return std::static_pointer_cast<Tensor>(mOp.getRawOutput(outputIdx))->size();
 }
 
 Aidge::NbElts_t Aidge::OperatorImpl::getNbConsumedData(Aidge::IOIndex_t inputIdx) const {
-    assert(static_cast<std::size_t>(inputIdx) < mNbConsumedData.size());
+    AIDGE_ASSERT(static_cast<std::size_t>(inputIdx) < mNbConsumedData.size(),
+        "input index ({}) is out of bound ({}) for operator type {}",
+        inputIdx, mNbConsumedData.size(), mOp.type());
     return mNbConsumedData[static_cast<std::size_t>(inputIdx)];
 }
 
 Aidge::NbElts_t Aidge::OperatorImpl::getNbProducedData(Aidge::IOIndex_t outputIdx) const {
-    assert(static_cast<std::size_t>(outputIdx) < mNbProducedData.size());
+    AIDGE_ASSERT(static_cast<std::size_t>(outputIdx) < mNbProducedData.size(),
+        "output index ({}) is out of bound ({}) for operator type {}",
+        outputIdx, mNbProducedData.size(), mOp.type());
     return mNbProducedData[static_cast<std::size_t>(outputIdx)];
 }
 
diff --git a/src/graph/GraphView.cpp b/src/graph/GraphView.cpp
index 3681ac533..42eb410fd 100644
--- a/src/graph/GraphView.cpp
+++ b/src/graph/GraphView.cpp
@@ -334,7 +334,7 @@ void Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
     // Link every tensor to the right pointer
     // following parent - children informations
     if (!dims.empty()){
-      AIDGE_ASSERT(dims.size() == mInputNodes.size(), "GraphView forwardDims error - Inconsistent number of dimensions and graph inputs");
+      AIDGE_ASSERT(dims.size() == mInputNodes.size(), "GraphView forwardDims error - Inconsistent number of given dimensions ({}) and graph inputs ({})", dims.size(), mInputNodes.size());
       for (std::size_t i = 0; i < dims.size(); ++i){
         auto tensor = std::make_shared<Tensor>(dims[i]);
         mInputNodes[i].first->getOperator()->setInput(mInputNodes[i].second, tensor);
@@ -352,7 +352,7 @@ void Aidge::GraphView::forwardDims(const std::vector<std::vector<Aidge::DimSize_
                         nodePtr->getOperator()->associateInput(i, inputI.first->getOperator()->getRawOutput(inputI.second));
                     }
                     else {
-                        AIDGE_ASSERT(false, "Non-tensor entries not handled yet.\n");
+                        AIDGE_ASSERT(false, "Non-tensor entries not handled yet, for node {} (of type {}).", nodePtr->name(), nodePtr->type());
                     }
                 }
             } else {
@@ -405,7 +405,13 @@ void Aidge::GraphView::_forwardDims(std::set<std::shared_ptr<Node>> listNodes) {
     }
 
     // Internal check to make sure we won't enter in an infinite loop!
-    AIDGE_ASSERT(nextList != listNodes, "Unable to forward dimensions (circular dependency and/or wrong dimensions?)");
+    if (nextList == listNodes) {
+      std::vector<std::string> nodesName;
+      std::transform(nextList.begin(), nextList.end(),
+          std::back_inserter(nodesName),
+          [](auto val){ return val->name() + " (" + val->type() + ")"; });
+      AIDGE_THROW_OR_ABORT(std::runtime_error, "Unable to forward dimensions (circular dependency and/or wrong dimensions?). Could not compute output dims for nodes {}.", nodesName);
+    }
 
     if (!nextList.empty()) {
         _forwardDims(nextList);
@@ -458,7 +464,7 @@ Aidge::GraphView::outputs(const std::string& nodeName) const {
 
 void Aidge::GraphView::setInputId(Aidge::IOIndex_t /*inID*/,
                                Aidge::IOIndex_t /*newNodeOutID*/) {
-  fmt::print("Not implemented yet.\n");
+  AIDGE_THROW_OR_ABORT(std::runtime_error, "Not implemented yet.");
 }
 
 void Aidge::GraphView::add(std::shared_ptr<Node> node, bool includeLearnableParam) {
@@ -714,10 +720,7 @@ std::set<std::shared_ptr<Aidge::Node>> Aidge::GraphView::getParents() const {
 
 std::vector<std::shared_ptr<Aidge::Node>> Aidge::GraphView::getParents(const std::string nodeName) const {
   std::map<std::string, std::shared_ptr<Node>>::const_iterator it = mNodeRegistry.find(nodeName);
-  if (it == mNodeRegistry.end()) {
-    fmt::print("No such node a {} in {} graph.\n", nodeName, name());
-    exit(-1);
-  }
+  AIDGE_ASSERT(it != mNodeRegistry.end(), "No node named {} in graph {}.", nodeName, name());
   return (it->second)->getParents();
 }
 
@@ -743,20 +746,15 @@ std::vector<std::vector<std::shared_ptr<Aidge::Node>>>
 Aidge::GraphView::getChildren(const std::string nodeName) const {
   std::map<std::string, std::shared_ptr<Node>>::const_iterator it =
       mNodeRegistry.find(nodeName);
-  if (it == mNodeRegistry.end()) {
-    fmt::print("No such node a {} in {} graph.\n", nodeName, name());
-    exit(-1);
-  }
+  AIDGE_ASSERT(it != mNodeRegistry.end(), "No node named {} in graph {}.", nodeName, name());
   return (it->second)->getOrderedChildren();
 }
 
 std::set<std::shared_ptr<Aidge::Node>>
 Aidge::GraphView::getChildren(const std::shared_ptr<Node> otherNode) const {
   std::set<std::shared_ptr<Node>>::const_iterator it = mNodes.find(otherNode);
-  if (it == mNodes.end()) {
-    fmt::print("No such node in graph.\n");
-    exit(-1);
-  }
+  AIDGE_ASSERT(it != mNodes.end(), "The node {} (of type {}) is not in graph {}.",
+    (otherNode) ? otherNode->name() : "#nullptr", (otherNode) ? otherNode->type() : "", name());
   return (*it)->getChildren();
 }
 
@@ -768,7 +766,7 @@ Aidge::GraphView::getNode(const std::string& nodeName) const {
   if (it != mNodeRegistry.cend()) {
     return it->second;
   } else {
-    fmt::print("No Node named {} in the current GraphView.\n", nodeName);
+    Log::warn("No Node named {} in the current GraphView {}.", nodeName, name());
     return nullptr;
   }
 }
diff --git a/src/graph/Node.cpp b/src/graph/Node.cpp
index 5d210144e..14e166402 100644
--- a/src/graph/Node.cpp
+++ b/src/graph/Node.cpp
@@ -169,7 +169,9 @@ Aidge::IOIndex_t Aidge::Node::nbValidOutputs() const {
 }
 
 void Aidge::Node::setInputId(const IOIndex_t inId, const IOIndex_t newNodeoutId) {
-    assert(inId != gk_IODefaultIndex && (inId < nbInputs()) && "Must be a valid index");
+    AIDGE_ASSERT(inId != gk_IODefaultIndex && inId < nbInputs(),
+        "Input index ({}) is out of bound ({}) for node {} (of type {})",
+        inId, nbInputs(), name(), type());
     if (mIdOutParents[inId] != gk_IODefaultIndex) {
         fmt::print("Warning: filling a Tensor already attributed\n");
         auto originalParent = input(inId);
@@ -194,7 +196,7 @@ void Aidge::Node::addChildOp(std::shared_ptr<Node> otherNode, const IOIndex_t ou
         "Output index (#{}) of the node {} (of type {}) is out of bound (it has {} outputs), when trying to add the child node {} (of type {})",
         outId, name(), type(), nbOutputs(), otherNode->name(), otherNode->type());
     if (otherNode->input(otherInId).second != gk_IODefaultIndex) {
-        fmt::print("Warning, the {}-th Parent of the child node already existed.\n", otherInId);
+        Log::notice("Notice: the {}-th Parent of the child node {} (of type {}) already existed", otherInId, otherNode->name(), otherNode->type());
     }
     // manage tensors and potential previous parent
     otherNode->setInputId(otherInId, outId);
@@ -239,23 +241,29 @@ void Aidge::Node::addChild(std::shared_ptr<GraphView> otherView, const IOIndex_t
 
 void Aidge::Node::addParent(const std::shared_ptr<Node> other_node, const IOIndex_t inId) {
     if (getParent(inId) != nullptr) {
-        fmt::print("Warning, you're replacing a Parent.\n");
+        Log::notice("Notice: you are replacing an existing parent for node {} (of type {})", name(), type());
     }
-    assert((inId != gk_IODefaultIndex) && (inId < nbInputs()) && "Input index out of bound.");
+    AIDGE_ASSERT(inId != gk_IODefaultIndex && inId < nbInputs(),
+        "Input index ({}) is out of bound ({}) for node {} (of type {})",
+        inId, nbInputs(), name(), type());
     mParents[inId] = other_node;
 }
 
 std::vector<std::shared_ptr<Aidge::Node>> Aidge::Node::getParents() const { return mParents; }
 
 std::shared_ptr<Aidge::Node> Aidge::Node::popParent(const IOIndex_t inId) {
-    assert((inId != gk_IODefaultIndex) && (inId < nbInputs()) && "Input index out of bound.");
+    AIDGE_ASSERT(inId != gk_IODefaultIndex && inId < nbInputs(),
+        "Input index ({}) is out of bound ({}) for node {} (of type {})",
+        inId, nbInputs(), name(), type());
     std::shared_ptr<Node> val = mParents[inId];
     removeParent(inId);
     return val;
 }
 
 bool Aidge::Node::removeParent(const IOIndex_t inId) {
-    assert((inId != gk_IODefaultIndex) && (inId < nbInputs()) && "Parent index out of bound.");
+    AIDGE_ASSERT(inId != gk_IODefaultIndex && inId < nbInputs(),
+        "Input index ({}) is out of bound ({}) for node {} (of type {})",
+        inId, nbInputs(), name(), type());
     if (mParents[inId]) {
         mParents[inId] = nullptr;
         mIdOutParents[inId] = gk_IODefaultIndex;
diff --git a/src/operator/Operator.cpp b/src/operator/Operator.cpp
index 289b2be90..e4213cad8 100644
--- a/src/operator/Operator.cpp
+++ b/src/operator/Operator.cpp
@@ -75,4 +75,7 @@ void Aidge::Operator::forward() {
     runHooks();
 }
 
-void Aidge::Operator::backward() { mImpl->backward(); }
+void Aidge::Operator::backward() {
+    AIDGE_ASSERT(mImpl != nullptr, "backward(): an implementation is required for {}!", type());
+    mImpl->backward();
+}
diff --git a/src/recipes/FuseMulAdd.cpp b/src/recipes/FuseMulAdd.cpp
index f408959a1..b57c1c3fc 100644
--- a/src/recipes/FuseMulAdd.cpp
+++ b/src/recipes/FuseMulAdd.cpp
@@ -64,7 +64,7 @@ void Aidge::fuseMulAdd(std::shared_ptr<Aidge::Node> matmulNode, std::shared_ptr<
     {
         // If both inputs are producers, there is an ambiguity, but both options
         // result in a correct solution.
-        fmt::print("Warning: both MatMul inputs are Producers, assume data at input#0 and weights at input#1.\n");
+        Log::notice("Notice: both MatMul inputs are Producers, assume data at input#0 and weights at input#1.");
         weight = matmulNode->getParent(1)->cloneSharedOperators();
     }
     AIDGE_ASSERT(weight != nullptr, "Could not deduce weight input for MatMul operator.");
-- 
GitLab