From c0cc9273030cf492e863b15ccafed022d03b20e9 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Fri, 3 May 2024 15:25:50 +0000
Subject: [PATCH] Remove blocks of commented-out code.

---
 src/operator/Producer.cpp             |  3 ---
 src/scheduler/SequentialScheduler.cpp | 15 ---------------
 2 files changed, 18 deletions(-)

diff --git a/src/operator/Producer.cpp b/src/operator/Producer.cpp
index c063c0b8d..7059ea7e9 100644
--- a/src/operator/Producer.cpp
+++ b/src/operator/Producer.cpp
@@ -72,9 +72,6 @@ void Aidge::Producer_Op::forward() {
     if (!backend().empty()) {
         mImpl->forward();
     }
-    // else {
-    //     fmt::print("Basic Producer forward() function.\n");
-    // }
 
     runHooks();
 }
diff --git a/src/scheduler/SequentialScheduler.cpp b/src/scheduler/SequentialScheduler.cpp
index cbd2f173d..6e3df1bb3 100644
--- a/src/scheduler/SequentialScheduler.cpp
+++ b/src/scheduler/SequentialScheduler.cpp
@@ -74,21 +74,6 @@ void Aidge::SequentialScheduler::forward(bool forwardDims, const std::vector<std
 }
 
 void Aidge::SequentialScheduler::backward() {
-    // create ad set Grad values
-    // if (instanciateGrad) { compile_gradient(mGraphView); }
-
-    // const auto& ordered_outputs = mGraphView->getOrderedOutputs();
-    // AIDGE_ASSERT(ordered_outputs.size() == data.size(), "You must provide the \
-    //                right number of data objects to run the backward function. \
-    //                {} outputs detected for the current GraphView when {} were \
-    //                provided.", ordered_outputs.size(), data.size());
-    // for (std::size_t i = 0; i < ordered_outputs.size(); ++i) {
-    //     const std::shared_ptr<OperatorTensor> op_ = std::dynamic_pointer_cast<OperatorTensor>(ordered_outputs[i].first->getOperator());
-    //     const std::shared_ptr<Tensor> t_grad = op_->getOutput(ordered_outputs[i].second)->grad();
-    //     AIDGE_ASSERT(data[i]->dims() == t_grad->dims(), "Wrong gradient size (expected {}, got {}).", t_grad->dims(), data[i]->dims());
-    //     *t_grad = data[i]->clone();
-    // }
-
     // Generate scheduling *only if empty*
     // If scheduling was already generated (in one or several steps, i.e. one or
     // several successive call to generateScheduling()), do not generate it twice
-- 
GitLab