From c9d221320c69ccbc5b3f99e4a4bb1764048a2ba1 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Mon, 6 May 2024 13:44:13 +0000
Subject: [PATCH] Rename initGradient->initGrad.

---
 include/aidge/learning/optimizer/Optimizer.hpp | 2 +-
 unit_tests/optimizer/Test_SGD.cpp              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/include/aidge/learning/optimizer/Optimizer.hpp b/include/aidge/learning/optimizer/Optimizer.hpp
index 9e62187..195d649 100644
--- a/include/aidge/learning/optimizer/Optimizer.hpp
+++ b/include/aidge/learning/optimizer/Optimizer.hpp
@@ -49,7 +49,7 @@ public:
     virtual void setParameters(const std::vector<std::shared_ptr<Tensor>>& parameters) {
         mParameters = parameters;
         for (const auto& param : parameters) {
-            param->initGradient(); // create gradient and set it to zeros
+            param->initGrad(); // create gradient and set it to zeros
         }
     }
 
diff --git a/unit_tests/optimizer/Test_SGD.cpp b/unit_tests/optimizer/Test_SGD.cpp
index 17f946a..df9924d 100644
--- a/unit_tests/optimizer/Test_SGD.cpp
+++ b/unit_tests/optimizer/Test_SGD.cpp
@@ -77,7 +77,7 @@ TEST_CASE("[learning/SGD] update", "[Optimizer][SGD]") {
             optim_tensors[i] = std::make_shared<Tensor>(dims);
             optim_tensors[i]->setBackend("cpu");
             optim_tensors[i]->getImpl()->copy(val_tensors[i].get(), size_tensors[i]);
-            optim_tensors[i]->initGradient();
+            optim_tensors[i]->initGrad();
 
             grad_tensors[i] = std::make_shared<Tensor>(dims);
             grad_tensors[i]->setBackend("cpu");
-- 
GitLab