From e729a198c03c75ef3543eb3147606a43fe5eca63 Mon Sep 17 00:00:00 2001
From: NAUD Maxence <maxence.naud@cea.fr>
Date: Wed, 20 Mar 2024 16:26:46 +0000
Subject: [PATCH] Update LR unit-test to use the learning namespace

---
 unit_tests/learningRate/Test_LRScheduler.cpp | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/unit_tests/learningRate/Test_LRScheduler.cpp b/unit_tests/learningRate/Test_LRScheduler.cpp
index 206f23e..12c7f69 100644
--- a/unit_tests/learningRate/Test_LRScheduler.cpp
+++ b/unit_tests/learningRate/Test_LRScheduler.cpp
@@ -16,11 +16,11 @@
 #include <vector>
 
 // #include "aidge/data/Tensor.hpp"
-#include "aidge/optimizer/LR/LRScheduler.hpp"
-#include "aidge/optimizer/LR/LRSchedulerList.hpp"
+#include "aidge/learning/learningRate/LRScheduler.hpp"
+#include "aidge/learning/learningRate/LRSchedulerList.hpp"
 
 namespace Aidge {
-TEST_CASE("[core/optimizer/LR] LRSchduler(computeOutputDims)", "[LRScheduler]") {
+TEST_CASE("[learning/LR] Construction & evolution", "[LRScheduler]") {
     constexpr std::uint16_t NBTRIALS = 10;
 
     // Create a random number generator
@@ -37,7 +37,7 @@ TEST_CASE("[core/optimizer/LR] LRSchduler(computeOutputDims)", "[LRScheduler]")
             const float truth = initValDist(gen);
 
             // create learning rate scheduler
-            LRScheduler myLR = ConstantLR(truth);
+            LRScheduler myLR = learning::ConstantLR(truth);
 
             // prediction
             std::vector<float> profile = myLR.lr_profiling(nbSteps);
@@ -61,7 +61,7 @@ TEST_CASE("[core/optimizer/LR] LRSchduler(computeOutputDims)", "[LRScheduler]")
             const std::size_t nbSteps = nbStepsDist(gen);
             const float gamma = gammaDist(gen);
             const std::size_t stepSize = stepSizeDist(gen);
-            LRScheduler myLR = StepLR(initialLR, stepSize, gamma);
+            LRScheduler myLR = learning::StepLR(initialLR, stepSize, gamma);
 
             // truth
             std::vector<float> truth(nbSteps);
-- 
GitLab