From 84e1e2adf8e5d6c72b5121d9296e226df0882c86 Mon Sep 17 00:00:00 2001
From: Maxence Naud <maxence.naud@cea.fr>
Date: Wed, 9 Apr 2025 12:47:59 +0000
Subject: [PATCH] upd: remove 'Aidge::' scope from ConvImpl forward functions, tidy includes

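The ConvImpl1D_cpu and ConvImpl2D_cpu specializations in ConvImpl.cpp are
defined inside the `namespace Aidge { ... }` block, so the explicit
`Aidge::` qualifier on the forward() definitions is redundant. Drop it,
put the `template <>` marker on its own line for the backward()
specializations for consistency, and replace the <cassert> include with
the headers this file uses (<memory>, <vector>,
aidge/utils/ErrorHandling.hpp for AIDGE_ASSERT, and aidge/utils/Types.h).

A minimal sketch of the pattern (member bodies elided):

    namespace Aidge {

    // Inside the namespace, the enclosing scope is implicit:
    template <>
    void ConvImpl1D_cpu::forward() { /* ... */ }
    // equivalent to, and preferred over:
    // void Aidge::ConvImpl1D_cpu::forward() { /* ... */ }

    } // namespace Aidge
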
---
 src/operator/ConvImpl.cpp | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/src/operator/ConvImpl.cpp b/src/operator/ConvImpl.cpp
index d23a9968..eae5f109 100644
--- a/src/operator/ConvImpl.cpp
+++ b/src/operator/ConvImpl.cpp
@@ -12,15 +12,18 @@
 #include "aidge/backend/cpu/operator/ConvImpl.hpp"
 #include "aidge/backend/cpu/operator/ConvImpl_kernels.hpp"
 
-#include <cassert>
+#include <memory>
+#include <vector>
 
 #include "aidge/backend/cpu/data/GetCPUPtr.h"
 #include "aidge/operator/Conv.hpp"
+#include "aidge/utils/ErrorHandling.hpp"
+#include "aidge/utils/Types.h"
 
 namespace Aidge {
 
 template <>
-void Aidge::ConvImpl1D_cpu::forward() {
+void ConvImpl1D_cpu::forward() {
     const auto& op_ = static_cast<const Conv_Op<1>&>(mOp);
 
     // FIXME: uncomment the following code once memory handling will work
@@ -53,7 +56,8 @@ void Aidge::ConvImpl1D_cpu::forward() {
     );
 }
 
-template <> void ConvImpl1D_cpu::backward() {
+template <>
+void ConvImpl1D_cpu::backward() {
     const auto &op = dynamic_cast<const Conv1D_Op &>(mOp);
     const auto &outputGrad = op.getOutput(0)->grad();
     AIDGE_ASSERT(outputGrad, "{}: missing ouput #0 gradient", op.type());
@@ -97,7 +101,7 @@ template <> void ConvImpl1D_cpu::backward() {
 }
 
 template <>
-void Aidge::ConvImpl2D_cpu::forward() {
+void ConvImpl2D_cpu::forward() {
     const auto& op_ = dynamic_cast<const Conv_Op<2>&>(mOp);
 
     // FIXME: uncomment the following code once memory handling will work
@@ -130,7 +134,8 @@ void Aidge::ConvImpl2D_cpu::forward() {
 }
 
 
-template <> void ConvImpl2D_cpu::backward() {
+template <>
+void ConvImpl2D_cpu::backward() {
     const auto &op = dynamic_cast<const Conv2D_Op &>(mOp);
     const auto &outputGrad = op.getOutput(0)->grad();
     AIDGE_ASSERT(outputGrad, "{}: missing ouput #0 gradient", op.type());
-- 
GitLab