From 39d17c4ca8d050ee600f7acfffd11f79870eba80 Mon Sep 17 00:00:00 2001
From: Jerome Hue <jerome.hue@cea.fr>
Date: Mon, 3 Mar 2025 09:08:01 +0100
Subject: [PATCH] Fix warnings by removing unused parameters in Sub backward
 kernel

---
 include/aidge/backend/cpu/operator/SubImpl.hpp         |  6 +-----
 include/aidge/backend/cpu/operator/SubImpl_kernels.hpp |  2 --
 src/operator/SubImpl.cpp                               | 10 ++++------
 unit_tests/operator/Test_MetaOperator.cpp              |  4 ++--
 4 files changed, 7 insertions(+), 15 deletions(-)

diff --git a/include/aidge/backend/cpu/operator/SubImpl.hpp b/include/aidge/backend/cpu/operator/SubImpl.hpp
index 064b5329..1f94ff13 100644
--- a/include/aidge/backend/cpu/operator/SubImpl.hpp
+++ b/include/aidge/backend/cpu/operator/SubImpl.hpp
@@ -15,9 +15,7 @@
 #include "aidge/backend/cpu/operator/OperatorImpl.hpp"
 #include "aidge/operator/Sub.hpp"
 #include "aidge/utils/Registrar.hpp"
-#include "aidge/utils/Types.h"
-#include "aidge/backend/cpu/data/GetCPUPtr.h"
-#include <memory>
+
 #include <vector>
 
 namespace Aidge {
@@ -31,8 +29,6 @@ using SubImpl_cpu = OperatorImpl_cpu<Sub_Op,
     const std::vector<std::size_t>&,
     const std::vector<std::size_t>&,
     const void*,
-    const void*,
-    const void*,
     void*,
     void*)
 >;
diff --git a/include/aidge/backend/cpu/operator/SubImpl_kernels.hpp b/include/aidge/backend/cpu/operator/SubImpl_kernels.hpp
index cb16c037..8d3d80e9 100644
--- a/include/aidge/backend/cpu/operator/SubImpl_kernels.hpp
+++ b/include/aidge/backend/cpu/operator/SubImpl_kernels.hpp
@@ -157,8 +157,6 @@ void SubImpl_cpu_backward_kernel(const std::size_t input0Length,
                                  const std::vector<std::size_t>& dims0,
                                  const std::vector<std::size_t>& dims1,
                                  const std::vector<std::size_t>& outputDims,
-                                 const void* input0_,
-                                 const void* input1_,
                                  const void* grad_output_,
                                  void* gradientInput0_,
                                  void* gradientInput1_)
diff --git a/src/operator/SubImpl.cpp b/src/operator/SubImpl.cpp
index cce4e27a..7f57bf2f 100644
--- a/src/operator/SubImpl.cpp
+++ b/src/operator/SubImpl.cpp
@@ -59,11 +59,9 @@ void Aidge::SubImpl_cpu::backward() {
         /* grad0Length */ out0grad->size(),
         /* input0Dims */ in0->dims(),
         /* input1Dims */ in1->dims(),
-        out0grad->dims(),
-        getCPUPtr(in0),
-        getCPUPtr(in1),
-        getCPUPtr(out0grad),
-        getCPUPtr(in0grad),
-        getCPUPtr(in1grad));
+        /* outputDims */ out0grad->dims(),
+        /* gradOutput */ getCPUPtr(out0grad),
+        /* gradInput0 */ getCPUPtr(in0grad),
+        /* gradInput1 */ getCPUPtr(in1grad));
 }
 
diff --git a/unit_tests/operator/Test_MetaOperator.cpp b/unit_tests/operator/Test_MetaOperator.cpp
index 23bacda5..64c6886a 100644
--- a/unit_tests/operator/Test_MetaOperator.cpp
+++ b/unit_tests/operator/Test_MetaOperator.cpp
@@ -705,7 +705,7 @@ TEST_CASE("[cpu/operator] MetaOperator", "[MetaOperator][CPU]") {
         auto fc2 = FC(outChannels, inChannels, true, "fc2");
 
         // NOTE: Account for init step by adding 1 to the max timestep
         // parameter.
-        auto lif1 = Leaky(nbTimeSteps + 1, beta, threshold, "leaky");
+        auto lif1 = Leaky(nbTimeSteps + 1, beta, threshold, LeakyReset::Subtraction, "leaky");
         // associateInput() does not work
         fc1->input(1).first->getOperator()->setOutput(0, myWeights);
@@ -774,7 +774,7 @@ TEST_CASE("[cpu/operator] MetaOperator", "[MetaOperator][CPU]") {
         const auto nbTimeSteps = dims[0];
         const auto beta = betaDist(gen);
 
-        auto myLeaky = Leaky(nbTimeSteps, beta, 1.0, "leaky");
+        auto myLeaky = Leaky(nbTimeSteps, beta, 1.0, LeakyReset::Subtraction, "leaky");
         auto op = std::static_pointer_cast<MetaOperator_Op>(myLeaky->getOperator());
 
         // auto stack = Stack(2);
-- 
GitLab