/********************************************************************************
* Copyright (c) 2023 CEA-List
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
********************************************************************************/
#include <cmath>    // std::fabs
#include <cstddef>  // std::size_t
#include <memory>

#include <catch2/catch_test_macros.hpp>

#include "aidge/data/Tensor.hpp"
#include "aidge/data/TensorImpl.hpp"
#include "aidge/operator/Softmax.hpp"
#include "aidge_cpu.hpp"
using namespace Aidge;
// Forward-pass test for the CPU Softmax implementation.
// Each section feeds a fixed input tensor through a Softmax node and compares
// the raw output buffer element-wise against precomputed reference values.
TEST_CASE("[cpu/operator] Softmax(forward)") {
    // Maximum allowed absolute deviation per element between computed and
    // expected outputs (reference values are given to 8 decimal places).
    constexpr float absTolerance = 0.00001f;

    SECTION("2D Tensor") {
        std::shared_ptr<Tensor> input = std::make_shared<Tensor>(Array2D<float,2,10> {
            {
                {-0.21908280, 0.62226844, -0.01738115, 0.49075750, 0.42159843,
                 -0.70403218, 0.95780319, 1.39435363, 0.25255841, 0.20038256},
                { 0.23626225, 1.84539008, 1.89050162, -0.64871430, 0.37908587,
                  0.35077620, -0.78156322, -0.98952234, 0.04166317, 1.34357309}
            }
        });
        std::shared_ptr<Tensor> expectedOutput = std::make_shared<Tensor>(Array2D<float,2,10> {
            {
                {0.04883239, 0.11326669, 0.05974559, 0.09930880, 0.09267281, 0.03006749,
                 0.15842478, 0.24514021, 0.07825989, 0.07428131},
                {0.05429055, 0.27136859, 0.28389078, 0.02240700, 0.06262558, 0.06087753,
                 0.01961952, 0.01593576, 0.04469007, 0.16429459}
            }
        });

        // Build the node, configure dtype/backend, wire the input, then run.
        std::shared_ptr<Node> mySoftmax = Softmax();
        mySoftmax->getOperator()->setDatatype(DataType::Float32);
        mySoftmax->getOperator()->setBackend("cpu");
        mySoftmax->getOperator()->associateInput(0,input);
        mySoftmax->getOperator()->computeOutputDims();
        mySoftmax->forward();

        // Compare raw buffers element-wise; 2 * 10 = 20 elements.
        constexpr std::size_t elemCount2D = 20;
        float* resPtr = static_cast<float*>(mySoftmax->getOperator()->getOutput(0)->getImpl()->rawPtr());
        float* expectedPtr = static_cast<float*>(expectedOutput->getImpl()->rawPtr());
        for (std::size_t i = 0; i < elemCount2D; ++i) {
            // std::fabs guarantees the float overload (plain std::abs could
            // resolve to the int overload without <cmath> and truncate to 0).
            REQUIRE(std::fabs(resPtr[i] - expectedPtr[i]) < absTolerance);
        }
    }

    SECTION("4D Tensor") {
        std::shared_ptr<Tensor> input = std::make_shared<Tensor>(Array4D<float,2,3,3,3> {
            {
                {
                    {{8.28257084e-01, 7.99335480e-01, 7.36702740e-01},
                     {2.36729562e-01, 8.61912668e-01, 9.93067741e-01},
                     {1.63514376e-01, 8.95773172e-02, 2.96533108e-01}},
                    {{2.20776618e-01, 5.89067876e-01, 2.03930080e-01},
                     {1.31294072e-01, 7.10182846e-01, 1.08420849e-04},
                     {7.21750259e-01, 4.38212037e-01, 5.08823872e-01}},
                    {{4.30953979e-01, 1.51903450e-01, 3.76343548e-01},
                     {8.07861805e-01, 7.79679358e-01, 5.01209974e-01},
                     {9.31280375e-01, 9.94207084e-01, 1.74868107e-03}}
                },
                {
                    {{6.22058094e-01, 2.32256651e-02, 6.18222237e-01},
                     {9.58304763e-01, 2.11395025e-02, 4.95614648e-01},
                     {2.50825584e-01, 4.50860739e-01, 3.80362332e-01}},
                    {{9.91703272e-02, 5.06073236e-01, 4.88969564e-01},
                     {1.12059772e-01, 7.64178872e-01, 7.60362148e-01},
                     {2.84135342e-02, 4.29610193e-01, 1.27862811e-01}},
                    {{9.57209170e-01, 8.22797656e-01, 1.91352129e-01},
                     {9.52722490e-01, 6.35501027e-01, 5.67592978e-02},
                     {2.00799644e-01, 4.00822222e-01, 9.14380193e-01}}
                }
            }
        });
        std::shared_ptr<Tensor> expectedOutput = std::make_shared<Tensor>(Array4D<float,2,3,3,3> {
            {
                {
                    {{0.45109013, 0.42849392, 0.43775153},
                     {0.27246451, 0.35967633, 0.50454903},
                     {0.20397615, 0.20457645, 0.33543545}},
                    {{0.24571852, 0.34723747, 0.25694931},
                     {0.24519968, 0.30904123, 0.18692467},
                     {0.35646603, 0.28991172, 0.41476840}},
                    {{0.30319133, 0.22426860, 0.30529919},
                     {0.48233581, 0.33128241, 0.30852637},
                     {0.43955776, 0.50551182, 0.24979614}}
                },
                {
                    {{0.33434108, 0.20638679, 0.39505392},
                     {0.41263384, 0.20198789, 0.33922729},
                     {0.36339980, 0.34127754, 0.28713942}},
                    {{0.19819947, 0.33448750, 0.34715438},
                     {0.17702937, 0.42464229, 0.44204772},
                     {0.29093260, 0.33410171, 0.22306615}},
                    {{0.46745953, 0.45912567, 0.25779176},
                     {0.41033682, 0.37336978, 0.21872495},
                     {0.34566763, 0.32462072, 0.48979440}}
                }
            }
        });

        // Same configure-and-run sequence as the 2D section.
        std::shared_ptr<Node> mySoftmax = Softmax();
        mySoftmax->getOperator()->setDatatype(DataType::Float32);
        mySoftmax->getOperator()->setBackend("cpu");
        mySoftmax->getOperator()->associateInput(0,input);
        mySoftmax->getOperator()->computeOutputDims();
        mySoftmax->forward();

        // Compare raw buffers element-wise; 2 * 3 * 3 * 3 = 54 elements.
        constexpr std::size_t elemCount4D = 54;
        float* resPtr = static_cast<float*>(mySoftmax->getOperator()->getOutput(0)->getImpl()->rawPtr());
        float* expectedPtr = static_cast<float*>(expectedOutput->getImpl()->rawPtr());
        for (std::size_t i = 0; i < elemCount4D; ++i) {
            REQUIRE(std::fabs(resPtr[i] - expectedPtr[i]) < absTolerance);
        }
        // Exact tensor equality is intentionally not used: softmax outputs are
        // floating-point and only match the references to within absTolerance.
        // REQUIRE(*mySoftmax->getOperator()->getOutput(0) == *expectedOutput);
    }
}