/********************************************************************************
* Copyright (c) 2023 CEA-List
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
********************************************************************************/
#include <memory>

#include <catch2/catch_test_macros.hpp>

#include "aidge/backend/cpu/data/TensorImpl.hpp"
#include "aidge/backend/cpu/operator/PaddedConvImpl.hpp"
#include "aidge/data/DataType.hpp"
#include "aidge/data/Tensor.hpp"
#include "aidge/graph/GraphView.hpp"
#include "aidge/graph/Node.hpp"
#include "aidge/operator/MetaOperatorDefs.hpp"
#include "aidge/operator/OperatorTensor.hpp"
#include "aidge/scheduler/SequentialScheduler.hpp"
#include "aidge/utils/ArrayHelpers.hpp"

using namespace Aidge;

TEST_CASE("[cpu/operator] PaddedConv(forward)", "[PaddedConv][CPU]") {
SECTION("Classic Conv") {
std::shared_ptr<Node> myConv = PaddedConv(3,4,{3,3}, "myconv");
auto op = std::static_pointer_cast<OperatorTensor>(myConv->getOperator());
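        // Weights in OIHW layout: 4 output channels x 3 input channels x 3x3
        // kernels, filled with the values 0..107.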
std::shared_ptr<Tensor> myWeights = std::make_shared<Tensor>(Array4D<int,4,3,3,3> {
{
{
{{ 0, 1, 2},
{ 3, 4, 5},
{ 6, 7, 8}},
{{ 9, 10, 11},
{ 12, 13, 14},
{ 15, 16, 17}},
{{ 18, 19, 20},
{ 21, 22, 23},
{ 24, 25, 26}}
},
{
{{ 27, 28, 29},
{ 30, 31, 32},
{ 33, 34, 35}},
{{ 36, 37, 38},
{ 39, 40, 41},
{ 42, 43, 44}},
{{ 45, 46, 47},
{ 48, 49, 50},
{ 51, 52, 53}}
},
{
{{ 54, 55, 56},
{ 57, 58, 59},
{ 60, 61, 62}},
{{ 63, 64, 65},
{ 66, 67, 68},
{ 69, 70, 71}},
{{ 72, 73, 74},
{ 75, 76, 77},
{ 78, 79, 80}}
},
{
{{ 81, 82, 83},
{ 84, 85, 86},
{ 87, 88, 89}},
{{ 90, 91, 92},
{ 93, 94, 95},
{ 96, 97, 98}},
{{ 99, 100, 101},
{102, 103, 104},
{105, 106, 107}}
}
}
});
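        // One bias value per output channel.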
std::shared_ptr<Tensor> myBias = std::make_shared<Tensor>(Array1D<int,4> {{7,0,9,0}});
std::shared_ptr<Tensor> myInput = std::make_shared<Tensor>(Array4D<int,2,3,5,5> { //NCHW
{
{
{{ 0, 1, 2, 3, 4},
{ 5, 6, 7, 8, 9},
{ 10, 11, 12, 13, 14},
{ 15, 16, 17, 18, 19},
{ 20, 21, 22, 23, 24}},
{{ 25, 26, 27, 28, 29},
{ 30, 31, 32, 33, 34},
{ 35, 36, 37, 38, 39},
{ 40, 41, 42, 43, 44},
{ 45, 46, 47, 48, 49}},
{{ 50, 51, 52, 53, 54},
{ 55, 56, 57, 58, 59},
{ 60, 61, 62, 63, 64},
{ 65, 66, 67, 68, 69},
{ 70, 71, 72, 73, 74}}
},
{
{{ 75, 76, 77, 78, 79},
{ 80, 81, 82, 83, 84},
{ 85, 86, 87, 88, 89},
{ 90, 91, 92, 93, 94},
{ 95, 96, 97, 98, 99}},
{{100, 101, 102, 103, 104},
{105, 106, 107, 108, 109},
{110, 111, 112, 113, 114},
{115, 116, 117, 118, 119},
{120, 121, 122, 123, 124}},
{{125, 126, 127, 128, 129},
{130, 131, 132, 133, 134},
{135, 136, 137, 138, 139},
{140, 141, 142, 143, 144},
{145, 146, 147, 148, 149}}
}
}
});
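        // Expected result: with no padding, a 3x3 kernel over a 5x5 input gives
        // 3x3 feature maps, hence an output of shape {2,4,3,3}.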
std::shared_ptr<Tensor> myOutput = std::make_shared<Tensor>(Array4D<int,2,4,3,3> {
{
{
{{ 15226, 15577, 15928},
{ 16981, 17332, 17683},
{ 18736, 19087, 19438}},
{{ 37818, 38898, 39978},
{ 43218, 44298, 45378},
{ 48618, 49698, 50778}},
{{ 60426, 62235, 64044},
{ 69471, 71280, 73089},
{ 78516, 80325, 82134}},
{{ 83016, 85554, 88092},
{ 95706, 98244, 100782},
{108396, 110934, 113472}}
},
{
{{ 41551, 41902, 42253},
{ 43306, 43657, 44008},
{ 45061, 45412, 45763}},
{{118818, 119898, 120978},
{124218, 125298, 126378},
{129618, 130698, 131778}},
{{196101, 197910, 199719},
{205146, 206955, 208764},
{214191, 216000, 217809}},
{{273366, 275904, 278442},
{286056, 288594, 291132},
{298746, 301284, 303822}}
}
}
});
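        // Wire the graph: input 0 receives the data tensor directly, while the
        // weight and bias Producers attached to inputs 1 and 2 get their output
        // tensors replaced.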
myConv->getOperator()->associateInput(0, myInput);
myConv->input(1).first->getOperator()->setOutput(0, myWeights);
myConv->input(2).first->getOperator()->setOutput(0, myBias);
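        // Gather the node and its producers into a graph, configure it for
        // int32 on the CPU backend, and run a single forward pass.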
auto g = getConnectedGraphView(myConv);
g->setDataType(DataType::Int32);
g->setBackend("cpu");
auto scheduler = SequentialScheduler(g);
scheduler.forward();
REQUIRE(*(op->getOutput(0)) == *myOutput);
}
SECTION("test Padding") {
std::shared_ptr<Node> myConv = PaddedConv(3,4,{3,3}, "myconv", {1,1}, {1,1,1,1});
auto op = std::static_pointer_cast<OperatorTensor>(myConv->getOperator());
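        // Weights, bias and input are identical to the "Classic Conv" section.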
std::shared_ptr<Tensor> myWeights = std::make_shared<Tensor>(Array4D<int,4,3,3,3> {
{
{
{{ 0, 1, 2},
{ 3, 4, 5},
{ 6, 7, 8}},
{{ 9, 10, 11},
{ 12, 13, 14},
{ 15, 16, 17}},
{{ 18, 19, 20},
{ 21, 22, 23},
{ 24, 25, 26}}
},
{
{{ 27, 28, 29},
{ 30, 31, 32},
{ 33, 34, 35}},
{{ 36, 37, 38},
{ 39, 40, 41},
{ 42, 43, 44}},
{{ 45, 46, 47},
{ 48, 49, 50},
{ 51, 52, 53}}
},
{
{{ 54, 55, 56},
{ 57, 58, 59},
{ 60, 61, 62}},
{{ 63, 64, 65},
{ 66, 67, 68},
{ 69, 70, 71}},
{{ 72, 73, 74},
{ 75, 76, 77},
{ 78, 79, 80}}
},
{
{{ 81, 82, 83},
{ 84, 85, 86},
{ 87, 88, 89}},
{{ 90, 91, 92},
{ 93, 94, 95},
{ 96, 97, 98}},
{{ 99, 100, 101},
{102, 103, 104},
{105, 106, 107}}
}
}
});
std::shared_ptr<Tensor> myBias = std::make_shared<Tensor>(Array1D<int,4> {{7,0,9,0}});
std::shared_ptr<Tensor> myInput = std::make_shared<Tensor>(Array4D<int,2,3,5,5> { //NCHW
{
{
{{ 0, 1, 2, 3, 4},
{ 5, 6, 7, 8, 9},
{ 10, 11, 12, 13, 14},
{ 15, 16, 17, 18, 19},
{ 20, 21, 22, 23, 24}},
{{ 25, 26, 27, 28, 29},
{ 30, 31, 32, 33, 34},
{ 35, 36, 37, 38, 39},
{ 40, 41, 42, 43, 44},
{ 45, 46, 47, 48, 49}},
{{ 50, 51, 52, 53, 54},
{ 55, 56, 57, 58, 59},
{ 60, 61, 62, 63, 64},
{ 65, 66, 67, 68, 69},
{ 70, 71, 72, 73, 74}}
},
{
{{ 75, 76, 77, 78, 79},
{ 80, 81, 82, 83, 84},
{ 85, 86, 87, 88, 89},
{ 90, 91, 92, 93, 94},
{ 95, 96, 97, 98, 99}},
{{100, 101, 102, 103, 104},
{105, 106, 107, 108, 109},
{110, 111, 112, 113, 114},
{115, 116, 117, 118, 119},
{120, 121, 122, 123, 124}},
{{125, 126, 127, 128, 129},
{130, 131, 132, 133, 134},
{135, 136, 137, 138, 139},
{140, 141, 142, 143, 144},
{145, 146, 147, 148, 149}}
}
}
});
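        // Expected result: padding of 1 keeps the 5x5 spatial size, so the output
        // shape is {2,4,5,5}; its interior values match the unpadded case above.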
std::shared_ptr<Tensor> myOutput = std::make_shared<Tensor>(Array4D<int,2,4,5,5> {
{
{
{{ 6895, 10225, 10486, 10747, 7063},
{ 10303, 15226, 15577, 15928, 10429},
{ 11518, 16981, 17332, 17683, 11554},
{ 12733, 18736, 19087, 19438, 12679},
{ 8047, 11791, 11998, 12205, 7927}},
{{ 15960, 24069, 24816, 25563, 17100},
{ 25119, 37818, 38898, 39978, 26703},
{ 28764, 43218, 44298, 45378, 30258},
{ 32409, 48618, 49698, 50778, 33813},
{ 21972, 32925, 33618, 34311, 22824}},
{{ 25041, 37929, 39162, 40395, 27153},
{ 39951, 60426, 62235, 64044, 42993},
{ 46026, 69471, 71280, 73089, 48978},
{ 52101, 78516, 80325, 82134, 54963},
{ 35913, 54075, 55254, 56433, 37737}},
{{ 34104, 51771, 53490, 55209, 37188},
{ 54765, 83016, 85554, 88092, 59265},
{ 63270, 95706, 98244, 100782, 67680},
{ 71775, 108396, 110934, 113472, 76095},
{ 49836, 75207, 76872, 78537, 52632}}
},
{
{{ 20395, 29800, 30061, 30322, 19663},
{ 28528, 41551, 41902, 42253, 27304},
{ 29743, 43306, 43657, 44008, 28429},
{ 30958, 45061, 45412, 45763, 29554},
{ 18847, 27316, 27523, 27730, 17827}},
{{ 53760, 80094, 80841, 81588, 54000},
{ 79794, 118818, 119898, 120978, 80028},
{ 83439, 124218, 125298, 126378, 83583},
{ 87084, 129618, 130698, 131778, 87138},
{ 57072, 84900, 85593, 86286, 57024}},
{{ 87141, 130404, 131637, 132870, 88353},
{131076, 196101, 197910, 199719, 132768},
{137151, 205146, 206955, 208764, 138753},
{143226, 214191, 216000, 217809, 144738},
{ 95313, 142500, 143679, 144858, 96237}},
{{120504, 180696, 182415, 184134, 122688},
{182340, 273366, 275904, 278442, 185490},
{190845, 286056, 288594, 291132, 193905},
{199350, 298746, 301284, 303822, 202320},
{133536, 200082, 201747, 203412, 135432}}
}
}
});
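        // Identical wiring and execution; only the padding of the operator differs.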
myConv->getOperator()->associateInput(0, myInput);
myConv->input(1).first->getOperator()->setOutput(0, myWeights);
myConv->input(2).first->getOperator()->setOutput(0, myBias);
auto g = getConnectedGraphView(myConv);
g->setDataType(DataType::Int32);
g->setBackend("cpu");
auto scheduler = SequentialScheduler(g);
scheduler.forward();
REQUIRE(*(op->getOutput(0)) == *myOutput);
}
}