Commit 30a052a3 authored by Olivier BICHLER

Improved error messages

parent fd5a7414
1 merge request: !212 "Version 0.3.0"
Pipeline #54739 passed
@@ -25,6 +25,7 @@ void init_Scheduler(py::module& m){
         .def(py::init<std::shared_ptr<GraphView>&>(), py::arg("graph_view"))
         .def("graph_view", &Scheduler::graphView)
         .def("save_scheduling_diagram", &Scheduler::saveSchedulingDiagram, py::arg("file_name"))
+        .def("save_static_scheduling_diagram", &Scheduler::saveStaticSchedulingDiagram, py::arg("file_name"))
         .def("resetScheduling", &Scheduler::resetScheduling)
         .def("generate_scheduling", &Scheduler::generateScheduling)
         .def("get_static_scheduling", &Scheduler::getStaticScheduling, py::arg("step") = 0)
......
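For reference, a hedged C++ usage sketch of the method the new binding exposes. The header paths and the direct construction of Scheduler are assumptions inferred from the binding code above, not taken from this commit:

// Sketch only: assumes these Aidge header paths and that Scheduler is
// directly constructible from a GraphView, as the py::init binding suggests.
#include <memory>
#include "aidge/graph/GraphView.hpp"
#include "aidge/scheduler/Scheduler.hpp"

void dumpDiagrams(std::shared_ptr<Aidge::GraphView> graph) {
    Aidge::Scheduler scheduler(graph);
    // Compute the static schedule (bound as generate_scheduling above).
    scheduler.generateScheduling();
    // New in this commit: dump the static schedule to a diagram file.
    // "schedule_static" is an arbitrary output file name.
    scheduler.saveStaticSchedulingDiagram("schedule_static");
}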
@@ -43,16 +43,17 @@ bool Aidge::Conv_Op<DIM>::forwardDims(bool /*allowDataDependency*/) {
     if (inputsAssociated()) {
         // first check weight since it defines inChannels and outChannels
         AIDGE_ASSERT((getInput(1)->nbDims() == (DIM+2)),
-                    "Wrong weight Tensor dimension: {} for Conv{}D operator.", getInput(1)->nbDims(), DIM);
+                    "Wrong weight Tensor dimension: {} for Conv{}D operator. Expected number of dimensions is {}.", getInput(1)->nbDims(), DIM, DIM+2);
         // check data
         AIDGE_ASSERT((getInput(0)->nbDims() == (DIM+2)) &&
                     (getInput(0)->template dims<DIM+2>()[1] == inChannels()),
-                    "Wrong input size for Conv operator.");
+                    "Wrong input size ({}) for Conv operator. Expected dims are [x, {}, {}].", getInput(0)->dims(), inChannels(), fmt::join(std::vector<std::string>(DIM, "x"), ", "));
         // check optional bias
         if(getInput(2))
             AIDGE_ASSERT((getInput(2)->nbDims() == (1)) &&
                         (getInput(2)->template dims<1>()[0] == outChannels()),
-                        "Wrong bias size for Conv operator.");
+                        "Wrong bias size ({}) for Conv operator. Expected dims are [{}].", getInput(2)->dims(), outChannels());
         std::array<DimSize_t, DIM + 2> outputDims{};
         const std::array<DimSize_t, DIM + 2> inputDims(getInput(0)->template dims<DIM+2>());
......
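The new messages rely on {fmt} formatting, including fmt::join to render one "x" placeholder per spatial dimension. A minimal, self-contained sketch of that pattern, assuming only the {fmt} library, with DIM fixed to 2 and made-up tensor dims:

#include <cstddef>
#include <string>
#include <vector>
#include <fmt/format.h>
#include <fmt/ranges.h>  // needed for fmt::join and range formatting

int main() {
    constexpr std::size_t DIM = 2;
    const std::size_t inChannels = 3;
    // fmt::join renders DIM copies of "x" separated by ", ", so the expected
    // shape reads [x, <channels>, x, x] for a 2D convolution.
    const std::string msg = fmt::format(
        "Wrong input size ({}) for Conv operator. Expected dims are [x, {}, {}].",
        std::vector<int>{1, 4, 224, 224}, inChannels,
        fmt::join(std::vector<std::string>(DIM, "x"), ", "));
    fmt::print("{}\n", msg);
    // -> Wrong input size ([1, 4, 224, 224]) for Conv operator. Expected dims are [x, 3, x, x].
}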
@@ -44,16 +44,17 @@ bool Aidge::ConvDepthWise_Op<DIM>::forwardDims(bool /*allowDataDependency*/) {
     if (inputsAssociated()) {
         // first check weight since it defines nbChannels
         AIDGE_ASSERT((getInput(1)->nbDims() == (DIM+2)),
-                    "Wrong weight Tensor dimension: {} for Conv{}D operator.", getInput(1)->nbDims(), DIM);
+                    "Wrong weight Tensor dimension: {} for ConvDepthWise{}D operator. Expected number of dimensions is {}.", getInput(1)->nbDims(), DIM, DIM+2);
         // check data
         AIDGE_ASSERT((getInput(0)->nbDims() == (DIM+2)) &&
                     (getInput(0)->template dims<DIM+2>()[1] == nbChannels()),
-                    "Wrong input size for Conv operator.");
+                    "Wrong input size ({}) for ConvDepthWise operator. Expected dims are [x, {}, {}].", getInput(0)->dims(), nbChannels(), fmt::join(std::vector<std::string>(DIM, "x"), ", "));
         // check optional bias
         if(getInput(2))
             AIDGE_ASSERT((getInput(2)->nbDims() == (1)) &&
                         (getInput(2)->template dims<1>()[0] == nbChannels()),
-                        "Wrong bias size for Conv operator.");
+                        "Wrong bias size ({}) for ConvDepthWise operator. Expected dims are [{}].", getInput(2)->dims(), nbChannels());
         std::array<DimSize_t, DIM + 2> outputDims = {};
         const std::array<DimSize_t, DIM + 2> inputDims(getInput(0)->template dims<DIM+2>());
......
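This hunk follows the same pattern as the Conv one, with nbChannels() in place of inChannels() and outChannels(). To show how such a condition-plus-message pair behaves when it fires, here is an illustrative stand-in for AIDGE_ASSERT; SKETCH_ASSERT is hypothetical, and the real Aidge macro may abort rather than throw:

#include <cstddef>
#include <cstdio>
#include <stdexcept>
#include <fmt/format.h>

// Hypothetical stand-in: evaluates the condition and, on failure, throws
// with a fmt-formatted message, mimicking the AIDGE_ASSERT calls above.
#define SKETCH_ASSERT(cond, ...) \
    do { if (!(cond)) throw std::runtime_error(fmt::format(__VA_ARGS__)); } while (false)

int main() {
    const std::size_t DIM = 2;
    const std::size_t weightDims = 3;  // deliberately wrong: a 2D depthwise weight needs DIM+2 = 4 dims
    try {
        SKETCH_ASSERT(weightDims == DIM + 2,
            "Wrong weight Tensor dimension: {} for ConvDepthWise{}D operator. "
            "Expected number of dimensions is {}.", weightDims, DIM, DIM + 2);
    } catch (const std::exception& e) {
        std::puts(e.what());
        // -> Wrong weight Tensor dimension: 3 for ConvDepthWise2D operator. Expected number of dimensions is 4.
    }
}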