
Update how the loss function works

Merged: Cyril Moineau requested to merge bindLoss into dev
1 file changed: +4 additions, -1 deletion
@@ -72,7 +72,10 @@ void Aidge::FCImpl_cpu::backward()
 {
     const FC_Op& op_ = dynamic_cast<const FC_Op&>(mOp);
     const auto& fc_grad = op_.getOutput(0)->grad();
-    assert(fc_grad && "missing ouput #0 gradient");
+    AIDGE_ASSERT(fc_grad, "missing ouput #0 gradient");
+    AIDGE_ASSERT(op_.getInput(0)->grad(), "missing input #0 gradient");
+    AIDGE_ASSERT(op_.getInput(1)->grad(), "missing input #1 gradient");
+    AIDGE_ASSERT(op_.getInput(2)->grad(), "missing input #2 gradient");
 
     // Find the correct kernel type
     const Registrar<FCImplBackward_cpu>::registrar_key registrarKey = {
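
The hunk replaces the plain C assert() on the output gradient with AIDGE_ASSERT and adds checks that the gradients of inputs #0, #1, and #2 are allocated before the backward kernel is selected. Below is a minimal, self-contained sketch of what such a check does; the SKETCH_ASSERT macro and the Tensor struct are hypothetical stand-ins, not the actual Aidge API, and the assumption (typical for this kind of macro) is that it throws with a message instead of being compiled out like assert() under NDEBUG.

// Hypothetical stand-in for an AIDGE_ASSERT-style check (not the Aidge macro).
#include <iostream>
#include <memory>
#include <sstream>
#include <stdexcept>

// Throws on failure, so the check also fires in release builds,
// unlike assert(), which is removed when NDEBUG is defined.
#define SKETCH_ASSERT(cond, msg)                                   \
    do {                                                           \
        if (!(cond)) {                                             \
            std::ostringstream oss;                                \
            oss << "Assertion failed: " << (msg);                  \
            throw std::runtime_error(oss.str());                   \
        }                                                          \
    } while (0)

// Toy tensor whose gradient may or may not have been allocated yet.
struct Tensor {
    std::shared_ptr<Tensor> gradient;
    const std::shared_ptr<Tensor>& grad() const { return gradient; }
};

int main() {
    Tensor output;  // gradient intentionally left unallocated
    try {
        // Mirrors the intent of the new checks in FCImpl_cpu::backward():
        // fail fast with a readable message if a required gradient is missing.
        SKETCH_ASSERT(output.grad(), "missing output #0 gradient");
    } catch (const std::runtime_error& e) {
        std::cerr << e.what() << '\n';
    }
    return 0;
}

The practical difference from the removed assert() is that a message-carrying check of this kind keeps reporting missing gradients even in optimized builds, which matters when the backward pass is driven from Python bindings rather than a debug C++ build.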