Commit c0cc9273 authored by Cyril Moineau

Remove multi-line comments.

parent 6ec7a518
@@ -72,9 +72,6 @@ void Aidge::Producer_Op::forward() {
     if (!backend().empty()) {
         mImpl->forward();
     }
-    // else {
-    //     fmt::print("Basic Producer forward() function.\n");
-    // }
     runHooks();
 }
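The removed lines were a commented-out fallback that merely logged when no backend implementation was attached. After this commit, the hunk leaves forward() as follows (a reconstruction from the context lines above; code elided from the diff is assumed unchanged):

    void Aidge::Producer_Op::forward() {
        // Delegate to the backend implementation when one is set.
        if (!backend().empty()) {
            mImpl->forward();
        }
        runHooks();
    }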
@@ -74,21 +74,6 @@ void Aidge::SequentialScheduler::forward(bool forwardDims, const std::vector<std
 }
 void Aidge::SequentialScheduler::backward() {
-    // create and set Grad values
-    // if (instanciateGrad) { compile_gradient(mGraphView); }
-    // const auto& ordered_outputs = mGraphView->getOrderedOutputs();
-    // AIDGE_ASSERT(ordered_outputs.size() == data.size(), "You must provide the \
-    //     right number of data objects to run the backward function. \
-    //     {} outputs detected for the current GraphView when {} were \
-    //     provided.", ordered_outputs.size(), data.size());
-    // for (std::size_t i = 0; i < ordered_outputs.size(); ++i) {
-    //     const std::shared_ptr<OperatorTensor> op_ = std::dynamic_pointer_cast<OperatorTensor>(ordered_outputs[i].first->getOperator());
-    //     const std::shared_ptr<Tensor> t_grad = op_->getOutput(ordered_outputs[i].second)->grad();
-    //     AIDGE_ASSERT(data[i]->dims() == t_grad->dims(), "Wrong gradient size (expected {}, got {}).", t_grad->dims(), data[i]->dims());
-    //     *t_grad = data[i]->clone();
-    // }
     // Generate scheduling *only if empty*
     // If scheduling was already generated (in one or several steps, i.e. one or
     // several successive calls to generateScheduling()), do not generate it twice
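For reference, the deleted comment block sketched a gradient-seeding step: copying caller-provided tensors into the grad() of each ordered GraphView output before running backward. A cleaned-up version of that commented-out logic would look roughly like the sketch below. This is a hypothetical reconstruction: it uses only the identifiers visible in the removed lines and assumes backward() receives a `data` argument of type std::vector<std::shared_ptr<Tensor>>:

    // Hypothetical reconstruction of the removed gradient-seeding logic.
    // `data` is assumed to hold one gradient tensor per GraphView output.
    const auto& ordered_outputs = mGraphView->getOrderedOutputs();
    AIDGE_ASSERT(ordered_outputs.size() == data.size(),
                 "You must provide the right number of data objects to run the "
                 "backward function. {} outputs detected for the current GraphView "
                 "when {} were provided.", ordered_outputs.size(), data.size());
    for (std::size_t i = 0; i < ordered_outputs.size(); ++i) {
        // Locate the gradient tensor attached to the i-th ordered output.
        const auto op_ = std::dynamic_pointer_cast<OperatorTensor>(ordered_outputs[i].first->getOperator());
        const std::shared_ptr<Tensor> t_grad = op_->getOutput(ordered_outputs[i].second)->grad();
        AIDGE_ASSERT(data[i]->dims() == t_grad->dims(),
                     "Wrong gradient size (expected {}, got {}).", t_grad->dims(), data[i]->dims());
        // Seed the gradient with the provided value.
        *t_grad = data[i]->clone();
    }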