
Add backward functions for ReLU, Sigmoid and Tanh

Merged Olivier Antoni requested to merge (removed):dev into dev
3 files  +3  −3
@@ -23,7 +23,7 @@ void ReLUImpl_cpu_backward_kernel(const std::size_t inputLenght,
                                   const void* input_, const void* output_, const void* grad_output_,
                                   void* grad_input_) {
     const I* input = static_cast<const I*>(input_);
-    #const O* output = static_cast<const O*>(output_);
+    //const O* output = static_cast<const O*>(output_);
     const GO* grad_output = static_cast<const GO*>(grad_output_);
     GI* grad_input = static_cast<GI*>(grad_input_);
     for (std::size_t i = 0; i < inputLenght; ++i) {
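For context, below is a minimal sketch of how the complete ReLU backward kernel around this hunk might read. The signature, the template-parameter names (I, O, GO, GI) and the commented-out output cast are taken from the diff; the loop body is an assumption based on the standard ReLU gradient (grad_input = grad_output where input > 0, else 0), not the merged code itself.

// Sketch only: plausible completion of the kernel shown in the hunk above.
// The loop body is assumed from the standard ReLU derivative; it is not
// copied from the merge request.
#include <cstddef>

template <class I, class O, class GO, class GI>
void ReLUImpl_cpu_backward_kernel(const std::size_t inputLenght,
                                  const void* input_, const void* output_, const void* grad_output_,
                                  void* grad_input_) {
    const I* input = static_cast<const I*>(input_);
    //const O* output = static_cast<const O*>(output_);
    const GO* grad_output = static_cast<const GO*>(grad_output_);
    GI* grad_input = static_cast<GI*>(grad_input_);
    for (std::size_t i = 0; i < inputLenght; ++i) {
        // Pass the upstream gradient through only where the input was positive.
        grad_input[i] = (input[i] > I(0)) ? static_cast<GI>(grad_output[i]) : GI(0);
    }
}

The unused output pointer being commented out rather than dropped suggests the three backward kernels share one signature: Sigmoid and Tanh can express their gradients from the forward output (y·(1−y) and 1−y² respectively), while ReLU only needs the input, so its output cast is left disabled.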