Skip to content
Snippets Groups Projects
Commit c9d22132 authored by Cyril Moineau's avatar Cyril Moineau
Browse files

Rename initGradiant->initGrad.

parent d5e82429
No related branches found
No related tags found
2 merge requests!6version 0.1.1,!5Update how loss function work
Pipeline #45160 failed
...@@ -49,7 +49,7 @@ public: ...@@ -49,7 +49,7 @@ public:
virtual void setParameters(const std::vector<std::shared_ptr<Tensor>>& parameters) { virtual void setParameters(const std::vector<std::shared_ptr<Tensor>>& parameters) {
mParameters = parameters; mParameters = parameters;
for (const auto& param : parameters) { for (const auto& param : parameters) {
param->initGradient(); // create gradient and set it to zeros param->initGrad(); // create gradient and set it to zeros
} }
} }
......
...@@ -77,7 +77,7 @@ TEST_CASE("[learning/SGD] update", "[Optimizer][SGD]") { ...@@ -77,7 +77,7 @@ TEST_CASE("[learning/SGD] update", "[Optimizer][SGD]") {
optim_tensors[i] = std::make_shared<Tensor>(dims); optim_tensors[i] = std::make_shared<Tensor>(dims);
optim_tensors[i]->setBackend("cpu"); optim_tensors[i]->setBackend("cpu");
optim_tensors[i]->getImpl()->copy(val_tensors[i].get(), size_tensors[i]); optim_tensors[i]->getImpl()->copy(val_tensors[i].get(), size_tensors[i]);
optim_tensors[i]->initGradient(); optim_tensors[i]->initGrad();
grad_tensors[i] = std::make_shared<Tensor>(dims); grad_tensors[i] = std::make_shared<Tensor>(dims);
grad_tensors[i]->setBackend("cpu"); grad_tensors[i]->setBackend("cpu");
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment