Commit 53af86fc authored by Grégoire Kubler

chore : added documentation + cleaned up prints

parent 47734f88
2 merge requests: !212 Version 0.3.0, !194 Feat/operator squeeze operator unsqueeze
@@ -12,13 +12,12 @@
namespace Aidge{
/**
- * type for recipes function use in query and resolve
- */
+ * @brief type for recipes function use in query and resolve
+ */
using RecipesFunctionType = std::function<void(std::shared_ptr<MatchSolution>)>;
/**
- * @brief class which is the hight level interface for graph matching, used to simplify match definition
- *
+ * @brief high level interface for graph matching, used to simplify match definition
 */
class GraphRegex{
......
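For context, here is a minimal usage sketch of the interface documented above (the include path and the methods setNodeKey, addQuery and appliedRecipes are assumptions based on this class, not part of this diff): a query is registered together with a recipe callback that receives each MatchSolution, then the regex is applied to a GraphView.

#include <memory>
#include "aidge/graph/GraphView.hpp"
#include "aidge/graphRegex/GraphRegex.hpp"

// Hypothetical recipe: match every Conv->ReLU pair and transform each solution.
void applyMyRecipe(std::shared_ptr<Aidge::GraphView> graphView) {
    auto regex = std::make_shared<Aidge::GraphRegex>();
    regex->setNodeKey("Conv", "getType($) =='Conv'");
    regex->setNodeKey("ReLU", "getType($) =='ReLU'");
    regex->addQuery("Conv->ReLU", [](std::shared_ptr<Aidge::MatchSolution> solution) {
        // inspect or rewrite the matched nodes here
    });
    regex->appliedRecipes(graphView);
}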
@@ -40,6 +40,14 @@ protected:
public:
OperatorTensor() = delete;
/**
 * @brief OperatorTensor constructor. This constructor is not meant to be called directly;
 * it is called by the constructors of derived classes, as every operator class derives from this one.
 *
 * @param[in] type : type of the operator (e.g. "Add", "AveragePool", ...)
 * @param[in] inputsCategory : describes the category of each input.
 * @param[in] nbOut : number of tensors this operator will output
 */
OperatorTensor(const std::string& type, const std::vector<InputCategory>& inputsCategory,
const IOIndex_t nbOut);
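As an illustration of the constructor documented above, a minimal sketch of a derived operator (the operator name "MyOp" and the chosen InputCategory enumerators are assumptions; a real operator also overrides the remaining virtual methods such as backend handling):

#include <string>
#include <vector>
#include "aidge/operator/OperatorTensor.hpp"

namespace Aidge {
// Hypothetical operator with one mandatory data input, one optional parameter input and one output.
class MyOp : public OperatorTensor {
public:
    static const std::string Type;
    MyOp() : OperatorTensor(Type, {InputCategory::Data, InputCategory::OptionalParam}, 1) {}
};
const std::string MyOp::Type = "MyOp";
} // namespace Aidge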
@@ -79,6 +87,15 @@ public:
* For each dataInput Tensor of the Operator, the first index and dimensions of the feature area.
*/
virtual std::vector<std::pair<std::vector<Aidge::DimSize_t>, std::vector<DimSize_t>>> computeReceptiveField(const std::vector<DimSize_t>& firstEltDims, const std::vector<DimSize_t>& outputDims, const IOIndex_t outputIdx = 0) const;
/**
 * @brief Computes the dimensions of the operator's output tensors from the input sizes.
 * If the output dimensions cannot be computed because they depend on undefined inputs, forwardDims returns false and enters TOKEN mode for subsequent tensors.
 * - TOKEN mode means that forwardDims will only ensure that all inputs and outputs of the graph the node belongs to are connected.
 * @param[in] allowDataDependency if set to true, the operator's output dimensions may depend on the dimensions of optional parameter tensors.
 * @return true if the dims have been properly forwarded, false otherwise. If false is returned, forwardDims enters TOKEN mode.
 */
virtual bool forwardDims(bool allowDataDependency = false);
virtual bool dimsForwarded() const;
///////////////////////////////////////////////////
......
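A short sketch of the call pattern described in the forwardDims() documentation above (op is assumed to be an already connected operator; this is an illustration, not part of the diff):

#include <memory>
#include "aidge/operator/OperatorTensor.hpp"
#include "aidge/utils/Log.hpp"

void propagateDims(const std::shared_ptr<Aidge::OperatorTensor>& op) {
    // Propagate dimensions once the inputs are associated; a false return value
    // means the dims could not be resolved yet and the operator stays in TOKEN mode.
    if (!op->forwardDims(/*allowDataDependency=*/true)) {
        Aidge::Log::debug("Output dimensions not resolved yet; operating in TOKEN mode.");
    }
}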
@@ -103,7 +103,7 @@ public:
void forward() override final;
void backward() override final {
- // fmt::print("Basic Producer backward() function.\n");
+ Log::debug("Basic Producer backward() function.");
}
void setOutput(const IOIndex_t outputIdx, const std::shared_ptr<Data>& data) const override;
......
@@ -78,6 +78,13 @@ void init_Log(py::module& m){
.def_static("set_console_level", &Log::setConsoleLevel, py::arg("level"),
R"mydelimiter(
Set the minimum log level displayed in the console.
Available `Level`s in ascending order:
- Level.Debug
- Level.Info
- Level.Notice
- Level.Warn
- Level.Error
- Level.Fatal
:param level: Log level.
:type level: Level
@@ -93,6 +100,13 @@ void init_Log(py::module& m){
.def_static("set_file_level", &Log::setFileLevel, py::arg("level"),
R"mydelimiter(
Set the minimum log level saved in the log file.
Available `Level`s in ascending order:
- Level.Debug
- Level.Info
- Level.Notice
- Level.Warn
- Level.Error
- Level.Fatal
:param level: Log level.
:type level: Level
......
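A minimal sketch of the C++ calls wrapped by the bindings above (the exact enumerator spelling Log::Level::Notice is assumed): lower the console verbosity while keeping the log file detailed.

#include "aidge/utils/Log.hpp"

int main() {
    // Keep the console readable: only Notice and above are printed.
    Aidge::Log::setConsoleLevel(Aidge::Log::Level::Notice);
    // The log file keeps everything from Debug up.
    Aidge::Log::setFileLevel(Aidge::Log::Level::Debug);
    Aidge::Log::debug("only written to the log file");
    Aidge::Log::notice("written to both console and file");
    return 0;
}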
@@ -211,7 +211,7 @@ void Aidge::Node::setInputId(const IOIndex_t inId, const IOIndex_t newNodeoutId)
"Input index ({}) is out of bound ({}) for node {} (of type {})",
inId, nbInputs(), name(), type());
if (mIdOutParents[inId] != gk_IODefaultIndex) {
Log::notice("Notice: filling a Tensor already attributed");
Log::notice("Filling a Tensor already attributed.");
auto originalParent = input(inId);
// remove original parent reference to child
// find the output ID for original Parent
@@ -279,7 +279,7 @@ void Aidge::Node::addChild(std::shared_ptr<GraphView> otherView, const IOIndex_t
void Aidge::Node::addParent(const std::shared_ptr<Node> other_node, const IOIndex_t inId) {
if (getParent(inId) != nullptr) {
Log::notice("Notice: you are replacing an existing parent for node {} (of type {})", name(), type());
Log::notice("You are replacing an existing parent for node {} (of type {}).", name(), type());
}
AIDGE_ASSERT(inId != gk_IODefaultIndex && inId < nbInputs(),
"Input index ({}) is out of bound ({}) for node {} (of type {})",
......
@@ -33,6 +33,7 @@
#include "aidge/operator/MetaOperator.hpp"
#include "aidge/operator/OperatorTensor.hpp"
#include "aidge/operator/Producer.hpp"
#include "aidge/utils/Log.hpp"
#include "aidge/utils/Types.h"
@@ -665,7 +666,7 @@ Aidge::Elts_t Aidge::Scheduler::getNbAvailableData(const std::shared_ptr<Node>&
// => This means data was fed manually to the input, without a Producer
// In this case, we assume single-use data (unlike a Producer, which
// keeps producing the data each time it is needed).
fmt::print("No producer node attached to input#{} for node {} ({})\n", inputIdx, node->name(), node->type());
Log::warn("No producer node attached to input#{} for node {} ({})\n", inputIdx, node->name(), node->type());
return Elts_t::DataElts(std::static_pointer_cast<Tensor>(node->getOperator()->getRawInput(inputIdx))->size());
}
......
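For context, a hedged sketch of the two ways an input can be provided, matching the comment above (Producer(), Node::addChild and Operator::associateInput are assumed to exist with roughly these shapes; node and dataTensor are pre-existing objects in this illustration):

#include <memory>
#include "aidge/data/Tensor.hpp"
#include "aidge/graph/Node.hpp"
#include "aidge/operator/Producer.hpp"

void feedInput(std::shared_ptr<Aidge::Node> node, std::shared_ptr<Aidge::Tensor> dataTensor) {
    // 1) Attach a Producer node: the data is re-emitted every time the scheduler needs it.
    auto prod = Aidge::Producer(dataTensor, "input_producer");
    prod->addChild(node, 0, 0);

    // 2) Or feed the tensor directly, without a Producer: the scheduler then assumes
    //    single-use data and emits the warning above.
    node->getOperator()->associateInput(0, dataTensor);
}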