Maxence Naud authored:
- Change parent class from Operator to OperatorTensor
- Remove shared and non-customized functions from operators
- Uniformize operator behaviour:
  - inputs are set to nullptr at initialization by default
  - parameters whose size can be computed at initialization are initialized there (FC, ConvDepthWise)
- Many more checks in functions with AIDGE_THROW_OR_ABORT()
Scaling.hpp
/********************************************************************************
* Copyright (c) 2023 CEA-List
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
********************************************************************************/
#ifndef __AIDGE_CORE_OPERATOR_Scaling_H__
#define __AIDGE_CORE_OPERATOR_Scaling_H__
#include <cstddef>
#include <memory>
#include <vector>
#include "aidge/utils/StaticAttributes.hpp"
#include "aidge/utils/Registrar.hpp"
#include "aidge/operator/OperatorTensor.hpp"
#include "aidge/backend/OperatorImpl.hpp"
#include "aidge/data/Tensor.hpp"
#include "aidge/graph/Node.hpp"
#include "aidge/utils/Types.h"
namespace Aidge {
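// Attributes of the Scaling operator: a multiplicative scaling factor applied to
// the input, the number of bits of the quantized output, and whether that output
// is unsigned (interpretation inferred from the names and the constructor below).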
enum class ScalingAttr {
scalingFactor, quantizedNbBits, isOutputUnsigned
};
class Scaling_Op : public OperatorTensor,
public Registrable<Scaling_Op, std::string, std::unique_ptr<OperatorImpl>(const Scaling_Op&)>,
public StaticAttributes<ScalingAttr, float, std::size_t, bool> {
public:
static constexpr const char* Type = "Scaling";
Scaling_Op() = delete;
using Attributes_ = StaticAttributes<ScalingAttr, float, std::size_t, bool>;
template <ScalingAttr e> using attr = typename Attributes_::template attr<e>;
Scaling_Op(float scalingFactor, std::size_t nbBits, bool isOutputUnsigned)
: OperatorTensor(Type, 1, 0, 1),
Attributes_(
attr<ScalingAttr::scalingFactor>(scalingFactor),
attr<ScalingAttr::quantizedNbBits>(nbBits),
attr<ScalingAttr::isOutputUnsigned>(isOutputUnsigned))
{}
/**
* @brief Copy-constructor. Copy the operator attributes and its output tensor(s), but not its input tensors (the new operator has no input associated).
* @param op Operator to copy.
*/
Scaling_Op(const Scaling_Op& op)
: OperatorTensor(op),
Attributes_(op)
{
mImpl = op.mImpl ? Registrar<Scaling_Op>::create(mOutputs[0]->getImpl()->backend())(*this) : nullptr;
}
/**
* @brief Clone the operator using its copy-constructor.
* @see Operator::Scaling_Op
*/
std::shared_ptr<Operator> clone() const override {
return std::make_shared<Scaling_Op>(*this);
}
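/*
 * Illustrative sketch (not part of the original header) of the copy semantics
 * described above; values and variable names are hypothetical:
 *
 *     Aidge::Scaling_Op original(0.5f, 8, true);
 *     std::shared_ptr<Aidge::Operator> copy = original.clone();
 *     // 'copy' holds the same scalingFactor, quantizedNbBits and
 *     // isOutputUnsigned attributes, but no input tensor is associated with it.
 */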
void setBackend(const std::string& name) override {
mImpl = Registrar<Scaling_Op>::create(name)(*this);
mOutputs[0]->setBackend(name);
// FIXME: temporary workaround
mInputs[0]->setBackend(name);
}
static const std::vector<std::string> getInputsName() {
return {"data_input"};
}
static const std::vector<std::string> getOutputsName() {
return {"data_output"};
}
};
/*
inline std::shared_ptr<Node> Scaling(float scalingFactor = 1.0f, const std::string& name = "") {
return std::make_shared<Node>(std::make_shared<Scaling_Op>(scalingFactor), name);
}
*/
inline std::shared_ptr<Node> Scaling(float scalingFactor = 1.0f, std::size_t quantizedNbBits = 8, bool isOutputUnsigned = true, const std::string& name = "") {
return std::make_shared<Node>(std::make_shared<Scaling_Op>(scalingFactor, quantizedNbBits, isOutputUnsigned), name);
}
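/*
 * Minimal usage sketch (illustrative, not part of the original header). Assumes
 * aidge_core is built and a backend implementation (e.g. "cpu" from
 * aidge_backend_cpu) is registered for Scaling_Op; the node name "scaling0" is
 * hypothetical:
 *
 *     std::shared_ptr<Aidge::Node> scaling = Aidge::Scaling(0.5f, 8, true, "scaling0");
 *     auto op = std::static_pointer_cast<Aidge::Scaling_Op>(scaling->getOperator());
 *     op->setBackend("cpu"); // selects the implementation registered under "cpu"
 */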
} // namespace Aidge
namespace {
template <>
const char* const EnumStrings<Aidge::ScalingAttr>::data[]
= {"scalingFactor", "quantizedNbBits", "isOutputUnsigned"};
}
#endif /* __AIDGE_CORE_OPERATOR_Scaling_H__ */