Commit c3e8588e authored by Inna Kucher

adding scaling operator

parent 1d0c8d19
/********************************************************************************
* Copyright (c) 2023 CEA-List
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
********************************************************************************/
#ifndef __AIDGE_CORE_OPERATOR_Scaling_H__
#define __AIDGE_CORE_OPERATOR_Scaling_H__

#include <cassert>
#include <cstring>
#include <iostream>
#include <memory>
#include <vector>

#include "aidge/utils/Parameter.hpp"
#include "aidge/utils/Registrar.hpp"
#include "aidge/operator/Operator.hpp"
#include "aidge/backend/OperatorImpl.hpp"
#include "aidge/data/Tensor.hpp"
#include "aidge/data/Data.hpp"
#include "aidge/graph/Node.hpp"
#include "aidge/utils/Types.h"

namespace Aidge {

enum class ScalingParam {
    scalingFactor
};
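
// Scaling operator: holds a single scalingFactor parameter that the backend implementation
// (registered through Registrar<Scaling_Op>) applies to the operator's single input tensor.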
class Scaling_Op : public Operator,
                   public Registrable<Scaling_Op, std::string, std::unique_ptr<OperatorImpl>(const Scaling_Op&)>,
                   public Parameterizable<ScalingParam, float> {
public:
    // FIXME: change accessibility
    std::shared_ptr<Tensor> mInput = std::make_shared<Tensor>();
    const std::shared_ptr<Tensor> mOutput = std::make_shared<Tensor>();

public:
    static constexpr const char* Type = "Scaling";

    Scaling_Op() = delete;

    using Parameterizable_ = Parameterizable<ScalingParam, float>;
    template <ScalingParam e> using param = typename Parameterizable_::template param<e>;

    Scaling_Op(float scalingFactor)
        : Operator(Type),
          Parameterizable_(
              param<ScalingParam::scalingFactor>(scalingFactor))
    {
        setDatatype(DataType::Float32);
    }

    void associateInput(__attribute__((unused)) const IOIndex_t inputIdx, std::shared_ptr<Data> data) override final {
        assert(inputIdx == 0 && "operator supports only 1 input");
        assert(strcmp(data->type(), Tensor::Type) == 0 && "input data must be of Tensor type");
        mInput = std::dynamic_pointer_cast<Tensor>(data);
    }

    void computeOutputDims() override final {
        if (!mInput->empty())
            mOutput->resize(mInput->dims());
    }

    bool outputDimsForwarded() const override final {
        return !(mOutput->empty());
    }

    inline Tensor& input(__attribute__((unused)) const IOIndex_t inputIdx) const override final { return *(mInput.get()); }
    inline Tensor& output(__attribute__((unused)) const IOIndex_t outputIdx) const override final { return *(mOutput.get()); }

    inline std::shared_ptr<Tensor> getInput(__attribute__((unused)) const IOIndex_t inputIdx) const override final {
        assert((inputIdx == 0) && "Scaling Operator has only 1 input");
        return mInput;
    }
    inline std::shared_ptr<Tensor> getOutput(__attribute__((unused)) const IOIndex_t outputIdx) const override final {
        assert((outputIdx == 0) && "Scaling Operator has only 1 output");
        return mOutput;
    }

    std::shared_ptr<Data> getRawInput(__attribute__((unused)) const IOIndex_t inputIdx) const override final {
        assert(inputIdx == 0 && "operator supports only 1 input");
        return std::static_pointer_cast<Data>(mInput);
    }
    std::shared_ptr<Data> getRawOutput(__attribute__((unused)) const IOIndex_t outputIdx) const override final {
        assert(outputIdx == 0 && "operator supports only 1 output");
        return mOutput;
    }

    void setBackend(const std::string& name) {
        mImpl = Registrar<Scaling_Op>::create(name)(*this);
        mOutput->setBackend(name);

        // FIXME: temporary workaround
        mInput->setBackend(name);
    }

    void setDatatype(const DataType& datatype) {
        mOutput->setDatatype(datatype);

        // FIXME: temporary workaround
        mInput->setDatatype(datatype);
    }

    inline IOIndex_t nbInputs() const noexcept override final { return 1; }
    inline IOIndex_t nbDataInputs() const noexcept override final { return 1; }
    inline IOIndex_t nbOutputs() const noexcept override final { return 1; }
};

inline std::shared_ptr<Node> Scaling(float scalingFactor = 1.0f, const char* name = nullptr) {
    // FIXME: properly handle default w&b initialization in every case
    return std::make_shared<Node>(std::make_shared<Scaling_Op>(scalingFactor), name);
}

} // namespace Aidge
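
// Maps the ScalingParam enum value to its string name (e.g. for parameter lookup and printing).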
namespace {
template <>
const char* const EnumStrings<Aidge::ScalingParam>::data[]
= {"scalingFactor"};
}
#endif /* __AIDGE_CORE_OPERATOR_Scaling_H__ */
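
For context, below is a minimal usage sketch of the factory and operator API declared above. It assumes this header is installed as aidge/operator/Scaling.hpp and that a backend implementation for Scaling_Op has been registered under the name "cpu" (e.g. by the aidge_backend_cpu module); the tensor dimensions and node name are illustrative, not part of this commit.

#include <memory>
#include <vector>

#include "aidge/operator/Scaling.hpp"

int main() {
    // Wrap the operator in a graph node through the Scaling() factory declared above.
    std::shared_ptr<Aidge::Node> scalingNode = Aidge::Scaling(0.5f, "scale1");

    // Or drive the operator directly: associate an input tensor, forward its
    // dimensions to the output, then bind a registered backend implementation.
    auto op = std::make_shared<Aidge::Scaling_Op>(0.5f);
    auto input = std::make_shared<Aidge::Tensor>();
    input->resize(std::vector<Aidge::DimSize_t>({1, 3, 224, 224}));
    op->associateInput(0, input);
    op->computeOutputDims();   // the output tensor now has the same dims as the input
    op->setBackend("cpu");     // fails if no "cpu" implementation is registered for Scaling_Op
    return 0;
}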