
I-ViT Integration

Merged: Lucas RAKOTOARIVONY requested to merge lrakotoarivony/aidge_core:main into main
5 files changed: +138 -2 (file shown below: +80 -0)
 
/********************************************************************************
 * Copyright (c) 2024 Thales
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0.
 *
 * SPDX-License-Identifier: EPL-2.0
 * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
 * Date: 10.09.2024
 *
 ********************************************************************************/

#ifndef AIDGE_CORE_OPERATOR_ILAYERNORM_H_
#define AIDGE_CORE_OPERATOR_ILAYERNORM_H_

#include <cassert>
#include <memory>
#include <vector>

#include "aidge/backend/OperatorImpl.hpp"
#include "aidge/graph/Node.hpp"
#include "aidge/operator/OperatorTensor.hpp"
#include "aidge/utils/ErrorHandling.hpp"
#include "aidge/utils/Registrar.hpp"
#include "aidge/utils/Types.h"

namespace Aidge {

class ILayerNorm_Op : public OperatorTensor,
    public Registrable<ILayerNorm_Op, std::string, std::shared_ptr<OperatorImpl>(const ILayerNorm_Op&)> {

public:
    static const std::string Type;

    // Default constructor: one data input, two parameter inputs (weight, bias), one output.
    ILayerNorm_Op()
        : OperatorTensor(Type, {InputCategory::Data, InputCategory::Param, InputCategory::Param}, 1)
    {}

    /**
     * @brief Copy-constructor. Copy the operator attributes and its output tensor(s), but not its input tensors (the new operator has no input associated).
     * @param op Operator to copy.
     */
    ILayerNorm_Op(const ILayerNorm_Op& op)
        : OperatorTensor(op)
    {
        if (op.mImpl) {
            SET_IMPL_MACRO(ILayerNorm_Op, *this, op.backend());
        } else {
            mImpl = nullptr;
        }
    }

    /**
     * @brief Clone the operator using its copy-constructor.
     * @see Operator::ILayerNorm_Op
     */
    std::shared_ptr<Operator> clone() const override {
        return std::make_shared<ILayerNorm_Op>(*this);
    }

    // Associate the tensor `data` with the input at index `inputIdx`.
    void associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) override final;

    // Compute the output dimensions from the input dimensions.
    bool forwardDims(bool allowDataDependency = false) override final;

    // Select the registered backend implementation for this operator.
    void setBackend(const std::string& name, DeviceIdx_t device = 0) override final;

    static const std::vector<std::string> getInputsName() {
        return {"data_input", "weight", "bias"};
    }

    static const std::vector<std::string> getOutputsName() {
        return {"data_output"};
    }
};

// Factory creating a graph Node that wraps an ILayerNorm_Op.
inline std::shared_ptr<Node> ILayerNorm(const std::string& name = "") {
    return std::make_shared<Node>(std::make_shared<ILayerNorm_Op>(), name);
}

}  // namespace Aidge

#endif /* AIDGE_CORE_OPERATOR_ILAYERNORM_H_ */
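For context, a minimal usage sketch (not part of this merge request) showing how the factory declared above can be called. The include path and the commented-out "cpu" backend name are assumptions; only the ILayerNorm factory and the getInputsName()/getOutputsName() helpers come from the header itself.

// Minimal usage sketch, assuming the header lives at the path suggested by its
// include guard and that a backend (e.g. a cpu one) is registered elsewhere.
#include <iostream>
#include <memory>

#include "aidge/operator/ILayerNorm.hpp"  // assumed location of the header above

int main() {
    // Create a graph node wrapping an ILayerNorm_Op; it expects three inputs
    // ("data_input", "weight", "bias") and produces one output.
    std::shared_ptr<Aidge::Node> ln = Aidge::ILayerNorm("ilayernorm0");

    // Input/output names as declared in the header.
    for (const auto& in : Aidge::ILayerNorm_Op::getInputsName()) {
        std::cout << "input: " << in << '\n';
    }
    for (const auto& out : Aidge::ILayerNorm_Op::getOutputsName()) {
        std::cout << "output: " << out << '\n';
    }

    // A backend implementation must be selected before the operator can run;
    // the backend name here is an assumption, not defined by this header.
    // ln->getOperator()->setBackend("cpu");
    return 0;
}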