Skip to content
Snippets Groups Projects
Commit e217069c authored by Olivier BICHLER's avatar Olivier BICHLER
Browse files

Merge remote-tracking branch 'origin/main' into dev

parents 0a40c485 49751392
No related branches found
No related tags found
1 merge request!212Version 0.3.0
...@@ -76,6 +76,10 @@ ...@@ -76,6 +76,10 @@
#include "aidge/scheduler/Scheduler.hpp" #include "aidge/scheduler/Scheduler.hpp"
#include "aidge/stimuli/Stimulus.hpp" #include "aidge/stimuli/Stimulus.hpp"
#include "aidge/operator/ShiftMax.hpp"
#include "aidge/operator/ShiftGELU.hpp"
#include "aidge/operator/ILayerNorm.hpp"
#include "aidge/recipes/Recipes.hpp" #include "aidge/recipes/Recipes.hpp"
#include "aidge/utils/Attributes.hpp" #include "aidge/utils/Attributes.hpp"
......
/********************************************************************************
* Copyright (c) 2024 Thales
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
* Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
* Date: 10.09.2024
*
********************************************************************************/
#ifndef AIDGE_CORE_OPERATOR_ILAYERNORM_H_
#define AIDGE_CORE_OPERATOR_ILAYERNORM_H_
#include <cassert>
#include <memory>
#include <vector>
#include "aidge/backend/OperatorImpl.hpp"
#include "aidge/graph/Node.hpp"
#include "aidge/operator/OperatorTensor.hpp"
#include "aidge/utils/ErrorHandling.hpp"
#include "aidge/utils/Registrar.hpp"
#include "aidge/utils/Types.h"
namespace Aidge {
/**
 * @brief Integer-friendly LayerNorm operator (I-LayerNorm).
 *
 * Takes three inputs — one Data tensor and two Param tensors (weight, bias) —
 * and produces one output tensor. Implementations are looked up through the
 * Registrable mechanism, keyed by backend name.
 */
class ILayerNorm_Op : public OperatorTensor,
public Registrable<ILayerNorm_Op, std::string, std::shared_ptr<OperatorImpl>(const ILayerNorm_Op&)> {
public:
// Unique operator type string; also the key used by SET_IMPL_MACRO.
static const std::string Type;
/**
 * @brief Default constructor: declares 1 Data input, 2 Param inputs
 * (weight, bias) and 1 output.
 */
ILayerNorm_Op()
: OperatorTensor(Type, {InputCategory::Data, InputCategory::Param, InputCategory::Param}, 1)
{}
/**
 * @brief Copy-constructor. Copy the operator attributes and its output tensor(s), but not its input tensors (the new operator has no input associated).
 * @param op Operator to copy.
 */
ILayerNorm_Op(const ILayerNorm_Op& op)
: OperatorTensor(op)
{
// Re-resolve the implementation for the copied operator's backend; a copy
// without an implementation keeps mImpl null until setBackend() is called.
if (op.mImpl){
SET_IMPL_MACRO(ILayerNorm_Op, *this, op.backend());
}else{
mImpl = nullptr;
}
}
/**
 * @brief Clone the operator using its copy-constructor.
 * @see Operator::ILayerNorm_Op
 */
std::shared_ptr<Operator> clone() const override {
return std::make_shared<ILayerNorm_Op>(*this);
}
// Binds a Tensor to input slot inputIdx; 1-D data inputs are reshaped to (1, N).
void associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) override final;
// Propagates dimensions from the data input to the parameters and the output.
bool forwardDims(bool allowDataDependency = false) override final;
// Selects the backend implementation and moves output/parameter tensors to it.
void setBackend(const std::string& name, DeviceIdx_t device = 0) override final;
// Canonical input slot names, in slot order.
static const std::vector<std::string> getInputsName(){
return {"data_input", "weight", "bias"};
}
// Canonical output slot names.
static const std::vector<std::string> getOutputsName(){
return {"data_output"};
}
};
/**
 * @brief Factory helper: wraps a freshly constructed ILayerNorm_Op in a graph Node.
 * @param name Optional node name (empty by default).
 * @return Shared pointer to the newly created Node.
 */
inline std::shared_ptr<Node> ILayerNorm(const std::string& name = "") {
auto op = std::make_shared<ILayerNorm_Op>();
return std::make_shared<Node>(op, name);
}
}
#endif /* AIDGE_CORE_OPERATOR_ILAYERNORM_H_ */
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
* *
* SPDX-License-Identifier: EPL-2.0 * SPDX-License-Identifier: EPL-2.0
* Author: Lucas RAKOTOARIVONY, Thales Research & Technology France * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
* Date: 25.06.2024 * Date: 10.09.2024
* *
********************************************************************************/ ********************************************************************************/
......
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
* *
* SPDX-License-Identifier: EPL-2.0 * SPDX-License-Identifier: EPL-2.0
* Author: Lucas RAKOTOARIVONY, Thales Research & Technology France * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
* Date: 25.06.2024 * Date: 10.09.2024
* *
********************************************************************************/ ********************************************************************************/
......
/********************************************************************************
* Copyright (c) 2024 Thales
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
* Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
* Date: 10.09.2024
*
********************************************************************************/
#include "aidge/operator/ILayerNorm.hpp"
#include <memory>
#include <string>
#include "aidge/data/Tensor.hpp"
#include "aidge/utils/Types.h"
// Operator type string; used as the registrar key when resolving backend implementations.
const std::string Aidge::ILayerNorm_Op::Type = "ILayerNorm";
/**
 * @brief Associates a Tensor with one of this operator's input slots.
 * @param inputIdx Slot index (0: data, 1: weight, 2: bias).
 * @param data Data to bind; must be a Tensor.
 */
void Aidge::ILayerNorm_Op::associateInput(const Aidge::IOIndex_t inputIdx, const std::shared_ptr<Aidge::Data>& data) {
    // Bound-check against the operator's declared input count rather than a
    // hard-coded `3`, so the check cannot drift from the nbInputs() value the
    // message already prints.
    AIDGE_ASSERT(inputIdx < nbInputs(), "Operator {} supports only {} inputs", type(), nbInputs());
    // Check for null before dereferencing — the original crashed on a null `data`
    // instead of reporting a usable error.
    AIDGE_ASSERT(data != nullptr && data->type() == Tensor::Type, "input data must be of Tensor type");
    mInputs[inputIdx] = std::dynamic_pointer_cast<Tensor>(data);
    // Promote a 1-D data input to a (1, N) row vector so the rest of the
    // operator can rely on at least 2 dimensions (forwardDims reads dims()[1]).
    if (inputIdx == 0 && getInput(0)->nbDims() == 1) {
        mInputs[inputIdx]->resize({1, getInput(inputIdx)->size()});
    }
}
/**
 * @brief Propagates tensor dimensions through the operator.
 * @return true if all inputs are associated and dims were computed, false otherwise.
 */
bool Aidge::ILayerNorm_Op::forwardDims(bool /*allowDataDependency*/) {
if (inputsAssociated()) {
// Feature count is read from axis 1 of the data input.
// NOTE(review): assumes the data input has >= 2 dims — associateInput()
// promotes 1-D inputs to (1, N), but inputs wired by other paths may not
// have been; confirm callers guarantee this.
const DimSize_t nbFeatures = getInput(0)->dims()[1];
for (std::size_t i = 0; i < nbInputs(); ++i) {
// Resize each Param input (weight, bias) to a 1-D tensor of nbFeatures
// whenever its current size disagrees with the data's feature dimension.
if(inputCategory(i) == InputCategory::Param && getInput(i)->size() != nbFeatures) {
getInput(i)->resize({getInput(0)->dims()[1]});
}
}
// The output has exactly the same shape as the data input.
mOutputs[0]->resize(getInput(0)->dims());
return true;
}
// Not all inputs associated yet: dimensions cannot be propagated.
return false;
}
/**
 * @brief Selects the backend implementation and moves the output and parameter
 * tensors (weight, bias) to that backend/device.
 * @param name Backend name used as the registrar key.
 * @param device Device index on that backend (default 0).
 */
void Aidge::ILayerNorm_Op::setBackend(const std::string& name, DeviceIdx_t device) {
    // Resolve the implementation registered for this backend.
    SET_IMPL_MACRO(ILayerNorm_Op, *this, name);
    mOutputs[0]->setBackend(name, device);
    // Propagate the backend to the parameter inputs so they are allocated
    // alongside the operator. Guard against not-yet-associated inputs — the
    // original dereferenced getInput(1)/getInput(2) unconditionally and would
    // crash if setBackend() ran before the graph was fully connected.
    if (getInput(1)) {
        getInput(1)->setBackend(name, device);
    }
    if (getInput(2)) {
        getInput(2)->setBackend(name, device);
    }
}
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment