diff --git a/include/aidge/aidge.hpp b/include/aidge/aidge.hpp
index 7b6a3191543e375dba54d51eee08265e881695db..cadd8c85ca541862cc6f298fa055713a6f65e3ed 100644
--- a/include/aidge/aidge.hpp
+++ b/include/aidge/aidge.hpp
@@ -76,6 +76,10 @@
 #include "aidge/scheduler/Scheduler.hpp"
 #include "aidge/stimuli/Stimulus.hpp"
 
+#include "aidge/operator/ShiftMax.hpp"
+#include "aidge/scheduler/ShiftGELU.hpp"
+#include "aidge/stimuli/ILayerNorm.hpp"
+
 #include "aidge/recipes/Recipes.hpp"
 
 #include "aidge/utils/Attributes.hpp"
diff --git a/include/aidge/operator/ILayerNorm.hpp b/include/aidge/operator/ILayerNorm.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..9eec045922643d9e0d1ae80e66e8ab61436b3abd
--- /dev/null
+++ b/include/aidge/operator/ILayerNorm.hpp
@@ -0,0 +1,99 @@
+/********************************************************************************
+ * Copyright (c) 2024 Thales
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License 2.0 which is available at
+ * http://www.eclipse.org/legal/epl-2.0.
+ *
+ * SPDX-License-Identifier: EPL-2.0
+ * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
+ * Date: 10.09.2024
+ *
+ ********************************************************************************/
+
+#ifndef AIDGE_CORE_OPERATOR_ILAYERNORM_H_
+#define AIDGE_CORE_OPERATOR_ILAYERNORM_H_
+
+#include <cassert>
+#include <memory>
+#include <vector>
+
+#include "aidge/backend/OperatorImpl.hpp"
+#include "aidge/graph/Node.hpp"
+#include "aidge/operator/OperatorTensor.hpp"
+#include "aidge/utils/ErrorHandling.hpp"
+#include "aidge/utils/Registrar.hpp"
+#include "aidge/utils/Types.h"
+
+namespace Aidge {
+
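+/**
+ * @brief ILayerNorm operator: layer normalization with weight and bias parameter inputs.
+ */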
+class ILayerNorm_Op : public OperatorTensor,
+    public Registrable<ILayerNorm_Op, std::string, std::shared_ptr<OperatorImpl>(const ILayerNorm_Op&)> {
+public:
+    static const std::string Type;
+
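+    /**
+     * @brief Default constructor: declares one data input, two parameter inputs (weight and bias) and one output.
+     */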
+    ILayerNorm_Op()
+    : OperatorTensor(Type, {InputCategory::Data, InputCategory::Param, InputCategory::Param}, 1)
+    {}
+
+    /**
+     * @brief Copy-constructor. Copy the operator attributes and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    ILayerNorm_Op(const ILayerNorm_Op& op)
+        : OperatorTensor(op)
+    {
+        if (op.mImpl) {
+            SET_IMPL_MACRO(ILayerNorm_Op, *this, op.backend());
+        } else {
+            mImpl = nullptr;
+        }
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::ILayerNorm_Op
+     */
+    std::shared_ptr<Operator> clone() const override {
+        return std::make_shared<ILayerNorm_Op>(*this);
+    }
+
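+    /**
+     * @brief Associate an input tensor to the operator; a 1-D data input is reshaped to {1, size}.
+     */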
+    void associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) override final;
+
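+    /**
+     * @brief Resize the output to the data-input dimensions and the weight/bias to the feature dimension.
+     */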
+    bool forwardDims(bool allowDataDependency = false) override final;
+
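+    /**
+     * @brief Set the backend of the output tensor and of the weight and bias parameter tensors.
+     */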
+    void setBackend(const std::string& name, DeviceIdx_t device = 0) override final;
+
+    static const std::vector<std::string> getInputsName(){
+        return {"data_input", "weight", "bias"};
+    }
+    static const std::vector<std::string> getOutputsName(){
+        return {"data_output"};
+    }
+};
+
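+/**
+ * @brief Create an ILayerNorm node wrapping an ILayerNorm_Op.
+ * @param name Optional name of the node.
+ */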
+inline std::shared_ptr<Node> ILayerNorm(const std::string& name = "") {
+    return std::make_shared<Node>(std::make_shared<ILayerNorm_Op>(), name);
+}
+} // namespace Aidge
+
+#endif /* AIDGE_CORE_OPERATOR_ILAYERNORM_H_ */
diff --git a/include/aidge/operator/ShiftGELU.hpp b/include/aidge/operator/ShiftGELU.hpp
index ee69ceb1e031d7182b721449bad7e6f5a57492e0..30f1d71e0a56d92a70830a5def81040e0c5a186c 100644
--- a/include/aidge/operator/ShiftGELU.hpp
+++ b/include/aidge/operator/ShiftGELU.hpp
@@ -7,7 +7,7 @@
  *
  * SPDX-License-Identifier: EPL-2.0
  * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
- * Date: 25.06.2024
+ * Date: 10.09.2024
  *
  ********************************************************************************/
 
diff --git a/include/aidge/operator/ShiftMax.hpp b/include/aidge/operator/ShiftMax.hpp
index 1e33f60687ebcde2194fa2cf3b074ba48ee32654..9fbd81aedef1eb640a7ce805d745297edb640560 100644
--- a/include/aidge/operator/ShiftMax.hpp
+++ b/include/aidge/operator/ShiftMax.hpp
@@ -7,7 +7,7 @@
  *
  * SPDX-License-Identifier: EPL-2.0
  * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
- * Date: 25.06.2024
+ * Date: 10.09.2024
  *
  ********************************************************************************/
 
diff --git a/src/operator/ILayerNorm.cpp b/src/operator/ILayerNorm.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..dafe7ed0fcea7164c085c1f1d2913a7f461be12f
--- /dev/null
+++ b/src/operator/ILayerNorm.cpp
@@ -0,0 +1,54 @@
+/********************************************************************************
+ * Copyright (c) 2024 Thales
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License 2.0 which is available at
+ * http://www.eclipse.org/legal/epl-2.0.
+ *
+ * SPDX-License-Identifier: EPL-2.0
+ * Author: Lucas RAKOTOARIVONY, Thales Research & Technology France
+ * Date: 10.09.2024
+ *
+ ********************************************************************************/
+
+#include "aidge/operator/ILayerNorm.hpp"
+
+#include <memory>
+#include <string>
+
+#include "aidge/data/Tensor.hpp"
+#include "aidge/utils/Types.h"
+
+const std::string Aidge::ILayerNorm_Op::Type = "ILayerNorm";
+
+void Aidge::ILayerNorm_Op::associateInput(const Aidge::IOIndex_t inputIdx, const std::shared_ptr<Aidge::Data>& data) {
+    AIDGE_ASSERT(inputIdx < 3, "Operator {} supports only {} inputs", type(), nbInputs());
+    AIDGE_ASSERT(data->type() == Tensor::Type, "input data must be of Tensor type");
+    mInputs[inputIdx] = std::dynamic_pointer_cast<Tensor>(data);
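+    // A 1-D data input is interpreted as a single sample and reshaped to {1, size}.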
+    if (inputIdx == 0 && getInput(0)->nbDims() == 1)
+        mInputs[inputIdx]->resize({1, getInput(inputIdx)->size()});
+}
+
+bool Aidge::ILayerNorm_Op::forwardDims(bool /*allowDataDependency*/) {
+    if (inputsAssociated()) {
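+        // weight and bias are resized to match the feature dimension (dims()[1]) of the data input.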
+        const DimSize_t nbFeatures = getInput(0)->dims()[1];
+        for (std::size_t i = 0; i < nbInputs(); ++i) {
+            if (inputCategory(i) == InputCategory::Param && getInput(i)->size() != nbFeatures) {
+                getInput(i)->resize({nbFeatures});
+            }
+        }
+        mOutputs[0]->resize(getInput(0)->dims());
+        return true;
+    }
+    return false;
+}
+
+
+void Aidge::ILayerNorm_Op::setBackend(const std::string& name, DeviceIdx_t device) {
+    SET_IMPL_MACRO(ILayerNorm_Op, *this, name);
+    mOutputs[0]->setBackend(name, device);
+    getInput(1)->setBackend(name, device);
+    getInput(2)->setBackend(name, device);
+}
\ No newline at end of file