diff --git a/aidge_core/unit_tests/test_parameters.py b/aidge_core/unit_tests/test_parameters.py
index 02c7598820d2429bc49ff9a2f02c8ee841783173..1e24276745312d4483c268156963e0efe413b46c 100644
--- a/aidge_core/unit_tests/test_parameters.py
+++ b/aidge_core/unit_tests/test_parameters.py
@@ -40,7 +40,7 @@ class test_parameters(unittest.TestCase):
 
     def test_matmul(self):
         out_channels = 8
-        matmul_op = aidge_core.Matmul(out_channels).get_operator()
+        matmul_op = aidge_core.MatMul(out_channels).get_operator()
         self.assertEqual(matmul_op.get("OutChannels"), out_channels)
 
     def test_producer_1D(self):
diff --git a/include/aidge/aidge.hpp b/include/aidge/aidge.hpp
index cfda3ac7fa024f8cf80b4589d978b9b5bff5b4f0..9c0d8c0b321892d60f40d52eb2a44d4d0fec3a2c 100644
--- a/include/aidge/aidge.hpp
+++ b/include/aidge/aidge.hpp
@@ -33,7 +33,7 @@
 #include "aidge/operator/ConvDepthWise.hpp"
 #include "aidge/operator/FC.hpp"
 #include "aidge/operator/GenericOperator.hpp"
-#include "aidge/operator/Matmul.hpp"
+#include "aidge/operator/MatMul.hpp"
 #include "aidge/operator/MaxPooling.hpp"
 //#include "aidge/operator/MetaOperator.hpp"
 #include "aidge/operator/Operator.hpp"
diff --git a/include/aidge/operator/Matmul.hpp b/include/aidge/operator/MatMul.hpp
similarity index 78%
rename from include/aidge/operator/Matmul.hpp
rename to include/aidge/operator/MatMul.hpp
index 639b366912060b3e085510f312d94568e6b65f03..77ab2c972a636aefd8aede428c025dba2bc0c545 100644
--- a/include/aidge/operator/Matmul.hpp
+++ b/include/aidge/operator/MatMul.hpp
@@ -27,29 +27,29 @@
 #include "aidge/utils/Registrar.hpp"
 
 namespace Aidge {
-enum class MatmulParam { OutChannels };
+enum class MatMulParam { OutChannels };
 
-class Matmul_Op : public Operator,
-              public Registrable<Matmul_Op,
+class MatMul_Op : public Operator,
+              public Registrable<MatMul_Op,
                                  std::string,
-                                 std::unique_ptr<OperatorImpl>(const Matmul_Op &)>,
-              public Parameterizable<MatmulParam, DimSize_t> {
+                                 std::unique_ptr<OperatorImpl>(const MatMul_Op &)>,
+              public Parameterizable<MatMulParam, DimSize_t> {
 public:
     std::array<std::shared_ptr<Tensor>, 2> mInputs = {std::make_shared<Tensor>(), std::make_shared<Tensor>()};
     const std::shared_ptr<Tensor> mOutput = std::make_shared<Tensor>();
 
 public:
-    static constexpr const char* Type = "Matmul";
+    static constexpr const char* Type = "MatMul";
 
-    Matmul_Op() = delete;
+    MatMul_Op() = delete;
 
-    using Parameterizable_ = Parameterizable<MatmulParam, DimSize_t>;
-    template <MatmulParam e> using param = typename Parameterizable_::template param<e>;
+    using Parameterizable_ = Parameterizable<MatMulParam, DimSize_t>;
+    template <MatMulParam e> using param = typename Parameterizable_::template param<e>;
 
-    Matmul_Op(DimSize_t out_channels)
+    MatMul_Op(DimSize_t out_channels)
             : Operator(Type),
             Parameterizable_(
-                param<MatmulParam::OutChannels>(out_channels)),
+                param<MatMulParam::OutChannels>(out_channels)),
             mOutput(std::make_shared<Tensor>())
     {
         setDatatype(DataType::Float32);
@@ -64,9 +64,9 @@ public:
     void computeOutputDims() override final {
         if (!mInputs[0]->empty()) {
-            // <in_features**, out_channels>
-            std::array<DimSize_t, 2> weightDims = {static_cast<DimSize_t>(mInputs[0]->size()), this->template get<MatmulParam::OutChannels>()};
+            // <out_channels, in_features**>
+            std::array<DimSize_t, 2> weightDims = {this->template get<MatMulParam::OutChannels>(), static_cast<DimSize_t>(mInputs[0]->sizeM1())};
-            // <out_channels, batch>
-            std::array<DimSize_t, 1> outputDims = {this->template get<MatmulParam::OutChannels>()};
+            // <batch, out_channels>
+            std::array<DimSize_t, 2> outputDims = {mInputs[0]->dims()[0], this->template get<MatMulParam::OutChannels>()};
 
             mInputs[1]->resize(weightDims);
             mOutput->resize(outputDims);
@@ -107,7 +107,7 @@ public:
 
 
     void setBackend(const std::string& name) {
-        mImpl = Registrar<Matmul_Op>::create(name)(*this);
+        mImpl = Registrar<MatMul_Op>::create(name)(*this);
         mOutput->setBackend(name);
 
         // FIXME: temporary workaround
@@ -129,17 +129,17 @@ public:
     inline IOIndex_t nbOutputs() const noexcept override final { return 1; }
 };
 
-inline std::shared_ptr<Node> Matmul(DimSize_t out_channels, const std::string& name = "") {
-    // FIXME: properly handle default w&b initialization in every cases
-    auto matmul = std::make_shared<Node>(std::make_shared<Matmul_Op>(out_channels), name);
-    addProducer(matmul, 1, {1, out_channels}, "w");
+inline std::shared_ptr<Node> MatMul(DimSize_t out_channels, const std::string& name = "") {
+    // FIXME: properly handle default w initialization in every case
+    auto matmul = std::make_shared<Node>(std::make_shared<MatMul_Op>(out_channels), name);
+    addProducer(matmul, 1, {out_channels, 1}, "w");
     return matmul;
 }
 } // namespace Aidge
 
 namespace {
 template <>
-const char *const EnumStrings<Aidge::MatmulParam>::data[] = {"OutChannels"};
+const char *const EnumStrings<Aidge::MatMulParam>::data[] = {"OutChannels"};
 }
 
 #endif /* AIDGE_CORE_OPERATOR__MATMUL_H_ */
diff --git a/python_binding/operator/pybind_Matmul.cpp b/python_binding/operator/pybind_Matmul.cpp
index b6ae27289fabe1fe4dbeea60704a61373bc850cf..b0b3c3df6dfbd2c50969da40c2621dbbdf04178b 100644
--- a/python_binding/operator/pybind_Matmul.cpp
+++ b/python_binding/operator/pybind_Matmul.cpp
@@ -11,7 +11,7 @@
 
 #include <pybind11/pybind11.h>
 
-#include "aidge/operator/Matmul.hpp"
+#include "aidge/operator/MatMul.hpp"
 #include "aidge/utils/Parameter.hpp"
 #include "aidge/backend/OperatorImpl.hpp"
 #include "aidge/operator/Operator.hpp"
@@ -20,13 +20,13 @@
 namespace py = pybind11;
 namespace Aidge {
 
-void declare_Matmul(py::module &m) {
-  py::class_<Matmul_Op, std::shared_ptr<Matmul_Op>, Operator, PyAbstractParametrizable>(m, "Matmul_Op", py::multiple_inheritance());
+void declare_MatMul(py::module &m) {
+  py::class_<MatMul_Op, std::shared_ptr<MatMul_Op>, Operator, PyAbstractParametrizable>(m, "MatMul_Op", py::multiple_inheritance());
 
-  m.def("Matmul", &Matmul, py::arg("out_channels"), py::arg("name") = "");
+  m.def("MatMul", &MatMul, py::arg("out_channels"), py::arg("name") = "");
 }
 
-void init_Matmul(py::module &m) {
-  declare_Matmul(m);
+void init_MatMul(py::module &m) {
+  declare_MatMul(m);
 }
 } // namespace Aidge
diff --git a/python_binding/pybind_core.cpp b/python_binding/pybind_core.cpp
index 6627565898eae837a1d9fd8ce0d6cac9f50c25c2..db116d132ec8ffc504b2c0910eafc1a3da34534f 100644
--- a/python_binding/pybind_core.cpp
+++ b/python_binding/pybind_core.cpp
@@ -28,7 +28,7 @@ void init_ConvDepthWise(py::module&);
 void init_FC(py::module&);
 void init_GenericOperator(py::module&);
 void init_LeakyReLU(py::module&);
-void init_Matmul(py::module&);
+void init_MatMul(py::module&);
 void init_MaxPooling(py::module&);
 void init_Producer(py::module&);
 void init_ReLU(py::module&);
@@ -75,7 +75,7 @@ void init_Aidge(py::module& m){
     init_FC(m);
     init_GenericOperator(m);
     init_LeakyReLU(m);
-    init_Matmul(m);
+    init_MatMul(m);
     init_MaxPooling(m);
     init_ReLU(m);
     init_Softmax(m);