From 2dd3428602a4acc98269b3ec3498996ec3f98e3c Mon Sep 17 00:00:00 2001
From: Olivier BICHLER <olivier.bichler@cea.fr>
Date: Fri, 8 Sep 2023 14:34:10 +0200
Subject: [PATCH] Fixed issue: operators cpy-ctor should not copy input
 associations

---
 include/aidge/operator/Add.hpp             | 27 +++++++++++++--
 include/aidge/operator/AvgPooling.hpp      | 28 +++++++++++++---
 include/aidge/operator/BatchNorm.hpp       | 28 +++++++++++++---
 include/aidge/operator/Conv.hpp            | 28 +++++++++++++---
 include/aidge/operator/ConvDepthWise.hpp   | 28 +++++++++++++---
 include/aidge/operator/FC.hpp              | 28 +++++++++++++---
 include/aidge/operator/GenericOperator.hpp | 24 +++++++++++++
 include/aidge/operator/LeakyReLU.hpp       | 25 ++++++++++++--
 include/aidge/operator/Matmul.hpp          | 28 +++++++++++++---
 include/aidge/operator/MetaOperator.hpp    | 17 ++++++++++
 include/aidge/operator/Producer.hpp        | 30 +++++++++++++----
 include/aidge/operator/ReLU.hpp            | 18 ++++++++++
 include/aidge/operator/Softmax.hpp         | 18 ++++++++++
 include/aidge/utils/Parameter.hpp          |  6 ++++
 unit_tests/graph/Test_GraphView.cpp        | 39 ++++++++++++++++++++--
 15 files changed, 327 insertions(+), 45 deletions(-)

diff --git a/include/aidge/operator/Add.hpp b/include/aidge/operator/Add.hpp
index 5bc3ef0e1..3502e76ae 100644
--- a/include/aidge/operator/Add.hpp
+++ b/include/aidge/operator/Add.hpp
@@ -32,14 +32,13 @@ class Add_Op : public Operator,
 public:
     // FIXME: change accessibility
     std::array<std::shared_ptr<Tensor>, NUM> mInputs;
-    const std::shared_ptr<Tensor> mOutput = std::make_shared<Tensor>(shared_from_this());
+    const std::shared_ptr<Tensor> mOutput = std::make_shared<Tensor>();
 
 public:
     static constexpr const char* Type = "Add";
 
     constexpr Add_Op()
-            : Operator(Type),
-            mOutput(std::make_shared<Tensor>())
+            : Operator(Type)
     {
         assert(NUM > 0 && "Add should have at least one input");
         for (std::size_t i = 0; i<NUM; ++i) {
@@ -47,6 +46,28 @@ public:
         }
         setDatatype(DataType::Float32);
     }
+
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    Add_Op(const Add_Op& op)
+        : Operator(Type),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        assert(NUM > 0 && "Add should have at least one input");
+        for (std::size_t i = 0; i<NUM; ++i) {
+            mInputs[i] = std::make_shared<Tensor>();
+        }
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::Add_Op
+     * @param op Operator to copy.
+     */
     Operator* clone() const override {
         return new Add_Op(*static_cast<const Add_Op*>(this));
     }
diff --git a/include/aidge/operator/AvgPooling.hpp b/include/aidge/operator/AvgPooling.hpp
index 197388ebc..6174fa497 100644
--- a/include/aidge/operator/AvgPooling.hpp
+++ b/include/aidge/operator/AvgPooling.hpp
@@ -44,9 +44,6 @@ public:
     static constexpr const char *Type = "AvgPooling";
 
     AvgPooling_Op() = delete;
-    Operator* clone() const override {
-        return new AvgPooling_Op<DIM>(*static_cast<const AvgPooling_Op<DIM>*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<AvgPoolingParam,
                                              std::array<DimSize_t, DIM>,
@@ -61,11 +58,32 @@ public:
         : Operator(Type),
           Parameterizable_(param<AvgPoolingParam::StrideDims>(stride_dims),
                            param<AvgPoolingParam::KernelDims>(kernel_dims),
-                           param<AvgPoolingParam::PaddingDims>(padding_dims)),
-          mOutput(std::make_shared<Tensor>()) {
+                           param<AvgPoolingParam::PaddingDims>(padding_dims)) {
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    AvgPooling_Op(const AvgPooling_Op& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::AvgPooling_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new AvgPooling_Op<DIM>(*static_cast<const AvgPooling_Op<DIM>*>(this));
+    }
+
     constexpr void associateInput(const IOIndex_t inputIdx, std::shared_ptr<Data> data) override final {
         assert(inputIdx < 1 && "operators supports only 3 inputs");
         (void) inputIdx; // avoid unused warning
diff --git a/include/aidge/operator/BatchNorm.hpp b/include/aidge/operator/BatchNorm.hpp
index 68589c654..51a2c7832 100644
--- a/include/aidge/operator/BatchNorm.hpp
+++ b/include/aidge/operator/BatchNorm.hpp
@@ -43,9 +43,6 @@ public:
     static constexpr const char *Type = "BatchNorm";
 
     BatchNorm_Op() = delete;
-    Operator* clone() const override {
-        return new BatchNorm_Op<DIM>(*static_cast<const BatchNorm_Op<DIM>*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<BatchNormParam, float, float>;
     template <BatchNormParam e>
@@ -54,11 +51,32 @@ public:
     constexpr BatchNorm_Op(float epsilon, float momentum)
         : Operator(Type),
           Parameterizable_(param<BatchNormParam::Epsilon>(epsilon),
-                           param<BatchNormParam::Momentum>(momentum)),
-          mOutput(std::make_shared<Tensor>()) {
+                           param<BatchNormParam::Momentum>(momentum)) {
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    BatchNorm_Op(const BatchNorm_Op<DIM>& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::BatchNorm_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new BatchNorm_Op<DIM>(*static_cast<const BatchNorm_Op<DIM>*>(this));
+    }
+
     // Data operator[](const char* inputName) override final {
     //     std::shared_ptr<Tensor> in = (strcmp(inputName, "data")) ? mInputs[0] :
     //         (strcmp(inputName, "weight") ? mInputs[1] :
diff --git a/include/aidge/operator/Conv.hpp b/include/aidge/operator/Conv.hpp
index 11526916a..63a5d59a0 100644
--- a/include/aidge/operator/Conv.hpp
+++ b/include/aidge/operator/Conv.hpp
@@ -43,9 +43,6 @@ public:
     static constexpr const char *Type = "Conv";
 
     Conv_Op() = delete;
-    Operator* clone() const override {
-        return new Conv_Op<DIM>(*static_cast<const Conv_Op<DIM>*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<ConvParam, std::array<DimSize_t, DIM>, std::array<DimSize_t, DIM>,
                                              DimSize_t, DimSize_t, std::array<DimSize_t, DIM>, std::array<DimSize_t, (DIM<<1) >>;
@@ -64,11 +61,32 @@ public:
                            param<ConvParam::InChannels>(in_channels),
                            param<ConvParam::OutChannels>(out_channels),
                            param<ConvParam::KernelDims>(kernel_dims),
-                           param<ConvParam::PaddingDims>(padding_dims)),
-          mOutput(std::make_shared<Tensor>()) {
+                           param<ConvParam::PaddingDims>(padding_dims)) {
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    Conv_Op(const Conv_Op<DIM>& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::Conv_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new Conv_Op<DIM>(*static_cast<const Conv_Op<DIM>*>(this));
+    }
+
     // Data operator[](const char* inputName) override final {
     //     std::shared_ptr<Tensor> in = (strcmp(inputName, "data")) ? mInputs[0] :
     //         (strcmp(inputName, "weight") ? mInputs[1] :
diff --git a/include/aidge/operator/ConvDepthWise.hpp b/include/aidge/operator/ConvDepthWise.hpp
index 88013c4ea..8485f09ba 100644
--- a/include/aidge/operator/ConvDepthWise.hpp
+++ b/include/aidge/operator/ConvDepthWise.hpp
@@ -47,9 +47,6 @@ class ConvDepthWise_Op : public Operator,
     static constexpr const char *Type = "ConvDepthWise";
 
     ConvDepthWise_Op() = delete;
-    Operator* clone() const override {
-        return new ConvDepthWise_Op<DIM>(*static_cast<const ConvDepthWise_Op<DIM>*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<ConvDepthWiseParam,
                                              std::array<DimSize_t, DIM>,
@@ -69,11 +66,32 @@ class ConvDepthWise_Op : public Operator,
                            param<ConvDepthWiseParam::DilationDims>(dilation_dims),
                            param<ConvDepthWiseParam::Channels>(0),
                            param<ConvDepthWiseParam::KernelDims>(kernel_dims),
-                           param<ConvDepthWiseParam::PaddingDims>(padding_dims)),
-          mOutput(std::make_shared<Tensor>()) {
+                           param<ConvDepthWiseParam::PaddingDims>(padding_dims)) {
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    ConvDepthWise_Op(const ConvDepthWise_Op<DIM>& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::ConvDepthWise_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new ConvDepthWise_Op<DIM>(*static_cast<const ConvDepthWise_Op<DIM>*>(this));
+    }
+
     constexpr void associateInput(const IOIndex_t inputIdx, std::shared_ptr<Data> data) override final {
         assert(inputIdx < 3 && "operators supports only 3 inputs");
         assert(strcmp(data->type(), Tensor::Type) == 0 && "input data must be of Tensor type");
diff --git a/include/aidge/operator/FC.hpp b/include/aidge/operator/FC.hpp
index 244b0322a..8a9592369 100644
--- a/include/aidge/operator/FC.hpp
+++ b/include/aidge/operator/FC.hpp
@@ -43,9 +43,6 @@ public:
     static constexpr const char* Type = "FC";
 
     FC_Op() = delete;
-    Operator* clone() const override {
-        return new FC_Op(*static_cast<const FC_Op*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<FCParam, DimSize_t, bool>;
     template <FCParam e> using param = typename Parameterizable_::template param<e>;
@@ -54,12 +51,33 @@ public:
             : Operator(Type),
             Parameterizable_(
                 param<FCParam::OutChannels>(out_channels),
-                param<FCParam::NoBias>(noBias)),
-            mOutput(std::make_shared<Tensor>())
+                param<FCParam::NoBias>(noBias))
     {
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    FC_Op(const FC_Op& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::FC_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new FC_Op(*static_cast<const FC_Op*>(this));
+    }
+
     void associateInput(const IOIndex_t inputIdx, std::shared_ptr<Data> data) override final {
         assert(inputIdx < 3 && "operators supports only 3 inputs");
         assert(strcmp(data->type(), Tensor::Type)==0 && "input data must be of Tensor type");
diff --git a/include/aidge/operator/GenericOperator.hpp b/include/aidge/operator/GenericOperator.hpp
index ba56746ac..073482658 100644
--- a/include/aidge/operator/GenericOperator.hpp
+++ b/include/aidge/operator/GenericOperator.hpp
@@ -48,6 +48,30 @@ class GenericOperator_Op
             mOutputs[i] = std::make_shared<Tensor>();
         }
     }
+
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    GenericOperator_Op(const GenericOperator_Op& op)
+        : Operator(op.type().c_str()), mParams(op.mParams), mNbDataIn(op.mNbDataIn), mNbIn(op.mNbIn), mNbOut(op.mNbOut)
+    {
+        // cpy-ctor
+        mInputs = std::vector<std::shared_ptr<Tensor>>(mNbIn);
+        for (std::size_t i = 0; i < mNbIn; ++i) {
+            mInputs[i] = std::make_shared<Tensor>();
+        }
+        mOutputs = std::vector<std::shared_ptr<Tensor>>(mNbOut);
+        for (std::size_t i = 0; i < mNbOut; ++i) {
+            mOutputs[i] = std::make_shared<Tensor>(*op.mOutputs[i]);
+        }
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::GenericOperator_Op
+     * @param op Operator to copy.
+     */
     Operator* clone() const override {
         return new GenericOperator_Op(*static_cast<const GenericOperator_Op*>(this));
     }
diff --git a/include/aidge/operator/LeakyReLU.hpp b/include/aidge/operator/LeakyReLU.hpp
index e3476d3fd..e57fdff70 100644
--- a/include/aidge/operator/LeakyReLU.hpp
+++ b/include/aidge/operator/LeakyReLU.hpp
@@ -41,9 +41,6 @@ public:
     static constexpr const char* Type = "LeakyReLU";
 
     LeakyReLU_Op() = delete;
-    Operator* clone() const override {
-        return new LeakyReLU_Op(*static_cast<const LeakyReLU_Op*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<LeakyReLUParam, float>;
     template <LeakyReLUParam e> using param = typename Parameterizable_::template param<e>;
@@ -56,6 +53,28 @@ public:
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    LeakyReLU_Op(const LeakyReLU_Op& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::LeakyReLU_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new LeakyReLU_Op(*static_cast<const LeakyReLU_Op*>(this));
+    }
+
     void associateInput(const IOIndex_t inputIdx, std::shared_ptr<Data> data) override final {
         assert(inputIdx == 0 && "operator supports only 1 input");
         (void) inputIdx; // avoid unused warning
diff --git a/include/aidge/operator/Matmul.hpp b/include/aidge/operator/Matmul.hpp
index 5dbae2e70..5536b70ef 100644
--- a/include/aidge/operator/Matmul.hpp
+++ b/include/aidge/operator/Matmul.hpp
@@ -42,9 +42,6 @@ public:
     static constexpr const char* Type = "Matmul";
 
     Matmul_Op() = delete;
-    Operator* clone() const override {
-        return new Matmul_Op(*static_cast<const Matmul_Op*>(this));
-    }
 
     using Parameterizable_ = Parameterizable<MatmulParam, DimSize_t>;
     template <MatmulParam e> using param = typename Parameterizable_::template param<e>;
@@ -52,12 +49,33 @@ public:
     Matmul_Op(DimSize_t out_channels)
             : Operator(Type),
             Parameterizable_(
-                param<MatmulParam::OutChannels>(out_channels)),
-            mOutput(std::make_shared<Tensor>())
+                param<MatmulParam::OutChannels>(out_channels))
     {
         setDatatype(DataType::Float32);
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    Matmul_Op(const Matmul_Op& op)
+        : Operator(Type),
+          Parameterizable_(op),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::Matmul_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new Matmul_Op(*static_cast<const Matmul_Op*>(this));
+    }
+
     void associateInput(const IOIndex_t inputIdx, std::shared_ptr<Data> data) override final {
         assert(inputIdx < 2 && "operators supports only 2 inputs");
         assert(strcmp(data->type(), Tensor::Type)==0 && "input data must be of Tensor type");
diff --git a/include/aidge/operator/MetaOperator.hpp b/include/aidge/operator/MetaOperator.hpp
index f2bd00118..20dfb2e12 100644
--- a/include/aidge/operator/MetaOperator.hpp
+++ b/include/aidge/operator/MetaOperator.hpp
@@ -21,9 +21,26 @@ public:
         : Operator("MetaOp")
     {
     }
+
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    MetaOperator(const MetaOperator& op)
+        : Operator("MetaOp")
+    {
+        // cpy-ctor
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::MetaOperator
+     * @param op Operator to copy.
+     */
     Operator* clone() const override {
         return new MetaOperator(*static_cast<const MetaOperator*>(this));
     }
+
     ~MetaOperator() = default;
 };
 }
diff --git a/include/aidge/operator/Producer.hpp b/include/aidge/operator/Producer.hpp
index e8e831e15..907dff559 100644
--- a/include/aidge/operator/Producer.hpp
+++ b/include/aidge/operator/Producer.hpp
@@ -29,25 +29,20 @@ class Producer_Op
       public Registrable<Producer_Op, std::string, std::unique_ptr<OperatorImpl>(
                                           const Producer_Op &)> {
 private:
-    std::shared_ptr<Tensor> mOutput;
+    std::shared_ptr<Tensor> mOutput = std::make_shared<Tensor>();
 
 public:
     static constexpr const char* Type = "Producer";
 
     template <std::size_t DIM>
     Producer_Op(const std::array<DimSize_t, DIM>& dims)
-        : Operator(Type),
-          mOutput(std::make_shared<Tensor>())
+        : Operator(Type)
     {
         //ctor
         setDatatype(DataType::Float32);
         mOutput->resize(dims);
     }
 
-    Operator* clone() const override {
-        return new Producer_Op(*static_cast<const Producer_Op*>(this));
-    }
-
     Producer_Op(const std::shared_ptr<Tensor> tensor)
         : Operator(Type),
           mOutput(tensor)
@@ -55,6 +50,27 @@ public:
         setDatatype(tensor->dataType());
     }
 
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    Producer_Op(const Producer_Op& op)
+        : Operator(Type),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::Producer_Op
+     * @param op Operator to copy.
+     */
+    Operator* clone() const override {
+        return new Producer_Op(*static_cast<const Producer_Op*>(this));
+    }
+
     void associateInput(const IOIndex_t /*inputIdx*/, std::shared_ptr<Data> /*data*/) override final {
         assert(false && "Producer operator takes no input");
     }
diff --git a/include/aidge/operator/ReLU.hpp b/include/aidge/operator/ReLU.hpp
index b3983557c..8178ad9fb 100644
--- a/include/aidge/operator/ReLU.hpp
+++ b/include/aidge/operator/ReLU.hpp
@@ -41,6 +41,24 @@ public:
     {
         setDatatype(DataType::Float32);
     }
+
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    ReLU_Op(const ReLU_Op& op)
+        : Operator(Type),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::ReLU_Op
+     * @param op Operator to copy.
+     */
     Operator* clone() const override {
         return new ReLU_Op(*static_cast<const ReLU_Op*>(this));
     }
diff --git a/include/aidge/operator/Softmax.hpp b/include/aidge/operator/Softmax.hpp
index d6ba3f1fc..1aa7da902 100644
--- a/include/aidge/operator/Softmax.hpp
+++ b/include/aidge/operator/Softmax.hpp
@@ -41,6 +41,24 @@ public:
     {
         setDatatype(DataType::Float32);
     }
+
+    /**
+     * @brief Copy-constructor. Copy the operator parameters and its output tensor(s), but not its input tensors (the new operator has no input associated).
+     * @param op Operator to copy.
+     */
+    Softmax_Op(const Softmax_Op& op)
+        : Operator(Type),
+          mOutput(std::make_shared<Tensor>(*op.mOutput))
+    {
+        // cpy-ctor
+        setDatatype(op.mOutput->dataType());
+    }
+
+    /**
+     * @brief Clone the operator using its copy-constructor.
+     * @see Operator::Softmax_Op
+     * @param op Operator to copy.
+     */
     Operator* clone() const override {
         return new Softmax_Op(*static_cast<const Softmax_Op*>(this));
     }
diff --git a/include/aidge/utils/Parameter.hpp b/include/aidge/utils/Parameter.hpp
index b0c6e3595..a47557617 100644
--- a/include/aidge/utils/Parameter.hpp
+++ b/include/aidge/utils/Parameter.hpp
@@ -94,6 +94,12 @@ public:
         (void)p; // avoid unused warning
     }
 
+    Parameterizable(const Parameterizable& params):
+        mParams(params.mParams)
+    {
+        // cpy-ctor (required for Operator cpy-ctor)
+    }
+
     // Compile-time access with enum
     template <PARAM_ENUM paramEnum>
     constexpr typename std::tuple_element<static_cast<std::size_t>(paramEnum),std::tuple<T...>>::type& get() {
diff --git a/unit_tests/graph/Test_GraphView.cpp b/unit_tests/graph/Test_GraphView.cpp
index db57989b1..795cbc1eb 100644
--- a/unit_tests/graph/Test_GraphView.cpp
+++ b/unit_tests/graph/Test_GraphView.cpp
@@ -357,6 +357,11 @@ TEST_CASE("[GraphView] clone") {
     }
 
     auto g2 = g1->clone();
+
+    auto dataProvider2 = Producer({16, 3, 224, 224}, "dataProvider");
+    dataProvider2->addChild(g2->getNode("conv1"), 0);
+
+    g2->forwardDims();
     g2->save("clone_g2");
 
     SECTION("Check node cloning") {
@@ -383,8 +388,20 @@ TEST_CASE("[GraphView] clone") {
         REQUIRE(g1->getNode("conv3_b")->getOperator() != g2->getNode("conv3_b")->getOperator());
     }
 
+    SECTION("Check new connections") {
+        REQUIRE(dataProvider->getOperator()->getOutput(0) != g2->getNode("conv1")->getOperator()->getInput(0));
+        REQUIRE(g1->getNode("conv1")->getOperator()->getInput(1) != g2->getNode("conv1_w")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv1")->getOperator()->getInput(2) != g2->getNode("conv1_b")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv1")->getOperator()->getOutput(0) != g2->getNode("conv2")->getOperator()->getInput(0));
+        REQUIRE(g1->getNode("conv2")->getOperator()->getInput(1) != g2->getNode("conv2_w")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv2")->getOperator()->getInput(2) != g2->getNode("conv2_b")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv2")->getOperator()->getOutput(0) != g2->getNode("conv3")->getOperator()->getInput(0));
+        REQUIRE(g1->getNode("conv3")->getOperator()->getInput(1) != g2->getNode("conv3_w")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv3")->getOperator()->getInput(2) != g2->getNode("conv3_b")->getOperator()->getOutput(0));
+    }
+
     SECTION("Check input-output connections") {
-        REQUIRE(dataProvider->getOperator()->getOutput(0) == g2->getNode("conv1")->getOperator()->getInput(0));
+        REQUIRE(dataProvider2->getOperator()->getOutput(0) == g2->getNode("conv1")->getOperator()->getInput(0));
         REQUIRE(g2->getNode("conv1")->getOperator()->getInput(1) == g2->getNode("conv1_w")->getOperator()->getOutput(0));
         REQUIRE(g2->getNode("conv1")->getOperator()->getInput(2) == g2->getNode("conv1_b")->getOperator()->getOutput(0));
         REQUIRE(g2->getNode("conv1")->getOperator()->getOutput(0) == g2->getNode("conv2")->getOperator()->getInput(0));
@@ -421,6 +438,11 @@ TEST_CASE("[GraphView] cloneSharedProducers") {
     }
 
     auto g2 = g1->cloneSharedProducers();
+
+    auto dataProvider2 = Producer({16, 3, 224, 224}, "dataProvider");
+    dataProvider2->addChild(g2->getNode("conv1"), 0);
+
+    g2->forwardDims();
     g2->save("cloneSharedProducers_g2");
 
     SECTION("Check node cloning") {
@@ -447,8 +469,20 @@ TEST_CASE("[GraphView] cloneSharedProducers") {
         REQUIRE(g1->getNode("conv3_b")->getOperator() == g2->getNode("conv3_b")->getOperator());
     }
 
+    SECTION("Check new connections") {
+        REQUIRE(dataProvider->getOperator()->getOutput(0) != g2->getNode("conv1")->getOperator()->getInput(0));
+        REQUIRE(g1->getNode("conv1")->getOperator()->getInput(1) == g2->getNode("conv1_w")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv1")->getOperator()->getInput(2) == g2->getNode("conv1_b")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv1")->getOperator()->getOutput(0) != g2->getNode("conv2")->getOperator()->getInput(0));
+        REQUIRE(g1->getNode("conv2")->getOperator()->getInput(1) == g2->getNode("conv2_w")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv2")->getOperator()->getInput(2) == g2->getNode("conv2_b")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv2")->getOperator()->getOutput(0) != g2->getNode("conv3")->getOperator()->getInput(0));
+        REQUIRE(g1->getNode("conv3")->getOperator()->getInput(1) == g2->getNode("conv3_w")->getOperator()->getOutput(0));
+        REQUIRE(g1->getNode("conv3")->getOperator()->getInput(2) == g2->getNode("conv3_b")->getOperator()->getOutput(0));
+    }
+
     SECTION("Check input-output connections") {
-        REQUIRE(dataProvider->getOperator()->getOutput(0) == g2->getNode("conv1")->getOperator()->getInput(0));
+        REQUIRE(dataProvider2->getOperator()->getOutput(0) == g2->getNode("conv1")->getOperator()->getInput(0));
         REQUIRE(g2->getNode("conv1")->getOperator()->getInput(1) == g2->getNode("conv1_w")->getOperator()->getOutput(0));
         REQUIRE(g2->getNode("conv1")->getOperator()->getInput(2) == g2->getNode("conv1_b")->getOperator()->getOutput(0));
         REQUIRE(g2->getNode("conv1")->getOperator()->getOutput(0) == g2->getNode("conv2")->getOperator()->getInput(0));
@@ -485,6 +519,7 @@ TEST_CASE("[GraphView] cloneSharedOperators") {
     }
 
     auto g2 = g1->cloneSharedOperators();
+    g2->forwardDims();
     g2->save("cloneSharedOperators_g2");
 
     SECTION("Check node cloning") {
-- 
GitLab