diff --git a/include/aidge/operator/MetaOperator.hpp b/include/aidge/operator/MetaOperator.hpp
index f7f1cdfd5bc0d799adc87bd7b7e1be999363627f..c6ab45290265617850c01bb00abd7f972cd3525c 100644
--- a/include/aidge/operator/MetaOperator.hpp
+++ b/include/aidge/operator/MetaOperator.hpp
@@ -69,10 +69,7 @@ public:
      *
      * @param op The operator to copy.
      */
-    MetaOperator_Op(const MetaOperator_Op& op)
-        : OperatorTensor(op),
-          mGraph(op.mGraph->clone()) // Clone the micro-graph for isolation
-    {}
+    MetaOperator_Op(const MetaOperator_Op& op);
 
     /**
      * @brief Set the node for scheduling.
diff --git a/src/operator/MetaOperator.cpp b/src/operator/MetaOperator.cpp
index ae3c3ed6ca85c059204c524f467f5387f656e30b..9a8a943fc905fcb43808bf44b96aa32dd055cbd0 100644
--- a/src/operator/MetaOperator.cpp
+++ b/src/operator/MetaOperator.cpp
@@ -54,8 +54,28 @@ Aidge::MetaOperator_Op::MetaOperator_Op(const std::string& type, const std::shar
     }
 }
 
+Aidge::MetaOperator_Op::MetaOperator_Op(const MetaOperator_Op& op)
+    : OperatorTensor(op),
+      mGraph(op.mGraph->clone()), // Clone the micro-graph for isolation
+      mAttributes(std::make_shared<DynamicAttributes>(*op.mAttributes)) // Clone attributes
+{
+    // Associate outputs to micro-graph outputs for custom implementation
+    for (size_t outputIdx = 0; outputIdx < mOutputs.size(); ++outputIdx) {
+        const auto& outputOp = mGraph->getOrderedOutputs()[outputIdx];
+        if (outputOp.first) {
+            mOutputs[outputIdx] = std::dynamic_pointer_cast<Tensor>(outputOp.first->getOperator()->getRawOutput(outputOp.second));
+        }
+    }
+
+    // Attributes are already cloned.
+}
+
 std::shared_ptr<Aidge::Operator> Aidge::MetaOperator_Op::clone() const {
-    return std::make_shared<MetaOperator_Op>(type(), mGraph->clone());
+    auto metaOp = std::make_shared<MetaOperator_Op>(*this);
+    if (mImpl) {
+        metaOp->setBackend(mImpl->backend());
+    }
+    return metaOp;
 }
 
 void Aidge::MetaOperator_Op::associateInput(const IOIndex_t inputIdx, const std::shared_ptr<Data>& data) {
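
Usage sketch (not part of the patch): the snippet below illustrates the behaviour this change is meant to provide, namely that clone() now goes through the copy-constructor, so the micro-graph and the attributes are deep-copied and, when an implementation exists, the backend is re-applied, instead of rebuilding the operator from (type, graph) alone. The Sequential() and Identity() helpers, their header locations, and the two-argument MetaOperator_Op construction are assumptions about the surrounding Aidge API, not something this diff introduces.

#include <memory>

#include "aidge/graph/OpArgs.hpp"          // Sequential (assumed header location)
#include "aidge/operator/Identity.hpp"     // Identity (assumed header location)
#include "aidge/operator/MetaOperator.hpp"

int main() {
    // Wrap a one-node micro-graph in a meta-operator (hypothetical "MyMeta" type).
    auto microGraph = Aidge::Sequential({Aidge::Identity("id")});
    auto metaOp = std::make_shared<Aidge::MetaOperator_Op>("MyMeta", microGraph);

    // Previously, clone() returned MetaOperator_Op(type(), mGraph->clone()), which
    // dropped the attributes and any backend already set. With this patch, clone()
    // copy-constructs the operator, deep-copying the micro-graph and attributes,
    // and re-applies the backend when mImpl is set.
    auto cloned = std::static_pointer_cast<Aidge::MetaOperator_Op>(metaOp->clone());

    return cloned ? 0 : 1;
}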