/********************************************************************************
* Copyright (c) 2023 CEA-List
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* SPDX-License-Identifier: EPL-2.0
*
********************************************************************************/

#include "aidge/operator/FC.hpp"

#include <functional>  // std::multiplies
#include <memory>
#include <numeric>     // std::accumulate
#include <string>
#include <vector>

#include "aidge/data/Data.hpp"
#include "aidge/data/Tensor.hpp"
#include "aidge/utils/ErrorHandling.hpp"
#include "aidge/utils/StaticAttributes.hpp"
#include "aidge/utils/Types.h"
// Registered type name of the fully-connected (FC) operator.
const std::string Aidge::FC_Op::Type = "FC";
/**
 * @brief Associate a Tensor with one of the operator's inputs.
 * @param inputIdx Index of the input slot (0: data, 1: weight, 2: bias).
 * @param data Input to associate; must be a Tensor.
 *
 * A 1-D data input (inputIdx == 0) is reshaped to {1, size} so the rest of
 * the operator can assume a leading batch dimension.
 */
void Aidge::FC_Op::associateInput(const Aidge::IOIndex_t inputIdx, const std::shared_ptr<Aidge::Data>& data) {
    // Use nbInputs() instead of the hard-coded literal 3 so the check stays
    // consistent with the error message (which already prints nbInputs()).
    AIDGE_ASSERT(inputIdx < nbInputs(), "Operators {} supports only {} inputs", type(), nbInputs());
    AIDGE_ASSERT(data->type() == Tensor::Type, "input data must be of Tensor type");
    // TODO: FIXME: check this, because data dims may not be initialized at this point...
    //if (inputIdx == 2) {
    //    assert(std::dynamic_pointer_cast<Tensor>(data)->size() == ((this->template getAttr<FCAttr::NoBias>()) == false ? static_cast<std::size_t>(this->template getAttr<FCAttr::OutChannels>()) : 0));
    //    assert(std::dynamic_pointer_cast<Tensor>(data)->nbDims() == 1);
    //}
    mInputs[inputIdx] = std::dynamic_pointer_cast<Tensor>(data);
    // Promote 1-D data input to rank 2: {1, nbFeatures}.
    if (inputIdx == 0 && getInput(0)->nbDims() == 1)
        mInputs[inputIdx]->resize({1, getInput(inputIdx)->size()});
}
bool Aidge::FC_Op::forwardDims(bool /*allowDataDependency*/) {

Maxence Naud
committed
bool associated = true;
for (IOIndex_t i = 0; i < nbInputs(); ++i) {
if (!getInput(i)) {
AIDGE_THROW_OR_ABORT(std::runtime_error, "{}: input #{} should be associated with a Tensor", type(), i);
}
associated &= !(getInput(i)->empty());
}
if (associated) {

Maxence Naud
committed
// first check weight since it defines inChannels and outChannels
AIDGE_ASSERT((getInput(1)->nbDims() == 2),
"Wrong weight Tensor dimension: {} for FC operator (should have 2 dimensions).", getInput(1)->nbDims());
const DimSize_t outChannels = getInput(1)->template dims<2>()[0];
const DimSize_t inChannels = getInput(1)->template dims<2>()[1];
// check data
const std::vector<DimSize_t>& inputDims = getInput(0)->dims();
if (getInput(0)->nbDims() == 1) {
AIDGE_ASSERT(inputDims[0] == inChannels,
"Wrong number of input features for input data ({}), expected {}",
inputDims[0], inChannels);
} else {
AIDGE_ASSERT(getInput(0)->nbDims() > 1, "FC input data must have at least one dimension");
const DimSize_t nbInputFeatures = std::accumulate(inputDims.cbegin() + 1, inputDims.cend(), DimSize_t(1), std::multiplies<DimSize_t>());
AIDGE_ASSERT(nbInputFeatures == inChannels,
"Wrong number of input features for input data ({}), expected {}",
nbInputFeatures, inChannels);
}
// check optional bias
if(!this->template getAttr<FCAttr::NoBias>())
AIDGE_ASSERT((getInput(2)->nbDims() == 1) &&
(getInput(2)->template dims<1>()[0] == outChannels),
"Wrong bias size for FC operator.");

Maxence Naud
committed
// <batch, OutChannels>

Maxence Naud
committed
mOutputs[0]->resize({getInput(0)->dims()[0], outChannels});

Maxence Naud
committed
}

Maxence Naud
committed
}
/**
 * @brief Select the backend implementation for this operator and its tensors.
 * @param name Backend name (e.g. "cpu").
 * @param device Device index on that backend.
 *
 * The weight and bias inputs are also moved to the chosen backend by default,
 * when they are already associated.
 */
void Aidge::FC_Op::setBackend(const std::string& name, Aidge::DeviceIdx_t device) {
    SET_IMPL_MACRO(FC_Op, *this, name);
    mOutputs[0]->setBackend(name, device);

    // By default, automatically set backend for weight and bias inputs.
    // Guard against inputs that have not been associated yet: the previous
    // code dereferenced getInput(1)/getInput(2) unconditionally and crashed
    // if setBackend() was called before associateInput().
    if (getInput(1)) {
        getInput(1)->setBackend(name, device);
    }
    if (getInput(2)) {
        getInput(2)->setBackend(name, device);
    }
}