const SoftmaxOptions *opts);
std::vector<INode::Ref> createReshape(InputOps inputs, InputParams params,
const ReshapeOptions *opts);
+ std::vector<INode::Ref> createFullyConnected(InputOps inputs, InputParams params,
+ const FullyConnectedOptions *opts);
private:
Graph *graph = nullptr;
case BuiltinOperator_RESHAPE:
outputs = opCreator->createReshape(inputs, params, op->builtin_options_as<ReshapeOptions>());
break;
+ case BuiltinOperator_FULLY_CONNECTED:
+ outputs = opCreator->createFullyConnected(inputs, params, op->builtin_options_as<FullyConnectedOptions>());
+ break;
case BuiltinOperator_SOFTMAX:
outputs = opCreator->createSoftmax(inputs, params, op->builtin_options_as<SoftmaxOptions>());
break;
// don't forget to change this if tensor shape processing architecture changes.
paramsForOp.emplace_back(transposeTensor<1, 2, 3, 0>(tensor));
}
+ else if (opcode == BuiltinOperator_FULLY_CONNECTED && t->shape()->size() == 2)
+ {
+ paramsForOp.emplace_back(transposeTensor<1, 0>(tensor));
+ }
else
{
paramsForOp.push_back(tensor);
#include "nnc/core/IR/model/operations/concat_op.h"
#include "nnc/core/IR/model/operations/conv_2d_op.h"
#include "nnc/core/IR/model/operations/depthwise_conv2d_op.h"
+#include "nnc/core/IR/model/operations/fully_connected_op.h"
#include "nnc/core/IR/model/operations/relu_op.h"
#include "nnc/core/IR/model/operations/capped_relu_op.h"
#include "nnc/core/IR/model/operations/softmax_op.h"
return outputs;
}
+std::vector<INode::Ref> OpCreator::createFullyConnected(InputOps &inputs, InputParams &params,
+                                                        const FullyConnectedOptions *opts)
+{
+  // Add Reshape operation to make sure the input for FC operation has shape [1, fcInputSize]
+  auto outputs = createOp<ops::ReshapeOp>(inputs, ActivationFunctionType_NONE);
+  // params[0] is the weight tensor; it was transposed to [fcInputSize, numUnits]
+  // during import (transposeTensor<1, 0>), so dim(0) is the flattened input size.
+  uint32_t fcInputSize = params[0]->getShape().dim(0);
+  outputs[0]->getOperation()->setOutputShape(0, {1, fcInputSize});
+
+  // FC itself is created without activation; TFLite fuses the activation after the
+  // bias add, so the model's fused_activation_function goes on the BiasAdd below.
+  auto fcOutputs = createOp<ops::FullyConnectedOp>(outputs, ActivationFunctionType_NONE, std::move(*params[0]));
+  return createOp<ops::BiasAddOp>(fcOutputs, opts->fused_activation_function(), std::move(*params[1]));
+}
+
INode::Ref OpCreator::addFusedActivation(INode::Ref input, ActivationFunctionType activationType)
{
INode::Ref activation;