return new operation::ConcatNode{inputs, outputs, param};
};
+ _map[ANEURALNETWORKS_RESHAPE] = [](const OperationFactory::Param &init_param) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> A tensor, specifying the tensor to be reshaped.
+ // 1 -> A 1-D tensor of type ANEURALNETWORKS_TENSOR_INT32, defining the shape of the output
+ // tensor
+
+    // TODO The second input (init_param.inputs[1]) should be handled as the shape tensor
+    //      Currently unused; we assume it matches the output tensor's shape
+ operand::IndexSet inputs{init_param.inputs[0] /* , init_param.inputs[1] */};
+ operand::IndexSet outputs{init_param.outputs[0]};
+
+ return new operation::ReshapeNode{inputs, outputs};
+ };
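For reference, the init_param fields consumed above come straight from the NNAPI call that registered the operation. A minimal caller-side sketch, with invented operand indices (model stands for an already-populated ANeuralNetworksModel*):

    // Operand 0 (data) and operand 1 (shape) feed RESHAPE; operand 2 receives the result.
    uint32_t reshape_ins[] = {0, 1};
    uint32_t reshape_outs[] = {2};
    ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_RESHAPE, 2, reshape_ins, 1, reshape_outs);

The lambda above then sees input_count == 2 and output_count == 1, and (per the TODO) keeps only the data tensor as a graph input for now.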
+
+ _map[ANEURALNETWORKS_FULLY_CONNECTED] = [](const OperationFactory::Param &init_param) {
+ assert(init_param.input_count == 4 && init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+ // 0 -> A tensor, specifying the input.
+ // 1 -> A 2-D tensor, specifying the weights
+ // 2 -> A 1-D tensor, specifying the bias
+ // 3 -> An INT32 value, and has to be one of the FuseCode values
+
+ operand::IndexSet inputs{init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]};
+ operand::IndexSet outputs{init_param.outputs[0]};
+
+ operation::FullyConnectedNode::Param param;
+ param.activation_index = operand::Index{init_param.inputs[3]};
+
+ return new operation::FullyConnectedNode{inputs, outputs, param};
+ };
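Note the split above: the three tensor operands become graph inputs, while the FuseCode operand (inputs[3]) is only recorded in Param, which is why FullyConnectedNode's constraint is createExact(3u) rather than 4. A rough construction sketch with invented operand indices, assuming operand::IndexSet accepts a brace list of raw indices as in the lambdas above:

    operand::IndexSet fc_inputs{4 /* input */, 5 /* weights */, 6 /* bias */};
    operand::IndexSet fc_outputs{8};
    operation::FullyConnectedNode::Param fc_param;
    fc_param.activation_index = operand::Index{7}; // scalar FuseCode operand
    operation::FullyConnectedNode fc_node{fc_inputs, fc_outputs, fc_param};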
+
+ _map[ANEURALNETWORKS_SOFTMAX] = [](const OperationFactory::Param &init_param) {
+ assert(init_param.input_count == 2 && init_param.output_count == 1);
+
+ // Each input should be interpreted as follows:
+ //
+    // 0 -> A 2-D or 4-D tensor, specifying the input.
+ // 1 -> FLOAT32 value, specifying the positive scaling factor for the exponent, beta.
+
+ operand::IndexSet inputs{init_param.inputs[0]};
+ operand::IndexSet outputs{init_param.outputs[0]};
+
+ operation::SoftmaxNode::Param param;
+ param.scale_index = operand::Index{init_param.inputs[1]};
+
+ return new operation::SoftmaxNode{inputs, outputs, param};
+ };
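SOFTMAX follows the same pattern: only the data tensor becomes a graph input, and beta (inputs[1]) travels in Param as scale_index. A sketch of exercising the registration through the factory, using invented operand indices and assuming the factory exposes a create(type, param) entry point (not shown in this excerpt):

    uint32_t softmax_ins[] = {0 /* 2-D or 4-D input */, 1 /* FLOAT32 beta */};
    uint32_t softmax_outs[] = {2};
    OperationFactory::Param sm_param{2, softmax_ins, 1, softmax_outs};
    auto *sm_node = OperationFactory::instance().create(ANEURALNETWORKS_SOFTMAX, sm_param);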
+
_map[ANEURALNETWORKS_CAST_EX] = [](const OperationFactory::Param &init_param) {
assert(init_param.input_count == 1 && init_param.output_count == 1);
}
auto &factory = OperationFactory::instance();
- auto node_param = // TODO Will be removed once all creation is done via factory
- neurun::model::operation::Node::InitParam{inputCount, inputs, outputCount, outputs};
OperationFactory::Param param{inputCount, inputs, outputCount, outputs};
switch (type)
case ANEURALNETWORKS_MAX_POOL_2D:
case ANEURALNETWORKS_AVERAGE_POOL_2D:
case ANEURALNETWORKS_CONCATENATION:
+ case ANEURALNETWORKS_RESHAPE:
+ case ANEURALNETWORKS_FULLY_CONNECTED:
+ case ANEURALNETWORKS_SOFTMAX:
case ANEURALNETWORKS_ADD:
case ANEURALNETWORKS_SUB:
case ANEURALNETWORKS_CONV_2D:
break;
}
- case ANEURALNETWORKS_RESHAPE:
- {
- using GraphNode = neurun::model::operation::ReshapeNode;
-
- _model->addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
-
- break;
- }
- case ANEURALNETWORKS_FULLY_CONNECTED:
- {
- using GraphNode = neurun::model::operation::FullyConnectedNode;
-
- _model->addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
-
- break;
- }
- case ANEURALNETWORKS_SOFTMAX:
- {
- using GraphNode = neurun::model::operation::SoftmaxNode;
-
- _model->addOperation(nnfw::cpp14::make_unique<GraphNode>(node_param));
- break;
- }
default:
throw std::runtime_error{"Not supported operation"};
};
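The grouped cases above fall through to a single factory-backed path whose body is elided in this excerpt; presumably it amounts to something like the sketch below (the create() name and addOperation taking ownership via a unique_ptr are assumptions, not shown in this diff):

    auto *node = factory.create(type, param);
    _model->addOperation(std::unique_ptr<neurun::model::operation::Node>{node});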
void FullyConnectedNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-FullyConnectedNode::FullyConnectedNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(3u)}
+FullyConnectedNode::FullyConnectedNode(const operand::IndexSet &inputs,
+                                       const operand::IndexSet &outputs, const Param &param)
+ : model::operation::Node{OperandConstraint::createExact(3u), inputs, outputs}, _param{param}
{
- assert(init_param.input_count == 4 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> A tensor, specifying the input.
- // 1 -> A 2-D tensor, specifying the weights
- // 2 -> A 1-D tensor, specifying the bias
- // 3 -> An INT32 value, and has to be one of the FuseCode values
-
- setInputs({init_param.inputs[0], init_param.inputs[1], init_param.inputs[2]});
- setOutputs({init_param.outputs[0]});
-
- _param.activation_index = operand::Index{init_param.inputs[3]};
}
} // namespace operation
class FullyConnectedNode : public model::operation::Node
{
public:
- FullyConnectedNode(const model::operation::Node::InitParam &init_param);
-
enum Input
{
INPUT = 0,
};
public:
+  FullyConnectedNode(const operand::IndexSet &inputs, const operand::IndexSet &outputs,
+                     const Param &param);
+
+public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "FullyConnected"; }
void ReshapeNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-ReshapeNode::ReshapeNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(1u)}
+ReshapeNode::ReshapeNode(const operand::IndexSet &inputs, const operand::IndexSet &outputs)
+ : model::operation::Node{OperandConstraint::createExact(1u), inputs, outputs}
{
- assert(init_param.input_count == 2 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> A tensor, specifying the tensor to be reshaped.
- // 1 -> A 1-D tensor of type ANEURALNETWORKS_TENSOR_INT32, defining the shape of the output
- // tensor
-
- // TODO Second input should be shape tensor (init_param.inputs[1])
- setInputs({init_param.inputs[0] /* , init_param.inputs[1] */});
- setOutputs({init_param.outputs[0]});
}
} // namespace operation
class ReshapeNode : public model::operation::Node
{
public:
- ReshapeNode(const model::operation::Node::InitParam &init_param);
-
enum Input
{
INPUT = 0
};
public:
+ ReshapeNode(const operand::IndexSet &inputs, const operand::IndexSet &outputs);
+
+public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "Reshape"; }
};
void SoftmaxNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-SoftmaxNode::SoftmaxNode(const model::operation::Node::InitParam &init_param)
- : model::operation::Node{OperandConstraint::createExact(1u)}
+SoftmaxNode::SoftmaxNode(const operand::IndexSet &inputs, const operand::IndexSet &outputs,
+                         const Param &param)
+ : model::operation::Node{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
{
- assert(init_param.input_count == 2 && init_param.output_count == 1);
-
- // Each input should be interpreted as follows:
- //
- // 0 -> A 2-D or 4-D tensor, specifying the tensor to be reshaped.
- // 1 -> FLOAT32 value, specifying the positive scaling factor for the exponent, beta.
-
- setInputs({init_param.inputs[0]});
- setOutputs({init_param.outputs[0]});
-
- _param.scale_index = operand::Index{init_param.inputs[1]};
}
} // namespace operation
class SoftmaxNode : public model::operation::Node
{
public:
- SoftmaxNode(const model::operation::Node::InitParam &init_param);
enum Input
{
INPUT = 0
};
public:
+  SoftmaxNode(const operand::IndexSet &inputs, const operand::IndexSet &outputs,
+              const Param &param);
+
+public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "SoftMax"; }