// DO NOTHING
}
-Initializer InitializerGenerator::generate(const graph::operation::Conv2D::Implicit::Node &node)
+Initializer InitializerGenerator::generate(const graph::operation::Conv2DNode &node)
{
- using graph::operation::Conv2D::Implicit::Input;
+ using graph::operation::Conv2DNode;
Initializer ret;
- ret.push_back({node.getInputs().at(Input::KERNEL), generateWeight(node)});
- ret.push_back({node.getInputs().at(Input::BIAS), generateBias(node)});
+ ret.push_back({node.getInputs().at(Conv2DNode::Input::KERNEL), generateWeight(node)});
+ ret.push_back({node.getInputs().at(Conv2DNode::Input::BIAS), generateBias(node)});
return ret;
}
-Initializer InitializerGenerator::generate(const graph::operation::FullyConnected::Node &node)
+Initializer InitializerGenerator::generate(const graph::operation::FullyConnectedNode &node)
{
- using graph::operation::FullyConnected::Input;
+ using graph::operation::FullyConnectedNode;
Initializer ret;
- ret.push_back({node.getInputs().at(Input::WEIGHT), generateWeight(node)});
- ret.push_back({node.getInputs().at(Input::BIAS), generateBias(node)});
+ ret.push_back({node.getInputs().at(FullyConnectedNode::Input::WEIGHT), generateWeight(node)});
+ ret.push_back({node.getInputs().at(FullyConnectedNode::Input::BIAS), generateBias(node)});
return ret;
}
-InitializerFn
-InitializerGenerator::generateWeight(const graph::operation::Conv2D::Implicit::Node &node)
+InitializerFn InitializerGenerator::generateWeight(const graph::operation::Conv2DNode &node)
{
const auto ker_index{node.getInputs().at(1)};
};
}
-InitializerFn
-InitializerGenerator::generateWeight(const graph::operation::FullyConnected::Node &node)
+InitializerFn InitializerGenerator::generateWeight(const graph::operation::FullyConnectedNode &node)
{
const auto weight_index{node.getInputs().at(1)};
const auto input_index{node.getInputs().at(0)};
};
}
-InitializerFn
-InitializerGenerator::generateBias(const graph::operation::Conv2D::Implicit::Node &node)
+InitializerFn InitializerGenerator::generateBias(const graph::operation::Conv2DNode &node)
{
// TODO Refactor so we can reuse the common code
};
}
-InitializerFn InitializerGenerator::generateBias(const graph::operation::FullyConnected::Node &node)
+InitializerFn InitializerGenerator::generateBias(const graph::operation::FullyConnectedNode &node)
{
const auto bias_index{node.getInputs().at(2)};
InitializerGenerator(const neurun::graph::operand::Set &ctx);
public:
- Initializer generate(const graph::operation::Conv2D::Implicit::Node &node) override;
- Initializer generate(const graph::operation::FullyConnected::Node &node) override;
+ Initializer generate(const graph::operation::Conv2DNode &node) override;
+ Initializer generate(const graph::operation::FullyConnectedNode &node) override;
private:
- InitializerFn generateWeight(const graph::operation::Conv2D::Implicit::Node &node);
- InitializerFn generateWeight(const graph::operation::FullyConnected::Node &node);
+ InitializerFn generateWeight(const graph::operation::Conv2DNode &node);
+ InitializerFn generateWeight(const graph::operation::FullyConnectedNode &node);
- InitializerFn generateBias(const graph::operation::Conv2D::Implicit::Node &node);
- InitializerFn generateBias(const graph::operation::FullyConnected::Node &node);
+ InitializerFn generateBias(const graph::operation::Conv2DNode &node);
+ InitializerFn generateBias(const graph::operation::FullyConnectedNode &node);
private:
const neurun::graph::operand::Set &_ctx;
// DO NOTHING
}
-Stage StageGenerator::generate(const graph::operation::Conv2D::Implicit::Node &node)
+Stage StageGenerator::generate(const graph::operation::Conv2DNode &node)
{
- using namespace graph::operation::Conv2D::Implicit;
+ using graph::operation::Conv2DNode;
const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(Input::INPUT)};
- const auto ker_index{node.getInputs().at(Input::KERNEL)};
- const auto bias_index{node.getInputs().at(Input::BIAS)};
+ const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
const auto vstride_index{node.param().vstride_index};
const auto hstride_index{node.param().hstride_index};
};
}
-Stage StageGenerator::generate(const graph::operation::MaxPool2D::Implicit::Node &node)
+Stage StageGenerator::generate(const graph::operation::MaxPool2DNode &node)
{
const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(graph::operation::MaxPool2D::Implicit::Input::INPUT)};
+ const auto ifm_index{node.getInputs().at(graph::operation::MaxPool2DNode::Input::INPUT)};
const auto kh_index{node.param().kh_index};
const auto kw_index{node.param().kw_index};
};
}
-Stage StageGenerator::generate(const graph::operation::AvgPool2D::Implicit::Node &node)
+Stage StageGenerator::generate(const graph::operation::AvgPool2DNode &node)
{
const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(graph::operation::AvgPool2D::Implicit::Input::INPUT)};
+ const auto ifm_index{node.getInputs().at(graph::operation::AvgPool2DNode::Input::INPUT)};
const auto kh_index{node.param().kh_index};
const auto kw_index{node.param().kw_index};
};
}
-Stage StageGenerator::generate(const graph::operation::Concat::Node &node)
+Stage StageGenerator::generate(const graph::operation::ConcatNode &node)
{
const auto ofm_index{node.getOutputs().at(0)};
const auto axis_index{node.param().axis_index};
};
}
-Stage StageGenerator::generate(const graph::operation::FullyConnected::Node &node)
+Stage StageGenerator::generate(const graph::operation::FullyConnectedNode &node)
{
- using namespace graph::operation::FullyConnected;
+ using graph::operation::FullyConnectedNode;
const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(Input::INPUT)};
- const auto weight_index{node.getInputs().at(Input::WEIGHT)};
- const auto bias_index{node.getInputs().at(Input::BIAS)};
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
+ const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
const auto activation_index{node.param().activation_index};
// Construct operation parameters
};
}
-Stage StageGenerator::generate(const graph::operation::Reshape::Node &node)
+Stage StageGenerator::generate(const graph::operation::ReshapeNode &node)
{
const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(graph::operation::Reshape::Input::INPUT)};
+ const auto input_index{node.getInputs().at(graph::operation::ReshapeNode::Input::INPUT)};
struct Param
{
};
}
-Stage StageGenerator::generate(const graph::operation::Softmax::Node &node)
+Stage StageGenerator::generate(const graph::operation::SoftmaxNode &node)
{
const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(graph::operation::Softmax::Input::INPUT)};
+ const auto input_index{node.getInputs().at(graph::operation::SoftmaxNode::Input::INPUT)};
const auto scale_index{node.param().scale_index};
assert(_ctx.at(scale_index).shape().rank() == 0);
};
}
-Stage StageGenerator::generate(const graph::operation::NOP::Node & /* node */)
+Stage StageGenerator::generate(const graph::operation::NOPNode & /* node */)
{
// DO NOTHING
return nullptr;
}
-Stage StageGenerator::generate(const graph::operation::Permute::Node & /* node */)
+Stage StageGenerator::generate(const graph::operation::PermuteNode & /* node */)
{
throw "Unsupported";
}
virtual std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
- virtual Stage generate(const graph::operation::Conv2D::Implicit::Node &node) override;
- virtual Stage generate(const graph::operation::MaxPool2D::Implicit::Node &node) override;
- virtual Stage generate(const graph::operation::AvgPool2D::Implicit::Node &node) override;
- virtual Stage generate(const graph::operation::Concat::Node &node) override;
- virtual Stage generate(const graph::operation::FullyConnected::Node &node) override;
- virtual Stage generate(const graph::operation::Reshape::Node &node) override;
- virtual Stage generate(const graph::operation::Softmax::Node &node) override;
- virtual Stage generate(const graph::operation::NOP::Node &node) override;
- virtual Stage generate(const graph::operation::Permute::Node &node) override;
+ virtual Stage generate(const graph::operation::Conv2DNode &node) override;
+ virtual Stage generate(const graph::operation::MaxPool2DNode &node) override;
+ virtual Stage generate(const graph::operation::AvgPool2DNode &node) override;
+ virtual Stage generate(const graph::operation::ConcatNode &node) override;
+ virtual Stage generate(const graph::operation::FullyConnectedNode &node) override;
+ virtual Stage generate(const graph::operation::ReshapeNode &node) override;
+ virtual Stage generate(const graph::operation::SoftmaxNode &node) override;
+ virtual Stage generate(const graph::operation::NOPNode &node) override;
+ virtual Stage generate(const graph::operation::PermuteNode &node) override;
virtual Stage generate(const graph::operation::AddNode &node) override;
private:
// DO NOTHING
}
-Initializer InitializerGenerator::generate(const graph::operation::Conv2D::Implicit::Node &node)
+Initializer InitializerGenerator::generate(const graph::operation::Conv2DNode &node)
{
- using graph::operation::Conv2D::Implicit::Input;
+ using graph::operation::Conv2DNode;
Initializer ret;
- ret.push_back({node.getInputs().at(Input::KERNEL), generateWeight(node)});
- ret.push_back({node.getInputs().at(Input::BIAS), generateBias(node)});
+ ret.push_back({node.getInputs().at(Conv2DNode::Input::KERNEL), generateWeight(node)});
+ ret.push_back({node.getInputs().at(Conv2DNode::Input::BIAS), generateBias(node)});
return ret;
}
-Initializer InitializerGenerator::generate(const graph::operation::FullyConnected::Node &node)
+Initializer InitializerGenerator::generate(const graph::operation::FullyConnectedNode &node)
{
- using graph::operation::FullyConnected::Input;
+ using graph::operation::FullyConnectedNode;
Initializer ret;
- ret.push_back({node.getInputs().at(Input::WEIGHT), generateWeight(node)});
- ret.push_back({node.getInputs().at(Input::BIAS), generateBias(node)});
+ ret.push_back({node.getInputs().at(FullyConnectedNode::Input::WEIGHT), generateWeight(node)});
+ ret.push_back({node.getInputs().at(FullyConnectedNode::Input::BIAS), generateBias(node)});
return ret;
}
-InitializerFn
-InitializerGenerator::generateWeight(const graph::operation::Conv2D::Implicit::Node &node)
+InitializerFn InitializerGenerator::generateWeight(const graph::operation::Conv2DNode &node)
{
const auto ker_index{node.getInputs().at(1)};
};
}
-InitializerFn
-InitializerGenerator::generateWeight(const graph::operation::FullyConnected::Node &node)
+InitializerFn InitializerGenerator::generateWeight(const graph::operation::FullyConnectedNode &node)
{
const auto weight_index{node.getInputs().at(1)};
const auto input_index{node.getInputs().at(0)};
}
}
-InitializerFn
-InitializerGenerator::generateBias(const graph::operation::Conv2D::Implicit::Node &node)
+InitializerFn InitializerGenerator::generateBias(const graph::operation::Conv2DNode &node)
{
// TODO Refactor so we can reuse the common code
};
}
-InitializerFn InitializerGenerator::generateBias(const graph::operation::FullyConnected::Node &node)
+InitializerFn InitializerGenerator::generateBias(const graph::operation::FullyConnectedNode &node)
{
const auto bias_index{node.getInputs().at(2)};
InitializerGenerator(const neurun::graph::operand::Set &ctx);
public:
- Initializer generate(const graph::operation::Conv2D::Implicit::Node &node) override;
- Initializer generate(const graph::operation::FullyConnected::Node &node) override;
+ Initializer generate(const graph::operation::Conv2DNode &node) override;
+ Initializer generate(const graph::operation::FullyConnectedNode &node) override;
private:
- InitializerFn generateWeight(const graph::operation::Conv2D::Implicit::Node &node);
- InitializerFn generateWeight(const graph::operation::FullyConnected::Node &node);
+ InitializerFn generateWeight(const graph::operation::Conv2DNode &node);
+ InitializerFn generateWeight(const graph::operation::FullyConnectedNode &node);
- InitializerFn generateBias(const graph::operation::Conv2D::Implicit::Node &node);
- InitializerFn generateBias(const graph::operation::FullyConnected::Node &node);
+ InitializerFn generateBias(const graph::operation::Conv2DNode &node);
+ InitializerFn generateBias(const graph::operation::FullyConnectedNode &node);
private:
const neurun::graph::operand::Set &_ctx;
// DO NOTHING
}
-Stage StageGenerator::generate(const graph::operation::Conv2D::Implicit::Node &node)
+Stage StageGenerator::generate(const graph::operation::Conv2DNode &node)
{
- using namespace graph::operation::Conv2D::Implicit;
+ using graph::operation::Conv2DNode;
const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(Input::INPUT)};
- const auto ker_index{node.getInputs().at(Input::KERNEL)};
- const auto bias_index{node.getInputs().at(Input::BIAS)};
+ const auto ifm_index{node.getInputs().at(Conv2DNode::Input::INPUT)};
+ const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
+ const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
const auto vstride_index{node.param().vstride_index};
const auto hstride_index{node.param().hstride_index};
};
}
-Stage StageGenerator::generate(const graph::operation::MaxPool2D::Implicit::Node &node)
+Stage StageGenerator::generate(const graph::operation::MaxPool2DNode &node)
{
VERBOSE(MaxPool2D) << "generate CPU MaxPool2D" << std::endl;
const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(graph::operation::MaxPool2D::Implicit::Input::INPUT)};
+ const auto ifm_index{node.getInputs().at(graph::operation::MaxPool2DNode::Input::INPUT)};
const auto kh_index{node.param().kh_index};
const auto kw_index{node.param().kw_index};
};
}
-Stage StageGenerator::generate(const graph::operation::AvgPool2D::Implicit::Node &node)
+Stage StageGenerator::generate(const graph::operation::AvgPool2DNode &node)
{
VERBOSE(AvgPool2D) << "generate CPU AvgPool2D" << std::endl;
const auto ofm_index{node.getOutputs().at(0)};
- const auto ifm_index{node.getInputs().at(graph::operation::AvgPool2D::Implicit::Input::INPUT)};
+ const auto ifm_index{node.getInputs().at(graph::operation::AvgPool2DNode::Input::INPUT)};
const auto kh_index{node.param().kh_index};
const auto kw_index{node.param().kw_index};
};
}
-Stage StageGenerator::generate(const graph::operation::Concat::Node &node)
+Stage StageGenerator::generate(const graph::operation::ConcatNode &node)
{
VERBOSE(Concat) << "generate CPU Concat" << std::endl;
};
}
-Stage StageGenerator::generate(const graph::operation::FullyConnected::Node &node)
+Stage StageGenerator::generate(const graph::operation::FullyConnectedNode &node)
{
VERBOSE(FullyConnected) << "generate CPU FullyConnected" << std::endl;
- using namespace graph::operation::FullyConnected;
+ using graph::operation::FullyConnectedNode;
const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(Input::INPUT)};
- const auto weight_index{node.getInputs().at(Input::WEIGHT)};
- const auto bias_index{node.getInputs().at(Input::BIAS)};
+ const auto input_index{node.getInputs().at(FullyConnectedNode::Input::INPUT)};
+ const auto weight_index{node.getInputs().at(FullyConnectedNode::Input::WEIGHT)};
+ const auto bias_index{node.getInputs().at(FullyConnectedNode::Input::BIAS)};
const auto activation_index{node.param().activation_index};
// Construct operation parameters
};
}
-Stage StageGenerator::generate(const graph::operation::Reshape::Node &node)
+Stage StageGenerator::generate(const graph::operation::ReshapeNode &node)
{
const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(graph::operation::Reshape::Input::INPUT)};
+ const auto input_index{node.getInputs().at(graph::operation::ReshapeNode::Input::INPUT)};
struct Param
{
};
}
-Stage StageGenerator::generate(const graph::operation::Softmax::Node &node)
+Stage StageGenerator::generate(const graph::operation::SoftmaxNode &node)
{
VERBOSE(Softmax) << "generate CPU Softmax" << std::endl;
const auto output_index{node.getOutputs().at(0)};
- const auto input_index{node.getInputs().at(graph::operation::Softmax::Input::INPUT)};
+ const auto input_index{node.getInputs().at(graph::operation::SoftmaxNode::Input::INPUT)};
const auto scale_index{node.param().scale_index};
struct Param
};
}
-Stage StageGenerator::generate(const graph::operation::NOP::Node & /* node */)
+Stage StageGenerator::generate(const graph::operation::NOPNode & /* node */)
{
// DO NOTHING
return nullptr;
}
-Stage StageGenerator::generate(const graph::operation::Permute::Node &node)
+Stage StageGenerator::generate(const graph::operation::PermuteNode &node)
{
VERBOSE(Permute) << "generate CPU Permute" << std::endl;
virtual std::shared_ptr<ITensorBuilder> tensor_builder() override { return _tensor_builder; }
- virtual Stage generate(const graph::operation::Conv2D::Implicit::Node &node) override;
- virtual Stage generate(const graph::operation::MaxPool2D::Implicit::Node &node) override;
- virtual Stage generate(const graph::operation::AvgPool2D::Implicit::Node &node) override;
- virtual Stage generate(const graph::operation::Concat::Node &node) override;
- virtual Stage generate(const graph::operation::FullyConnected::Node &node) override;
- virtual Stage generate(const graph::operation::Reshape::Node &node) override;
- virtual Stage generate(const graph::operation::Softmax::Node &node) override;
- virtual Stage generate(const graph::operation::NOP::Node &node) override;
- virtual Stage generate(const graph::operation::Permute::Node &node) override;
+ virtual Stage generate(const graph::operation::Conv2DNode &node) override;
+ virtual Stage generate(const graph::operation::MaxPool2DNode &node) override;
+ virtual Stage generate(const graph::operation::AvgPool2DNode &node) override;
+ virtual Stage generate(const graph::operation::ConcatNode &node) override;
+ virtual Stage generate(const graph::operation::FullyConnectedNode &node) override;
+ virtual Stage generate(const graph::operation::ReshapeNode &node) override;
+ virtual Stage generate(const graph::operation::SoftmaxNode &node) override;
+ virtual Stage generate(const graph::operation::NOPNode &node) override;
+ virtual Stage generate(const graph::operation::PermuteNode &node) override;
virtual Stage generate(const graph::operation::AddNode &node) override;
private:
{
virtual ~IInitializerGenerator() = default;
- virtual Initializer generate(const graph::operation::Conv2D::Implicit::Node &)
+ virtual Initializer generate(const graph::operation::Conv2DNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::MaxPool2DNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::AvgPool2DNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::ConcatNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::FullyConnectedNode &)
{
return Initializer{};
}
- virtual Initializer generate(const graph::operation::MaxPool2D::Implicit::Node &)
- {
- return Initializer{};
- }
- virtual Initializer generate(const graph::operation::AvgPool2D::Implicit::Node &)
- {
- return Initializer{};
- }
- virtual Initializer generate(const graph::operation::Concat::Node &) { return Initializer{}; }
- virtual Initializer generate(const graph::operation::FullyConnected::Node &)
- {
- return Initializer{};
- }
- virtual Initializer generate(const graph::operation::Reshape::Node &) { return Initializer{}; }
- virtual Initializer generate(const graph::operation::Softmax::Node &) { return Initializer{}; }
- virtual Initializer generate(const graph::operation::NOP::Node &) { return Initializer{}; }
- virtual Initializer generate(const graph::operation::Permute::Node &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::ReshapeNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::SoftmaxNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::NOPNode &) { return Initializer{}; }
+ virtual Initializer generate(const graph::operation::PermuteNode &) { return Initializer{}; }
virtual Initializer generate(const graph::operation::AddNode &) { return Initializer{}; }
};
virtual std::shared_ptr<ITensorBuilder> tensor_builder() = 0;
- virtual Stage generate(const graph::operation::Conv2D::Implicit::Node &node) = 0;
- virtual Stage generate(const graph::operation::MaxPool2D::Implicit::Node &node) = 0;
- virtual Stage generate(const graph::operation::AvgPool2D::Implicit::Node &node) = 0;
- virtual Stage generate(const graph::operation::Concat::Node &node) = 0;
- virtual Stage generate(const graph::operation::FullyConnected::Node &node) = 0;
- virtual Stage generate(const graph::operation::Reshape::Node &node) = 0;
- virtual Stage generate(const graph::operation::Softmax::Node &node) = 0;
- virtual Stage generate(const graph::operation::NOP::Node &node) = 0;
- virtual Stage generate(const graph::operation::Permute::Node &node) = 0;
+ virtual Stage generate(const graph::operation::Conv2DNode &node) = 0;
+ virtual Stage generate(const graph::operation::MaxPool2DNode &node) = 0;
+ virtual Stage generate(const graph::operation::AvgPool2DNode &node) = 0;
+ virtual Stage generate(const graph::operation::ConcatNode &node) = 0;
+ virtual Stage generate(const graph::operation::FullyConnectedNode &node) = 0;
+ virtual Stage generate(const graph::operation::ReshapeNode &node) = 0;
+ virtual Stage generate(const graph::operation::SoftmaxNode &node) = 0;
+ virtual Stage generate(const graph::operation::NOPNode &node) = 0;
+ virtual Stage generate(const graph::operation::PermuteNode &node) = 0;
virtual Stage generate(const graph::operation::AddNode &node) = 0;
};
if (backend_all_str.compare("none") != 0)
{
VERBOSE(BackendResolver) << "Use backend for all ops: " << backend_all_str << std::endl;
-#define OP(InternalName, NnApiName) \
- { \
- auto backend = _backend_manager->get(backend_all_str); \
- _gen_map[typeid(graph::operation::InternalName::Node)] = backend; \
+#define OP(InternalName, NnApiName) \
+ { \
+ auto backend = _backend_manager->get(backend_all_str); \
+ _gen_map[typeid(graph::operation::InternalName)] = backend; \
}
#include "graph/operation/Op.lst"
#undef OP
::nnfw::util::EnvVar{std::string("OP_BACKEND_") + #NnApiName}.asString("acl_cl"); \
auto backend = _backend_manager->get(backend_str); \
VERBOSE(BackendResolver) << "backend for " << #NnApiName << ": " << backend_str << std::endl; \
- _gen_map[typeid(graph::operation::InternalName::Node)] = backend; \
+ _gen_map[typeid(graph::operation::InternalName)] = backend; \
}
#include "graph/operation/Op.lst"
namespace codegen
{
-void Planner::visit(const graph::operation::Conv2D::Implicit::Node &node)
+void Planner::visit(const graph::operation::Conv2DNode &node)
{
// backend
auto backend = node.lower_info()->backend();
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::MaxPool2D::Implicit::Node &node)
+void Planner::visit(const graph::operation::MaxPool2DNode &node)
{
// backend
auto backend = node.lower_info()->backend();
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::AvgPool2D::Implicit::Node &node)
+void Planner::visit(const graph::operation::AvgPool2DNode &node)
{
// backend
auto backend = node.lower_info()->backend();
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::Concat::Node &node)
+void Planner::visit(const graph::operation::ConcatNode &node)
{
// NOTE This implementation assumes concat over feature depth
// TODO Remove this assumption
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::FullyConnected::Node &node)
+void Planner::visit(const graph::operation::FullyConnectedNode &node)
{
VERBOSE(FullyConnected) << "Configure FULLY_CONNECTED operation" << std::endl;
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::Reshape::Node &node)
+void Planner::visit(const graph::operation::ReshapeNode &node)
{
const auto output_index{node.getOutputs().at(0)};
const auto input_index{node.getInputs().at(0)};
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::Softmax::Node &node)
+void Planner::visit(const graph::operation::SoftmaxNode &node)
{
VERBOSE(Softmax) << "Configure SOFTMAX operation" << std::endl;
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::NOP::Node &node)
+void Planner::visit(const graph::operation::NOPNode &node)
{
// backend
auto backend = node.lower_info()->backend();
_builder.addStage(stage_gen->generate(node));
}
-void Planner::visit(const graph::operation::Permute::Node &node)
+void Planner::visit(const graph::operation::PermuteNode &node)
{
VERBOSE(Permute) << "Configure Permute operation" << std::endl;
}
public:
- virtual void visit(const graph::operation::Conv2D::Implicit::Node &) override;
- virtual void visit(const graph::operation::MaxPool2D::Implicit::Node &) override;
- virtual void visit(const graph::operation::AvgPool2D::Implicit::Node &) override;
- virtual void visit(const graph::operation::Concat::Node &) override;
- virtual void visit(const graph::operation::Reshape::Node &) override;
- virtual void visit(const graph::operation::FullyConnected::Node &) override;
- virtual void visit(const graph::operation::Softmax::Node &) override;
- virtual void visit(const graph::operation::NOP::Node &) override;
- virtual void visit(const graph::operation::Permute::Node &) override;
+ virtual void visit(const graph::operation::Conv2DNode &) override;
+ virtual void visit(const graph::operation::MaxPool2DNode &) override;
+ virtual void visit(const graph::operation::AvgPool2DNode &) override;
+ virtual void visit(const graph::operation::ConcatNode &) override;
+ virtual void visit(const graph::operation::ReshapeNode &) override;
+ virtual void visit(const graph::operation::FullyConnectedNode &) override;
+ virtual void visit(const graph::operation::SoftmaxNode &) override;
+ virtual void visit(const graph::operation::NOPNode &) override;
+ virtual void visit(const graph::operation::PermuteNode &) override;
virtual void visit(const graph::operation::AddNode &) override;
private:
if (inputCount == 7)
{
- using GraphNode = neurun::graph::operation::Conv2D::Implicit::Node;
+ using GraphNode = neurun::graph::operation::Conv2DNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
}
if (inputCount == 7)
{
- using GraphNode = neurun::graph::operation::MaxPool2D::Implicit::Node;
+ using GraphNode = neurun::graph::operation::MaxPool2DNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
}
if (inputCount == 7)
{
- using GraphNode = neurun::graph::operation::AvgPool2D::Implicit::Node;
+ using GraphNode = neurun::graph::operation::AvgPool2DNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
}
}
case ANEURALNETWORKS_CONCATENATION:
{
- using GraphNode = neurun::graph::operation::Concat::Node;
+ using GraphNode = neurun::graph::operation::ConcatNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
}
case ANEURALNETWORKS_RESHAPE:
{
- using GraphNode = neurun::graph::operation::Reshape::Node;
+ using GraphNode = neurun::graph::operation::ReshapeNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
}
case ANEURALNETWORKS_FULLY_CONNECTED:
{
- using GraphNode = neurun::graph::operation::FullyConnected::Node;
+ using GraphNode = neurun::graph::operation::FullyConnectedNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
}
case ANEURALNETWORKS_SOFTMAX:
{
- using GraphNode = neurun::graph::operation::Softmax::Node;
+ using GraphNode = neurun::graph::operation::SoftmaxNode;
graph.addOperation(nnfw::make_unique<GraphNode>(node_param));
using namespace neurun::graph::operation;
-void Dumper::visit(const Conv2D::Implicit::Node &node)
+void Dumper::visit(const Conv2DNode &node)
{
VERBOSE(LIR) << "* Conv2D(Implicit)" << std::endl;
VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ") Kernel("
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const MaxPool2D::Implicit::Node &node)
+void Dumper::visit(const MaxPool2DNode &node)
{
VERBOSE(LIR) << "* MaxPool2D(Implicit)" << std::endl;
VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const AvgPool2D::Implicit::Node &node)
+void Dumper::visit(const AvgPool2DNode &node)
{
VERBOSE(LIR) << "* AvgPool2D(Implicit)" << std::endl;
VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const Concat::Node &node)
+void Dumper::visit(const ConcatNode &node)
{
VERBOSE(LIR) << "* Concat" << std::endl;
std::string inputs;
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const FullyConnected::Node &node)
+void Dumper::visit(const FullyConnectedNode &node)
{
VERBOSE(LIR) << "* FullyConnected" << std::endl;
VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ") Weight("
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const Reshape::Node &node)
+void Dumper::visit(const ReshapeNode &node)
{
VERBOSE(LIR) << "* Reshape" << std::endl;
// TODO The shape index should be "node.getInputs().at(1).value()" but not valid for now
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const Softmax::Node &node)
+void Dumper::visit(const SoftmaxNode &node)
{
VERBOSE(LIR) << "* Softmax" << std::endl;
VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
VERBOSE(LIR) << " - Output : OFM(" << node.getOutputs().at(0).value() << ")" << std::endl;
}
-void Dumper::visit(const NOP::Node &node)
+void Dumper::visit(const NOPNode &node)
{
VERBOSE(LIR) << "* NOP" << std::endl;
std::string inputs, outputs;
VERBOSE(LIR) << " - Outputs : OFM(" << outputs << ")" << std::endl;
}
-void Dumper::visit(const Permute::Node &node)
+void Dumper::visit(const PermuteNode &node)
{
VERBOSE(LIR) << "* Permute" << std::endl;
VERBOSE(LIR) << " - Inputs : IFM(" << node.getInputs().at(0).value() << ")" << std::endl;
Dumper() = default;
public:
- void visit(const graph::operation::Conv2D::Implicit::Node &node) override;
- void visit(const graph::operation::MaxPool2D::Implicit::Node &node) override;
- void visit(const graph::operation::AvgPool2D::Implicit::Node &node) override;
- void visit(const graph::operation::Concat::Node &node) override;
- void visit(const graph::operation::FullyConnected::Node &node) override;
- void visit(const graph::operation::Reshape::Node &node) override;
- void visit(const graph::operation::Softmax::Node &node) override;
- void visit(const graph::operation::NOP::Node &node) override;
- void visit(const graph::operation::Permute::Node &node) override;
+ void visit(const graph::operation::Conv2DNode &node) override;
+ void visit(const graph::operation::MaxPool2DNode &node) override;
+ void visit(const graph::operation::AvgPool2DNode &node) override;
+ void visit(const graph::operation::ConcatNode &node) override;
+ void visit(const graph::operation::FullyConnectedNode &node) override;
+ void visit(const graph::operation::ReshapeNode &node) override;
+ void visit(const graph::operation::SoftmaxNode &node) override;
+ void visit(const graph::operation::NOPNode &node) override;
+ void visit(const graph::operation::PermuteNode &node) override;
void visit(const graph::operation::AddNode &node) override;
};
namespace operation
{
-enum Input
-{
- LHS = 0,
- RHS
-};
-
-struct Param
-{
- operand::Index activation_index;
-};
-
class AddNode : public graph::operation::Node
{
public:
AddNode(const graph::operation::Node::InitParam &init_param);
+ enum Input
+ {
+ LHS = 0,
+ RHS
+ };
+
+ struct Param
+ {
+ operand::Index activation_index;
+ };
+
public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "Add"; }
{
namespace operation
{
-namespace AvgPool2D
-{
-namespace Implicit
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void AvgPool2DNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+AvgPool2DNode::AvgPool2DNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createExact(1u)}
{
assert(init_param.input_count == 7);
_param.activation_index = operand::Index{init_param.inputs[6]};
}
-} // namespace Implicit
-} // namespace AvgPool2D
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace AvgPool2D
-{
-namespace Implicit
-{
-enum Input
+class AvgPool2DNode : public graph::operation::Node
{
- INPUT = 0
-};
+public:
+ AvgPool2DNode(const graph::operation::Node::InitParam &init_param);
-struct Param
-{
- operand::Index kw_index;
- operand::Index kh_index;
+ enum Input
+ {
+ INPUT = 0
+ };
- operand::Index hstride_index;
- operand::Index vstride_index;
+ struct Param
+ {
+ operand::Index kw_index;
+ operand::Index kh_index;
- operand::Index padding_index;
- operand::Index activation_index;
-};
+ operand::Index hstride_index;
+ operand::Index vstride_index;
-class Node : public graph::operation::Node
-{
-public:
- Node(const graph::operation::Node::InitParam &init_param);
+ operand::Index padding_index;
+ operand::Index activation_index;
+ };
public:
virtual void accept(NodeVisitor &&) const override;
Param _param;
};
-} // namespace Implicit
-} // namespace AvgPool2D
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Concat
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void ConcatNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+ConcatNode::ConcatNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createAtLeast(2u)}
{
  assert(init_param.input_count >= 2); // At least one input tensor and axis
_param.axis_index = operand::Index{init_param.inputs[init_param.input_count - 1]};
}
-} // namespace Concat
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Concat
-{
-struct Param
-{
- operand::Index axis_index;
-};
-
-class Node : public graph::operation::Node
+class ConcatNode : public graph::operation::Node
{
public:
- Node(const graph::operation::Node::InitParam &init_param);
+ ConcatNode(const graph::operation::Node::InitParam &init_param);
+
+ struct Param
+ {
+ operand::Index axis_index;
+ };
public:
virtual void accept(NodeVisitor &&) const override;
Param _param;
};
-} // namespace Concat
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Conv2D
-{
-namespace Implicit
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void Conv2DNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+Conv2DNode::Conv2DNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createExact(3u)}
{
assert(init_param.input_count == 7 && init_param.output_count == 1);
_param.activation_index = operand::Index{init_param.inputs[6]};
}
-} // namespace Implicit
-} // namespace Conv2D
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Conv2D
-{
-namespace Implicit
-{
-enum Input
+class Conv2DNode : public graph::operation::Node
{
- INPUT = 0,
- KERNEL,
- BIAS
-};
+public:
+ Conv2DNode(const graph::operation::Node::InitParam &);
-struct Param
-{
- operand::Index hstride_index;
- operand::Index vstride_index;
+ enum Input
+ {
+ INPUT = 0,
+ KERNEL,
+ BIAS
+ };
- operand::Index padding_index;
- operand::Index activation_index;
-};
+ struct Param
+ {
+ operand::Index hstride_index;
+ operand::Index vstride_index;
-class Node : public graph::operation::Node
-{
-public:
- Node(const graph::operation::Node::InitParam &);
+ operand::Index padding_index;
+ operand::Index activation_index;
+ };
public:
virtual void accept(NodeVisitor &&) const override;
Param _param;
};
-} // namespace Implicit
-} // namespace Conv2D
-} // namespace coperation
+} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace FullyConnected
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void FullyConnectedNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+FullyConnectedNode::FullyConnectedNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createExact(3u)}
{
assert(init_param.input_count == 4 && init_param.output_count == 1);
_param.activation_index = operand::Index{init_param.inputs[3]};
}
-} // namespace FullyConnected
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace FullyConnected
-{
-enum Input
+class FullyConnectedNode : public graph::operation::Node
{
- INPUT = 0,
- WEIGHT,
- BIAS
-};
+public:
+ FullyConnectedNode(const graph::operation::Node::InitParam &init_param);
-struct Param
-{
- operand::Index activation_index;
-};
+ enum Input
+ {
+ INPUT = 0,
+ WEIGHT,
+ BIAS
+ };
-class Node : public graph::operation::Node
-{
-public:
- Node(const graph::operation::Node::InitParam &init_param);
+ struct Param
+ {
+ operand::Index activation_index;
+ };
public:
virtual void accept(NodeVisitor &&) const override;
Param _param;
};
-} // namespace FullyConnected
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace MaxPool2D
-{
-namespace Implicit
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void MaxPool2DNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+MaxPool2DNode::MaxPool2DNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createExact(1u)}
{
assert(init_param.input_count == 7);
_param.activation_index = operand::Index{init_param.inputs[6]};
}
-} // namespace Implicit
-} // namespace MaxPool2D
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace MaxPool2D
-{
-namespace Implicit
-{
-enum Input
+class MaxPool2DNode : public graph::operation::Node
{
- INPUT = 0
-};
+public:
+ MaxPool2DNode(const graph::operation::Node::InitParam &init_param);
-struct Param
-{
- operand::Index kw_index;
- operand::Index kh_index;
+ enum Input
+ {
+ INPUT = 0
+ };
- operand::Index hstride_index;
- operand::Index vstride_index;
+ struct Param
+ {
+ operand::Index kw_index;
+ operand::Index kh_index;
- operand::Index padding_index;
- operand::Index activation_index;
-};
+ operand::Index hstride_index;
+ operand::Index vstride_index;
+
+ operand::Index padding_index;
+ operand::Index activation_index;
+ };
-class Node : public graph::operation::Node
-{
public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "MaxPool2D"; }
public:
- Node(const graph::operation::Node::InitParam &init_param);
-
-public:
const Param ¶m() const { return _param; }
private:
Param _param;
};
-} // namespace Implicit
-} // namespace MaxPool2D
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace NOP
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void NOPNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &)
+NOPNode::NOPNode(const graph::operation::Node::InitParam &)
: operation::Node{OperandConstraint::createExact(1u)}
{
}
-} // namespace NOP
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace NOP
-{
-class Node : public graph::operation::Node
+class NOPNode : public graph::operation::Node
{
public:
- Node(const graph::operation::Node::InitParam &);
+ NOPNode(const graph::operation::Node::InitParam &);
public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "NOP"; }
};
-} // namespace NOP
} // namespace operation
} // namespace graph
} // namespace neurun
{
virtual ~NodeVisitor() = default;
- virtual void visit(const Conv2D::Implicit::Node &) = 0;
- virtual void visit(const MaxPool2D::Implicit::Node &) = 0;
- virtual void visit(const AvgPool2D::Implicit::Node &) = 0;
- virtual void visit(const Concat::Node &) = 0;
- virtual void visit(const Reshape::Node &) = 0;
- virtual void visit(const FullyConnected::Node &) = 0;
- virtual void visit(const Softmax::Node &) = 0;
- virtual void visit(const NOP::Node &) = 0;
- virtual void visit(const Permute::Node &) = 0;
+ virtual void visit(const Conv2DNode &) = 0;
+ virtual void visit(const MaxPool2DNode &) = 0;
+ virtual void visit(const AvgPool2DNode &) = 0;
+ virtual void visit(const ConcatNode &) = 0;
+ virtual void visit(const ReshapeNode &) = 0;
+ virtual void visit(const FullyConnectedNode &) = 0;
+ virtual void visit(const SoftmaxNode &) = 0;
+ virtual void visit(const NOPNode &) = 0;
+ virtual void visit(const PermuteNode &) = 0;
virtual void visit(const AddNode &) = 0;
};
// NOTE The relation between "Internal Name" and "NN API Name" is "1 : N".
-// Internal Name | NN API Name
-OP(Conv2D::Implicit , CONV_2D)
-OP(AvgPool2D::Implicit , AVERAGE_POOL_2D)
-OP(MaxPool2D::Implicit , MAX_POOL_2D)
-OP(Concat , CONCATENATION)
-OP(FullyConnected , FULLY_CONNECTED)
-OP(Reshape , RESHAPE)
-OP(Softmax , SOFTMAX)
+// Internal Name | NN API Name
+OP(Conv2DNode , CONV_2D)
+OP(AvgPool2DNode , AVERAGE_POOL_2D)
+OP(MaxPool2DNode , MAX_POOL_2D)
+OP(ConcatNode , CONCATENATION)
+OP(FullyConnectedNode , FULLY_CONNECTED)
+OP(ReshapeNode , RESHAPE)
+OP(SoftmaxNode , SOFTMAX)
{
namespace operation
{
-namespace Permute
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void PermuteNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const operand::Index &input, const operand::Index &output)
+PermuteNode::PermuteNode(const operand::Index &input, const operand::Index &output)
: operation::Node{OperandConstraint::createExact(1u)}
{
setInputs({input});
setOutputs({output});
}
-} // namespace Permute
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Permute
-{
-class Node : public graph::operation::Node
+class PermuteNode : public graph::operation::Node
{
public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "Permute"; }
public:
- Node(const operand::Index &input, const operand::Index &output);
+ PermuteNode(const operand::Index &input, const operand::Index &output);
};
-} // namespace Permute
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Reshape
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void ReshapeNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+ReshapeNode::ReshapeNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createExact(1u)}
{
assert(init_param.input_count == 2 && init_param.output_count == 1);
setOutputs({init_param.outputs[0]});
}
-} // namespace Reshape
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Reshape
-{
-enum Input
+class ReshapeNode : public graph::operation::Node
{
- INPUT = 0
-};
+public:
+ ReshapeNode(const graph::operation::Node::InitParam &init_param);
+
+ enum Input
+ {
+ INPUT = 0
+ };
-class Node : public graph::operation::Node
-{
public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "Reshape"; }
-
-public:
- Node(const graph::operation::Node::InitParam &init_param);
};
-} // namespace Reshape
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Softmax
-{
-void Node::accept(NodeVisitor &&v) const { v.visit(*this); }
+void SoftmaxNode::accept(NodeVisitor &&v) const { v.visit(*this); }
-Node::Node(const graph::operation::Node::InitParam &init_param)
+SoftmaxNode::SoftmaxNode(const graph::operation::Node::InitParam &init_param)
: operation::Node{OperandConstraint::createExact(1u)}
{
assert(init_param.input_count == 2 && init_param.output_count == 1);
_param.scale_index = operand::Index{init_param.inputs[1]};
}
-} // namespace Softmax
} // namespace operation
} // namespace graph
} // namespace neurun
{
namespace operation
{
-namespace Softmax
-{
-enum Input
+class SoftmaxNode : public graph::operation::Node
{
- INPUT = 0
-};
+public:
+ SoftmaxNode(const graph::operation::Node::InitParam &init_param);
+ enum Input
+ {
+ INPUT = 0
+ };
-struct Param
-{
- operand::Index scale_index;
-};
+ struct Param
+ {
+ operand::Index scale_index;
+ };
-class Node : public graph::operation::Node
-{
public:
virtual void accept(NodeVisitor &&) const override;
virtual std::string getName() const override { return "SoftMax"; }
public:
- Node(const graph::operation::Node::InitParam &init_param);
-
-public:
const Param ¶m() const { return _param; }
private:
Param _param;
};
-} // namespace Softmax
} // namespace operation
} // namespace graph
} // namespace neurun
operand.lower_info()->addUseBackend(operand.lower_info()->def_backends().getOnlyElement());
// Insert permute operation to the graph
- auto insert_node = nnfw::make_unique<operation::Permute::Node>(operand_index, out_operand_index);
+ auto insert_node = nnfw::make_unique<operation::PermuteNode>(operand_index, out_operand_index);
insert_node->lower_info(
nnfw::make_unique<operation::LowerInfo>(_graph.backend_resolver()->getDefaultBackend()));
}
uint32_t outoperand = graph.addOperand(shape, type).asInt();
- using GraphNode = neurun::graph::operation::Conv2D::Implicit::Node;
+ using GraphNode = neurun::graph::operation::Conv2DNode;
auto conv = nnfw::make_unique<GraphNode>(GraphNodeInitParam{7, params.data(), 1, &outoperand});
ASSERT_EQ(conv->getInputs().at(Index{0}).asInt(), params[0]);
}
uint32_t outoperand = graph.addOperand(shape, type).asInt();
- using GraphNode = neurun::graph::operation::Concat::Node;
+ using GraphNode = neurun::graph::operation::ConcatNode;
auto concat = nnfw::make_unique<GraphNode>(GraphNodeInitParam{7, params.data(), 1, &outoperand});