/// Registers (or replaces) the external input tensor for the graph node `name`.
/// NOTE: the previous implementation used only map::emplace, which is a no-op
/// when the key already exists — a second setInput() call with the same name
/// silently kept the stale tensor. Erase first so the latest tensor wins.
void NNInterpreter::setInput(const std::string &name, const TensorVariant& t) {
  data.erase(name);
  data.emplace(name, t);
}
void NNInterpreter::visit(ops::VariableOp& op) {
// NOTE(review): the "-"-prefixed line below is leftover diff-removal residue
// (not valid C++), and this block is truncated — the `if` has no body and
// there is an unbalanced '}'. Restore the full body from version control.
- mapByName(&op);
(void)op;
// Look up the externally supplied input tensor by this variable's name;
// presumably the truncated code handled the not-found case (TODO confirm).
auto it = data.find(op.getName());
if( it == data.end() )
}
}
// NOTE(review): removed leftover diff residue of the deleted
// getOperationResult() accessor — the "-"-prefixed lines were not valid C++
// and made the file uncompilable.
void NNInterpreter::visit(ops::ConcatOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the loop body and the concat kernel invocation are missing.
- mapByName(&op);
// Gather the output tensors of every predecessor as the concat inputs.
auto &operands = op.getPrevNodes();
std::vector<TensorVariant> ins;
for (auto &in : operands)
}
/// Executes a 2D convolution: fetches the single input tensor (output
/// `operand.index` of the producing op) and stores the kernel result under
/// this op's id. (Removed a stray "-"-prefixed diff-residue line that made
/// the file uncompilable.)
void NNInterpreter::visit(ops::Conv2DOp& op) {
  auto operand = op.getPrevNodes()[0];
  var(op.getId()) = Conv2D(var(operand.op->getId())[operand.index], op)();
}
/// Executes a reshape: the input data is reinterpreted with the op's declared
/// output shape. (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::ReshapeOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto input = var(operand.op->getId())[operand.index];
  var(op.getId()) = Reshape<float>(input, op.getOutputShape(0))();
}
void NNInterpreter::visit(ops::ReluOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the Fill arguments and lambda body are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
Tensor<float> input(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(
}
void NNInterpreter::visit(ops::SigmoidOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the element-wise lambda body and closing call are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
Tensor<float> input(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(op.getOutputShape(0), [&input](const Index& id) {
}
/// Executes softmax over the configured axis of the single input tensor.
/// (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::SoftmaxOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto input = var(operand.op->getId())[operand.index];
  var(op.getId()) = Softmax(op.getInputShape(0), input, op.getAxis())();
}
/// Executes pooling (type/window/strides are carried by `op`) on the single
/// input tensor. (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::PoolOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto input = var(operand.op->getId())[operand.index];
  var(op.getId()) = Pool(input, op)();
}
/// Executes a fully-connected (dense) layer on the single input tensor.
/// (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::FullyConnectedOp& op) {
  auto operand = op.getPrevNodes()[0];
  TensorVariant input = var(operand.op->getId())[operand.index];
  var(op.getId()) = FullyConnected<float>(input, op)();
}
/// Executes a GEMM (general matrix multiply) kernel on the single input
/// tensor. (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::GemmOp& op) {
  auto operand = op.getPrevNodes()[0];
  TensorVariant input = var(operand.op->getId())[operand.index];
  var(op.getId()) = Gemm<float>(input, op)();
}
void NNInterpreter::visit(ops::CappedReluOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the element-wise lambda body and closing call are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
Tensor<float> input(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(op.getOutputShape(0), [&input, &op](const Index &id) {
}
/// Executes a depthwise 2D convolution on the single input tensor.
/// (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::DepthwiseConv2DOp& op){
  auto operand = op.getPrevNodes()[0];
  TensorVariant input(var(operand.op->getId())[operand.index]);
  var(op.getId()) = DepthwiseConv2D(input, op)();
}
/// Adds the op's bias weights to the single input tensor, producing a tensor
/// of the declared output shape. (Removed a stray "-"-prefixed diff-residue
/// line.)
void NNInterpreter::visit(ops::BiasAddOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto input = var(operand.op->getId())[operand.index];
  var(op.getId()) = BiasAdd(input, op.getWeights(), op.getOutputShape(0))();
}
/// Batch normalization — NOT implemented yet: the input is fetched so the
/// data-flow dependency is visible, but no output is produced. (Removed a
/// stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::BatchNormOp& op) {
  auto operand = op.getPrevNodes()[0];
  TensorVariant input(var(operand.op->getId())[operand.index]);
  (void)input; // TODO implement this
}
/// Scale — NOT implemented yet: the input is fetched so the data-flow
/// dependency is visible, but no output is produced.
/// Fixes: the function was missing its closing brace (it ran straight into
/// the next visit() definition) and carried a stray "-"-prefixed diff-residue
/// line; both made the file uncompilable.
void NNInterpreter::visit(ops::ScaleOp& op) {
  auto operand = op.getPrevNodes()[0];
  TensorVariant input(var(operand.op->getId())[operand.index]);
  (void)input; // TODO implement this
}
void NNInterpreter::visit(ops::SliceOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the slicing lambda body and closing call are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
auto input = Tensor<float>(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(op.getOutputShape(0), [&input, &op](const Index& id) {
}
/// Executes dropout on the single input tensor. (Removed a stray
/// "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::DropoutOp& op) {
  auto operand = op.getPrevNodes()[0];
  TensorVariant input(var(operand.op->getId())[operand.index]);
  // TODO implement this
  var(op.getId()) = Dropout<float>(input, op)();
}
// NOTE(review): removed leftover diff residue of the deleted mapByName()
// name-registration helper — the "-"-prefixed lines were not valid C++ and
// made the file uncompilable.
void NNInterpreter::visit(ops::TanhOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the element-wise lambda body and closing call are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
Tensor<float> input(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(op.getOutputShape(0), [&input, &op](const Index &id) {
}
void NNInterpreter::visit(ops::ElementwiseOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the reserve/collect loop and the kernel call are missing.
- mapByName(&op);
auto operands = op.getPrevNodes();
std::vector<Tensor<float>> ins;
// Reserve space for tensor variants to avoid reference invalidation when pushing into vector
}
/// Executes a transposed (de-)convolution on the single input tensor.
/// (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::DeConv2DOp& op) {
  auto operand = op.getPrevNodes()[0];
  var(op.getId()) = DeConv2D(var(operand.op->getId())[operand.index], op)();
}
void NNInterpreter::visit(ops::EluOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the element-wise lambda body and closing call are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
Tensor<float> input(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(op.getOutputShape(0), [&input, &op](const Index &id) {
}
/// Squeeze is just a special case of reshape.
/// NOTE(review): the input is fetched but never used here — the actual
/// Reshape invocation appears to have been lost from this chunk; confirm
/// against version control. (Also removed a stray "-"-prefixed diff-residue
/// line.)
void NNInterpreter::visit(ops::SqueezeOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto& input = var(operand.op->getId())[operand.index];
  (void)input;
}
/// Executes padding of the single input tensor per the op's configuration.
/// (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::PadOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto& input = var(operand.op->getId())[operand.index];
  var(op.getId()) = Pad(input, op)();
}
void NNInterpreter::visit(ops::SqrtOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the element-wise lambda body and closing call are missing.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
auto input = Tensor<float>(var(operand.op->getId())[operand.index]);
var(op.getId()) = Fill<float>(op.getOutputShape(0), [&input](const Index id) {
}
void NNInterpreter::visit(ops::ResizeOp& op) {
// NOTE(review): the "-" line below is diff residue (not valid C++); the block
// is truncated — the switch cases (per resize mode) and the function's
// closing brace are missing from this chunk.
- mapByName(&op);
auto operand = op.getPrevNodes()[0];
// Typed float accessor over the producer's output tensor.
Tensor<float> input(var(operand.op->getId())[operand.index]);
switch (op.getMode()) {
}
/// Float reduction op. Computes how many input elements fold into each output
/// element; the integer division result should always be exact ("an integer
/// in a float"). (Removed a stray "-"-prefixed diff-residue line.)
/// NOTE(review): reduction_area is computed but never used here — the actual
/// reduction kernel appears to be missing from this chunk; confirm against
/// version control.
void NNInterpreter::visit(ops::ReduceFOp& op) {
  const float reduction_area =
      static_cast<float>(op.getInputShape(0).numElements() / op.getOutputShape(0).numElements());
  (void)reduction_area;
}
/// Executes an axis transposition of the single input tensor.
/// (Removed a stray "-"-prefixed diff-residue line.)
void NNInterpreter::visit(ops::TransposeOp& op) {
  auto operand = op.getPrevNodes()[0];
  auto& input = var(operand.op->getId())[operand.index];
  var(op.getId()) = Transpose(input, op)();
}
void NNInterpreter::visit(ops::GatherOp& op) {
- mapByName(&op);
auto data_descr = op.getPrevNodes()[0];
auto indices_descr = op.getPrevNodes()[1];
const auto& data = var(data_descr.op->getId())[data_descr.index];