bag_ctx[ofm_name] = ofm_bag;
shape_ctx[ofm_name] = ofm_shape;
}
+ else if (layer.type() == "ReLU")
+ {
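+ // Caffe's ReLU layer consumes exactly one bottom blob and produces one top blob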
+ assert(layer.bottom().size() == 1);
+ assert(layer.top().size() == 1);
+
+ // relu_param (which carries negative_slope for leaky ReLU) is not supported yet
+ // TODO Support negative_slope
+ assert(!layer.has_relu_param());
+
+ // NOTE The current implementation treats ReLU as a Feature op
+ // TODO Support ReLU over general tensors
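+
+ // Look up the IFM bag/shape recorded by the producing layer, and wrap the bag
+ // in a feature object with an explicit CHW layout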
+ const auto ifm_name = layer.bottom(0);
+ const auto ifm_shape = shape_ctx.at(ifm_name);
+ auto ifm_bag = bag_ctx.at(ifm_name);
+ auto ifm_obj = m->entity()->object()->create(morph::caffe::as_feature_shape(ifm_shape));
+
+ ifm_obj->bag(ifm_bag);
+ ifm_obj->reorder<feature::CHWLayout>();
+
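+ // Allocate a fresh bag for the OFM; ReLU is element-wise, so the OFM shape
+ // matches the IFM shape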
+ const auto ofm_name = layer.top(0);
+ const auto ofm_shape = ifm_shape;
+ auto ofm_bag = m->entity()->bag()->create(num_elements(ofm_shape));
+ auto ofm_obj = m->entity()->object()->create(morph::caffe::as_feature_shape(ofm_shape));
+
+ ofm_obj->bag(ofm_bag);
+ ofm_obj->reorder<feature::CHWLayout>();
+
+ // Create a ReLU op
+ auto op = m->entity()->op()->create<coco::ReLU>();
+
+ // Create a UnitF instruction
+ // TODO Use UnitT later
+ auto ins = m->entity()->instr()->create<coco::UnitF>();
+
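+ // Wire the instruction: read from the IFM object, apply the ReLU op, and
+ // write the result to the OFM object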
+ ins->ifm(ifm_obj);
+ ins->ofm(ofm_obj);
+ ins->op(op);
+
+ // Append the instruction to the block
+ blk->instr()->append(ins);
+
+ // Record the OFM bag and shape so downstream layers can look up this output
+ bag_ctx[ofm_name] = ofm_bag;
+ shape_ctx[ofm_name] = ofm_shape;
+ }
else
{
throw std::runtime_error{"Not supported: " + layer.type()};