class CV_EXPORTS ChannelsPReLULayer : public ActivationLayer
{
public:
- static Ptr<ChannelsPReLULayer> create(const LayerParams& params);
+ static Ptr<Layer> create(const LayerParams& params);
};
class CV_EXPORTS ELULayer : public ActivationLayer
shape.push_back((int)_shape.dim(i));
}
else
- CV_Error(Error::StsError, "Unknown shape of input blob");
+ shape.resize(1, 1); // No dims in the proto at all: treat the blob as a scalar of shape {1}.
}
void blobFromProto(const caffe::BlobProto &pbBlob, cv::Mat &dstBlob)
struct BlobNote
{
BlobNote(const std::string &_name, int _layerId, int _outNum) :
- name(_name.c_str()), layerId(_layerId), outNum(_outNum) {}
+ name(_name), layerId(_layerId), outNum(_outNum) {}
- const char *name;
+ std::string name;
int layerId, outNum;
};
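Side note on the BlobNote hunk: the old field borrowed the buffer returned by _name.c_str(), so the stored pointer dangles as soon as the caller's string goes out of scope or is reallocated; owning a std::string copies the characters instead. A minimal sketch of the hazard, with hypothetical names that are not taken from the importer:

// Hypothetical illustration of the lifetime bug fixed above, not importer code.
#include <iostream>
#include <string>

struct OldNote
{
    OldNote(const std::string &_name) : name(_name.c_str()) {}  // borrows the buffer
    const char *name;                                           // old layout: raw pointer
};

int main()
{
    OldNote n(std::string("conv1") + "/output");  // the temporary string dies after this line
    std::cout << n.name << std::endl;             // undefined behaviour: dangling pointer
    return 0;
}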
CV_DNN_REGISTER_LAYER_CLASS(ReLU, ReLULayer);
CV_DNN_REGISTER_LAYER_CLASS(ReLU6, ReLU6Layer);
CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer);
+ CV_DNN_REGISTER_LAYER_CLASS(PReLU, ChannelsPReLULayer);
CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer);
CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer);
CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer);
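Registering the extra "PReLU" spelling lets importers that emit that type name reach the same implementation as "ChannelsPReLU". A hedged sketch of resolving it through the layer factory; the parameter values are made up and cv::dnn::LayerFactory::createLayerInstance is assumed to be available in this build:

// Hypothetical factory lookup, not part of the patch.
#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::LayerParams params;
    params.type = "PReLU";
    params.blobs.push_back(cv::Mat(1, 1, CV_32F, cv::Scalar(0.1f)));  // learned slope

    // Both "PReLU" and "ChannelsPReLU" now resolve to ChannelsPReLULayer::create.
    cv::Ptr<cv::dnn::Layer> layer =
        cv::dnn::LayerFactory::createLayerInstance("PReLU", params);
    return layer.empty() ? 1 : 0;
}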
return l;
}
-Ptr<ChannelsPReLULayer> ChannelsPReLULayer::create(const LayerParams& params)
+Ptr<Layer> ChannelsPReLULayer::create(const LayerParams& params)
{
+ CV_Assert(params.blobs.size() == 1);
+ if (params.blobs[0].total() == 1)
+ {
+ LayerParams reluParams = params;
+ reluParams.set("negative_slope", params.blobs[0].at<float>(0));
+ return ReLULayer::create(reluParams);
+ }
Ptr<ChannelsPReLULayer> l(new ElementWiseLayer<ChannelsPReLUFunctor>(ChannelsPReLUFunctor(params.blobs[0])));
l->setParamsFrom(params);
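The early return above relies on the identity that PReLU with a single shared slope a is exactly a leaky ReLU, f(x) = x for x >= 0 and f(x) = a*x for x < 0, so forwarding to ReLULayer with negative_slope = a avoids the per-channel path; that path is only needed when the slope blob holds more than one element. A sketch of both branches, with arbitrary blob contents:

// Hypothetical calls, not from the test suite.
#include <opencv2/dnn.hpp>

int main()
{
    // One shared slope: dispatched to ReLULayer with negative_slope = 0.25.
    cv::dnn::LayerParams scalarSlope;
    scalarSlope.blobs.push_back(cv::Mat(1, 1, CV_32F, cv::Scalar(0.25f)));
    cv::Ptr<cv::dnn::Layer> leaky = cv::dnn::ChannelsPReLULayer::create(scalarSlope);

    // One slope per channel: stays on the element-wise ChannelsPReLU path.
    cv::dnn::LayerParams perChannel;
    cv::Mat slopes = (cv::Mat_<float>(1, 3) << 0.1f, 0.2f, 0.3f);
    perChannel.blobs.push_back(slopes);
    cv::Ptr<cv::dnn::Layer> prelu = cv::dnn::ChannelsPReLULayer::create(perChannel);
    return (leaky.empty() || prelu.empty()) ? 1 : 0;
}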
testLayerUsingCaffeModels("layer_eltwise");
}
+TEST(Layer_Test_PReLU, Accuracy)
+{
+ testLayerUsingCaffeModels("layer_prelu", DNN_TARGET_CPU, true);
+}
+
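For reference, roughly the same behaviour can be exercised through the public API instead of the test helper; the file names below are placeholders and the readNetFromCaffe/setInput/forward interface is assumed:

// Rough public-API equivalent of the new test, not part of the patch.
#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::Net net = cv::dnn::readNetFromCaffe("layer_prelu.prototxt",
                                                 "layer_prelu.caffemodel");
    cv::Mat image(cv::Size(5, 4), CV_32FC3, cv::Scalar::all(-1.0));
    net.setInput(cv::dnn::blobFromImage(image));
    cv::Mat out = net.forward();  // negative inputs are scaled by the learned slopes
    return out.empty() ? 1 : 0;
}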
//template<typename XMat>
//static void test_Layer_Concat()
//{