Set tanh as the default activation function
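
If no activation is specified for an RNN layer, tanh is now applied by default: once when activation layers are resolved during model compilation, and once in the RNNLayer constructor. The RNN unit test is updated to rely on this default instead of setting activation=tanh explicitly.
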
**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped
Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
if (current.getType() == RNNLayer::type) {
// No need to add a separate activation layer for an RNN layer
+ // Default activation is tanh
+ if (act == ActivationType::ACT_NONE)
+ act = ActivationType::ACT_TANH;
current.setActivation(act);
return status;
}
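
In other words, this path keeps the activation inside the RNN layer via setActivation() rather than appending a separate activation layer, and substitutes tanh whenever no activation was requested.
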
* @brief Constructor of RNNLayer
*/
template <typename... Args>
- RNNLayer(unsigned int unit_ = 0, Args... args) :
- Layer(args...),
- unit(unit_) {}
+ RNNLayer(unsigned int unit_ = 0, Args... args) : Layer(args...), unit(unit_) {
+ /* Default Activation Type is tanh */
+ if (getActivationType() == ActivationType::ACT_NONE)
+ setActivation(ActivationType::ACT_TANH);
+ }
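
For reference, a minimal usage sketch of the new default; the header name and the public reachability of getActivationType() here are assumptions for illustration, not part of this patch:

#include <rnn.h>  // assumed header for nntrainer::RNNLayer

int main() {
  // No activation argument: the constructor above falls back to tanh.
  nntrainer::RNNLayer rnn(3 /* unit */);
  // getActivationType() is assumed reachable from here; the patch
  // only shows it being called inside the constructor itself.
  bool ok = rnn.getActivationType() == nntrainer::ActivationType::ACT_TANH;
  return ok ? 0 : 1;
}
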
/**
* @brief Destructor of RNNLayer
typedef nntrainer_abstractLayer<nntrainer::RNNLayer> super;
virtual void prepareLayer() {
- int status =
- setProperty("unit=3 | weight_initializer=ones | activation=tanh");
+ int status = setProperty("unit=3 | weight_initializer=ones");
EXPECT_EQ(status, ML_ERROR_NONE);
setInputDim("2:1:3:3");
setBatch(2);
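
Since the property string above no longer sets activation=tanh, the test now exercises the constructor default directly; the effective activation is still tanh, so the expected results should be unchanged.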