std::shared_ptr<nntrainer::Optimizer> Opt;
Opt = nnopt->optimizer;
-
+
returnable f = [&]() {
return Opt->setProperty(arg_list);
};
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
- virtual int setProperty(std::vector<std::string> values) = 0;
+ virtual int setProperty(std::vector<std::string> values);
/**
* @brief Optimizer Setter
* 11. kernel_size : ( n , m )
* 12. stride : ( n, m )
* 13. padding : ( n, m )
- * 14, pooling_size : ( n,m )
- * 15, pooling : max, average, global_max, global_average
+ * 14. pooling_size : ( n,m )
+ * 15. pooling : max, average, global_max, global_average
+ * 16. flatten : bool
*/
enum class PropertyType {
input_shape = 0,
protected:
-/**
- * @brief check if current layer's weight decay type is l2norm
- * @return bool is weightdecay type is L2 Norm
- */
+ /**
+ * @brief check if current layer's weight decay type is l2norm
+ * @return bool is weightdecay type is L2 Norm
+ */
bool isWeightDecayL2Norm() {
return weight_decay.type == WeightDecayType::l2norm;
}
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
int ActivationLayer::setProperty(std::vector<std::string> values) {
- int status = ML_ERROR_NONE;
-
- if (values.size() != 1) {
- return ML_ERROR_INVALID_PARAMETER;
- }
-
- std::string key;
- std::string value;
-
- status = getKeyValue(values[0], key, value);
- NN_RETURN_STATUS();
-
- if (static_cast<PropertyType>(parseLayerProperty(key))
- != PropertyType::activation) {
- return ML_ERROR_INVALID_PARAMETER;
- }
-
- try {
- this->setActivation((ActiType)parseType(value, TOKEN_ACTI));
- } catch (const std::exception &ex) {
- ml_loge("Error: Not supported Data");
- return ML_ERROR_INVALID_PARAMETER;
- }
- return ML_ERROR_NONE;
+ return Layer::setProperty(values);
}
}; // namespace nntrainer
unsigned int type = parseLayerProperty(key);
switch (static_cast<PropertyType>(type)) {
- case PropertyType::input_shape:
- status = dim.setTensorDim(values[0].c_str());
- break;
- case PropertyType::bias_init_zero: {
- status = setBoolean(bias_init_zero, value);
- NN_RETURN_STATUS();
- } break;
case PropertyType::epsilon:
status = setFloat(epsilon, value);
NN_RETURN_STATUS();
break;
default:
- ml_loge("Error: Unknown Layer Property Key: %s", key.c_str());
- status = ML_ERROR_INVALID_PARAMETER;
+ status = Layer::setProperty({values[i]});
+ NN_RETURN_STATUS();
break;
}
}
unsigned int t = parseLayerProperty(key);
switch (static_cast<PropertyType>(t)) {
- case PropertyType::input_shape:
- status = input_dim.setTensorDim(value.c_str());
- NN_RETURN_STATUS();
- break;
- case PropertyType::bias_init_zero:
- status = setBoolean(bias_init_zero, value);
- NN_RETURN_STATUS();
- break;
- case PropertyType::activation:
- status = setActivation((ActiType)parseType(value, TOKEN_ACTI));
- NN_RETURN_STATUS();
- break;
- case PropertyType::flatten:
- status = setBoolean(flatten, value);
- NN_RETURN_STATUS();
- break;
- case PropertyType::weight_decay:
- weight_decay.type = (WeightDecayType)parseType(value, TOKEN_WEIGHT_DECAY);
- if (weight_decay.type == WeightDecayType::unknown) {
- ml_loge("Error: Unknown Weight Decay");
- return ML_ERROR_INVALID_PARAMETER;
- }
- break;
- case PropertyType::weight_decay_lambda:
- status = setFloat(weight_decay.lambda, value);
- NN_RETURN_STATUS();
- break;
- case PropertyType::weight_ini:
- weight_ini_type = (WeightIniType)parseType(value, TOKEN_WEIGHTINI);
- break;
case PropertyType::filter: {
int size;
status = setInt(size, value);
NN_RETURN_STATUS();
break;
default:
- ml_loge("Error: Unknown Layer Property Key : %s", key.c_str());
- status = ML_ERROR_INVALID_PARAMETER;
+ status = Layer::setProperty({values[i]});
+ NN_RETURN_STATUS();
break;
}
}
unsigned int type = parseLayerProperty(key);
switch (static_cast<PropertyType>(type)) {
- case PropertyType::input_shape:
- status = input_dim.setTensorDim(value.c_str());
- NN_RETURN_STATUS();
- break;
case PropertyType::unit: {
int width;
status = setInt(width, value);
unit = width;
output_dim.width(unit);
} break;
- case PropertyType::bias_init_zero: {
- status = setBoolean(this->bias_init_zero, value);
- NN_RETURN_STATUS();
- } break;
- case PropertyType::activation:
- status = setActivation((ActiType)parseType(value, TOKEN_ACTI));
- NN_RETURN_STATUS();
- break;
- case PropertyType::flatten:
- status = setBoolean(flatten, value);
- NN_RETURN_STATUS();
- break;
- case PropertyType::weight_decay:
- weight_decay.type = (WeightDecayType)parseType(value, TOKEN_WEIGHT_DECAY);
- if (weight_decay.type == WeightDecayType::unknown) {
- ml_loge("Error: Unknown Weight Decay");
- return ML_ERROR_INVALID_PARAMETER;
- }
- break;
- case PropertyType::weight_decay_lambda:
- status = setFloat(weight_decay.lambda, value);
- NN_RETURN_STATUS();
- break;
- case PropertyType::weight_ini:
- weight_ini_type = (WeightIniType)parseType(value, TOKEN_WEIGHTINI);
- break;
default:
- ml_loge("Error: Unknown Layer Property Key : %s", key.c_str());
- status = ML_ERROR_INVALID_PARAMETER;
+ status = Layer::setProperty({values[i]});
+ NN_RETURN_STATUS();
break;
}
}
unsigned int type = parseLayerProperty(key.c_str());
switch (static_cast<PropertyType>(type)) {
- case PropertyType::input_shape:
- status = input_dim.setTensorDim(value.c_str());
- NN_RETURN_STATUS();
- break;
- case PropertyType::bias_init_zero:
- status = setBoolean(bias_init_zero, value);
- NN_RETURN_STATUS();
- break;
case PropertyType::normalization:
status = setBoolean(normalization, value);
NN_RETURN_STATUS();
NN_RETURN_STATUS();
break;
default:
- ml_loge("Error: Unknown Layer Property Key : %s", key.c_str());
- status = ML_ERROR_INVALID_PARAMETER;
+ status = Layer::setProperty({values[i]});
+ NN_RETURN_STATUS();
break;
}
}
return std::make_shared<std::vector<Tensor>>(std::move(ele));
}
+int Layer::setProperty(std::vector<std::string> values) {
+ int status = ML_ERROR_NONE;
+
+ for (unsigned int i = 0; i < values.size(); ++i) {
+ std::string key;
+ std::string value;
+
+ status = getKeyValue(values[i], key, value);
+ NN_RETURN_STATUS();
+
+ unsigned int type = parseLayerProperty(key);
+
+ switch (static_cast<PropertyType>(type)) {
+ case PropertyType::input_shape:
+ status = input_dim.setTensorDim(value.c_str());
+ NN_RETURN_STATUS();
+ break;
+ case PropertyType::bias_init_zero:
+ status = setBoolean(this->bias_init_zero, value);
+ NN_RETURN_STATUS();
+ break;
+ case PropertyType::activation:
+ status = setActivation((ActiType)parseType(value, TOKEN_ACTI));
+ NN_RETURN_STATUS();
+ break;
+ case PropertyType::flatten:
+ status = setBoolean(flatten, value);
+ NN_RETURN_STATUS();
+ break;
+ case PropertyType::weight_decay:
+ weight_decay.type = (WeightDecayType)parseType(value, TOKEN_WEIGHT_DECAY);
+ if (weight_decay.type == WeightDecayType::unknown) {
+ ml_loge("Error: Unknown Weight Decay");
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+ break;
+ case PropertyType::weight_decay_lambda:
+ status = setFloat(weight_decay.lambda, value);
+ NN_RETURN_STATUS();
+ break;
+ case PropertyType::weight_ini:
+ weight_ini_type = (WeightIniType)parseType(value, TOKEN_WEIGHTINI);
+ break;
+ default:
+ ml_loge("Error: Unknown Layer Property Key : %s", key.c_str());
+ status = ML_ERROR_INVALID_PARAMETER;
+ break;
+ }
+ }
+ return status;
+}
+
} /* namespace nntrainer */
/**
* @brief Neural Network Model Add Layer Test
*/
-TEST(nntrainer_capi_nnmodel, addLayer_03_n) {
+TEST(nntrainer_capi_nnmodel, addLayer_03_p) {
int status = ML_ERROR_NONE;
ml_nnmodel_h model;
status = ml_nnlayer_set_property(layer, "input_shape= 32:1:1:62720",
"activation=sigmoid", NULL);
- EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnlayer_delete(layer);
EXPECT_EQ(status, ML_ERROR_NONE);
}
/**
- * @brief Neural Network Layer Set Property Test (negative test)
+ * @brief Neural Network Layer Set Property Test (positive test)
*/
-TEST(nntrainer_capi_nnlayer, setproperty_03_n) {
+TEST(nntrainer_capi_nnlayer, setproperty_03_p) {
ml_nnlayer_h handle;
int status;
status = ml_nnlayer_create(&handle, ML_LAYER_TYPE_INPUT);
EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnlayer_set_property(handle, "activation= sigmoid", NULL);
- EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+ EXPECT_EQ(status, ML_ERROR_NONE);
status = ml_nnlayer_delete(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
}