From 6e2a5e24f8f9f20f3a1335ba5ea47305e883df95 Mon Sep 17 00:00:00 2001 From: Parichay Kapoor Date: Wed, 30 Jun 2021 13:40:25 +0900 Subject: [PATCH] [test] Enable modelfile unittest Enable modelfile unittests with newly update LayerV2 to test building and initialization of the models. Signed-off-by: Parichay Kapoor --- nntrainer/app_context.cpp | 2 +- nntrainer/compiler/ini_interpreter.cpp | 3 +- nntrainer/compiler/ini_interpreter.h | 8 +- nntrainer/graph/network_graph.cpp | 40 ++--- nntrainer/graph/network_graph.h | 9 +- nntrainer/layers/layer_context.h | 12 +- nntrainer/layers/layer_node.cpp | 16 +- nntrainer/layers/layer_node.h | 38 ++-- nntrainer/layers/loss/cross_entropy_loss_layer.h | 2 +- .../layers/loss/cross_entropy_sigmoid_loss_layer.h | 2 +- .../layers/loss/cross_entropy_softmax_loss_layer.h | 2 +- nntrainer/layers/loss/mse_loss_layer.h | 2 +- nntrainer/tensor/manager.h | 2 +- test/unittest/meson.build | 2 +- test/unittest/unittest_nntrainer_modelfile.cpp | 191 +++++++++++---------- 15 files changed, 177 insertions(+), 154 deletions(-) diff --git a/nntrainer/app_context.cpp b/nntrainer/app_context.cpp index 10bcf8d..1fa4305 100644 --- a/nntrainer/app_context.cpp +++ b/nntrainer/app_context.cpp @@ -272,7 +272,7 @@ static void add_default_object(AppContext &ac) { CrossEntropySigmoidLossLayer::type, LayerType::LAYER_LOSS_CROSS_ENTROPY_SIGMOID); - ac.registerFactory(AppContext::unknownFactory, "unknown", + ac.registerFactory(AppContext::unknownFactory, "unknown", LayerType::LAYER_UNKNOWN); } diff --git a/nntrainer/compiler/ini_interpreter.cpp b/nntrainer/compiler/ini_interpreter.cpp index d472fc2..b7385cc 100644 --- a/nntrainer/compiler/ini_interpreter.cpp +++ b/nntrainer/compiler/ini_interpreter.cpp @@ -133,8 +133,7 @@ section2layer(dictionary *ini, const std::string &sec_name, auto properties = section2properties(ini, sec_name); - auto layer = - createLayerNode(ac.createObject(layer_type), properties); + auto layer = createLayerNode(ac.createObject(layer_type), properties); return layer; } diff --git a/nntrainer/compiler/ini_interpreter.h b/nntrainer/compiler/ini_interpreter.h index 2faf357..1cfc19d 100644 --- a/nntrainer/compiler/ini_interpreter.h +++ b/nntrainer/compiler/ini_interpreter.h @@ -67,8 +67,8 @@ private: * @param section section name * @return std::shared_ptr layer */ - std::shared_ptr loadLayerConfig(dictionary *ini, - const std::string §ion); + std::shared_ptr loadLayerConfig(dictionary *ini, + const std::string §ion); /** * @brief Create a Layer From Backbone Config @@ -77,8 +77,8 @@ private: * @param section section name * @return std::shared_ptr layer */ - std::shared_ptr loadBackboneConfigIni(dictionary *ini, - const std::string §ion); + std::shared_ptr loadBackboneConfigIni(dictionary *ini, + const std::string §ion); AppContext app_context; std::function pathResolver; diff --git a/nntrainer/graph/network_graph.cpp b/nntrainer/graph/network_graph.cpp index 7603d7a..cbcfff3 100644 --- a/nntrainer/graph/network_graph.cpp +++ b/nntrainer/graph/network_graph.cpp @@ -33,6 +33,8 @@ #include #include +#define LAYER_V2 1 + #define LNODE(x) std::static_pointer_cast(x) namespace nntrainer { @@ -85,6 +87,10 @@ void NetworkGraph::addLayerNode(std::shared_ptr layer) { graph.addNode(std::make_unique(layer, graph.size())); } +void NetworkGraph::addLayerNode(std::unique_ptr layer) { + graph.addNode(std::make_unique(std::move(layer), graph.size())); +} + void NetworkGraph::countNonTrainableLayersAtBegin() { for (auto iter = cbegin(); iter != cend(); iter++) { // TODO: check if getTrainable() was set and if trainable weights exist, @@ -282,12 +288,6 @@ int NetworkGraph::addLossLayer(const std::string &loss_type) { lnode->setInputLayers({input_str}); - /** Set output layers here as setOutputLayers will not be called after adding - * loss. */ - if (lnode->getNumOutputs() == 0) { - lnode->setOutputLayers({"__exit__"}); - } - /** * As the loss layer is always the last, it could be added manually to Sorted * for performance. @@ -324,15 +324,8 @@ void NetworkGraph::setOutputLayers() { } } - if (layer_idx->getOutputDimensions().size() != layer_idx->getNumOutputs()) { - if (layer_idx->getNumOutputs() == 0) { - /** No output layer inplies its the last layer */ - layer_idx->setOutputLayers({"__exit__"}); - last_layer_count += 1; - } else { - /** error for any other layer */ - throw std::logic_error("Graph node has fewer edges than expected."); - } + if (layer_idx->getNumOutputs() == 0) { + last_layer_count += 1; } } @@ -340,11 +333,6 @@ void NetworkGraph::setOutputLayers() { throw std::invalid_argument( "Error: Multiple last layers in the model not supported"); } - - for (auto iter = cbegin(); iter != cend(); iter++) { - if ((*iter)->getNumOutputs() == 0) - throw std::runtime_error("There is un-connected node"); - } } int NetworkGraph::isCompilable() { @@ -377,6 +365,12 @@ int NetworkGraph::checkCompiledGraph() { } } + /** Only loss layer nodes can have no output connections */ + for (auto iter = cbegin(); iter != cend(); iter++) { + if ((*iter)->getNumOutputs() == 0 && !(*iter)->requireLabel()) + throw std::runtime_error("There is un-connected node"); + } + return ML_ERROR_NONE; } @@ -793,9 +787,13 @@ int NetworkGraph::initialize(std::shared_ptr manager) { lptr->setOutputBuffers(in_out); #endif + /** no need to update input_map for the last layer */ + if (idx == graph.size() - 1) + break; + #if LAYER_V2 auto &output_layers = lnode->getOutputLayers(); - for (unsigned int i = 0; i < outputs.size(); ++i) { + for (unsigned int i = 0; i < output_layers.size(); ++i) { auto out_layer_node = getLayerNode(output_layers[i]); if (input_map.find(output_layers[i]) == input_map.end()) input_map.insert({output_layers[i], {}}); diff --git a/nntrainer/graph/network_graph.h b/nntrainer/graph/network_graph.h index 4f0ad45..98bd98b 100644 --- a/nntrainer/graph/network_graph.h +++ b/nntrainer/graph/network_graph.h @@ -356,8 +356,7 @@ private: * @details Ensures that the layer has a unique and a valid name. A valid * name pre-assigned to the layer can be changed if force_rename is enabled. */ - void ensureName(std::shared_ptr layer, - const std::string &prefix = "", + void ensureName(std::shared_ptr layer, const std::string &prefix = "", const std::string &postfix = "", bool force_rename = false); /** @@ -367,6 +366,12 @@ private: void addLayerNode(std::shared_ptr layer); /** + * @brief Create new LayerNode and add into Graph + * @param[in] layer shared_ptr of Layer + */ + void addLayerNode(std::unique_ptr layer); + + /** * @brief update input_layers, output_layers node name * * @param from update name from @a from diff --git a/nntrainer/layers/layer_context.h b/nntrainer/layers/layer_context.h index 6048667..780ea0e 100644 --- a/nntrainer/layers/layer_context.h +++ b/nntrainer/layers/layer_context.h @@ -233,6 +233,14 @@ public: /** * @brief Construct a new Run Layer Context object + * + */ + RunLayerContext(const std::string &name) : RunLayerContext() { + std::get(props).set(name); + } + + /** + * @brief Construct a new Run Layer Context object * @todo Include properties like name/trainable later * * @param w weights of the layer @@ -249,7 +257,9 @@ public: weights(w), inputs(in), outputs(out), - tensors(t) {} + tensors(t) { + std::get(props).set(name); + } /** * @brief Get the Weight tensor object diff --git a/nntrainer/layers/layer_node.cpp b/nntrainer/layers/layer_node.cpp index 5da441f..1a5e03e 100644 --- a/nntrainer/layers/layer_node.cpp +++ b/nntrainer/layers/layer_node.cpp @@ -133,8 +133,11 @@ int LayerNode::setProperty(std::vector properties) { /// until we have layerNode::finalize and must not except timedist layer if (getDistribute()) { if (layerv1 == nullptr) { - layerv1 = nullptr; - /// logic for layer v2 + // auto &ac = nntrainer::AppContext::Global(); + // std::unique_ptr dlayer = + // ac.createObject(TimeDistLayer::type); + // dynamic_cast(dlayer.get())->setDistLayer(std::move(layer)); + // layer = std::move(dlayer); } else if (layerv1->getType() != TimeDistLayer::type) { auto &ac = nntrainer::AppContext::Global(); std::shared_ptr dlayer = @@ -221,8 +224,7 @@ bool LayerNode::setProperty(const std::string &key, const std::string &value) { case PropertyType::input_layers: { static const std::regex reg("\\,+"); std::vector split_layers = split(value, reg); - layerv1->setNumInputs(split_layers.size()); - input_layers = split_layers; + setInputLayers(split_layers); break; } default: @@ -378,10 +380,10 @@ void LayerNode::save(std::ofstream &file) const { * @brief Finalize creating the layer node */ void LayerNode::finalize() { -#if LAYER_V2 - layer->finalize(init_context); -#endif + if (layer) + layer->finalize(init_context); finalized = true; + run_context = RunLayerContext(getName()); } /** diff --git a/nntrainer/layers/layer_node.h b/nntrainer/layers/layer_node.h index 6a0c6c2..11f8ced 100644 --- a/nntrainer/layers/layer_node.h +++ b/nntrainer/layers/layer_node.h @@ -31,6 +31,7 @@ #include #include #include +#include #include constexpr bool LAYER_V2 = true; @@ -264,7 +265,10 @@ public: * @return boolean true if trainable, else false */ bool supportBackwarding() const noexcept { - return getLayer()->supportBackwarding(); + if (layerv1) + return getLayer()->supportBackwarding(); + else + return layer->supportBackwarding(); } /** @@ -374,6 +378,7 @@ public: */ void addInputLayers(const std::string &in_layer) { input_layers.push_back(in_layer); + resizeInputDimensions(input_layers.size()); if (layerv1) layerv1->setNumInputs(input_layers.size()); } @@ -396,6 +401,7 @@ public: */ void setInputLayers(const std::vector &layers) { input_layers = layers; + resizeInputDimensions(input_layers.size()); if (layerv1) layerv1->setNumInputs(layers.size()); } @@ -614,23 +620,6 @@ public: } } - /** - * @brief Set loss type for the layer underneath the node - * - * @param type The loss type - * @todo this interface will be removed when loss layer is updated for LayerV2 - */ - // void setLossType(LossType type) { - // if (layerv1) { - // // if (getType() != LossLayer::type) - // // throw std::runtime_error("Setting loss type on non-loss layer"); - // // std::dynamic_pointer_cast(getLayer())->setLoss(type); - // } else { - // // TODO: set loss layer type for LayerV2 - // // will be handled when updating LossLayer for LayerV2 - // } - // } - private: /// @todo remove this std::shared_ptr @@ -700,6 +689,19 @@ private: * @throw std::invalid_argument when ActivationType is unknown */ void setActivation(ActivationType activation); + + /** + * @brief Resize the input dimensions + * + * @param size Number of input dimensions + */ + void resizeInputDimensions(unsigned int size) { + auto cur_input_dim = init_context.getInputDimensions(); + if (cur_input_dim.size() != size) { + cur_input_dim.resize(size); + init_context = InitLayerContext(cur_input_dim); + } + } }; /** diff --git a/nntrainer/layers/loss/cross_entropy_loss_layer.h b/nntrainer/layers/loss/cross_entropy_loss_layer.h index 65931df..c0b5406 100644 --- a/nntrainer/layers/loss/cross_entropy_loss_layer.h +++ b/nntrainer/layers/loss/cross_entropy_loss_layer.h @@ -58,7 +58,7 @@ public: return CrossEntropyLossLayer::type; }; - inline static const std::string type = "cross_entropy_loss"; + inline static const std::string type = "cross"; }; } // namespace nntrainer diff --git a/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.h b/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.h index 512dbda..014a03e 100644 --- a/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.h +++ b/nntrainer/layers/loss/cross_entropy_sigmoid_loss_layer.h @@ -53,7 +53,7 @@ public: return CrossEntropySigmoidLossLayer::type; }; - inline static const std::string type = "cross_entropy_sigmoid_loss"; + inline static const std::string type = "cross_sigmoid"; }; } // namespace nntrainer diff --git a/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.h b/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.h index 45de828..c9bc751 100644 --- a/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.h +++ b/nntrainer/layers/loss/cross_entropy_softmax_loss_layer.h @@ -53,7 +53,7 @@ public: return CrossEntropySoftmaxLossLayer::type; }; - inline static const std::string type = "cross_entropy_softmax_loss"; + inline static const std::string type = "cross_softmax"; }; } // namespace nntrainer diff --git a/nntrainer/layers/loss/mse_loss_layer.h b/nntrainer/layers/loss/mse_loss_layer.h index 5750bd6..387e92b 100644 --- a/nntrainer/layers/loss/mse_loss_layer.h +++ b/nntrainer/layers/loss/mse_loss_layer.h @@ -50,7 +50,7 @@ public: */ const std::string getType() const override { return MSELossLayer::type; }; - inline static const std::string type = "mse_loss"; + inline static const std::string type = "mse"; }; } // namespace nntrainer diff --git a/nntrainer/tensor/manager.h b/nntrainer/tensor/manager.h index 2e449af..86d3a93 100644 --- a/nntrainer/tensor/manager.h +++ b/nntrainer/tensor/manager.h @@ -583,10 +583,10 @@ private: tensors_list.emplace_back(std::make_unique(ts)); } - layer_objs_list.emplace_back(std::move(tensors_list)); std::transform(tensors_list.begin(), tensors_list.end(), std::back_inserter(ret), [](auto const &elem) { return elem.get(); }); + layer_objs_list.emplace_back(std::move(tensors_list)); return ret; } diff --git a/test/unittest/meson.build b/test/unittest/meson.build index f2cd849..fbd6210 100644 --- a/test/unittest/meson.build +++ b/test/unittest/meson.build @@ -31,7 +31,7 @@ test_target = [ 'unittest_nntrainer_tensor', 'unittest_util_func', 'unittest_databuffer_file', - # 'unittest_nntrainer_modelfile', + 'unittest_nntrainer_modelfile', # 'unittest_nntrainer_models', # 'unittest_nntrainer_graph', 'unittest_nntrainer_appcontext', diff --git a/test/unittest/unittest_nntrainer_modelfile.cpp b/test/unittest/unittest_nntrainer_modelfile.cpp index dbca312..d5abc74 100644 --- a/test/unittest/unittest_nntrainer_modelfile.cpp +++ b/test/unittest/unittest_nntrainer_modelfile.cpp @@ -165,8 +165,11 @@ TEST_P(nntrainerIniTest, initThreetime_n) { /// @todo add run test could be added with iniTest flag to control skip static nntrainer::IniSection nw_base("model", "Type = NeuralNetwork | " - "batch_size = 32 | " - "loss = cross"); + "batch_size = 32"); + +static nntrainer::IniSection nw_base_cross("model", "Type = NeuralNetwork | " + "batch_size = 32 | " + "loss = cross"); static nntrainer::IniSection nw_base_mse("model", "Type = NeuralNetwork | " "batch_size = 32 | " @@ -181,13 +184,13 @@ static nntrainer::IniSection adam("Optimizer", "Type = adam |" static nntrainer::IniSection sgd("Optimizer", "Type = sgd |" "Learning_rate = 1"); -// static nntrainer::IniSection nw_sgd = nw_base + "Optimizer = sgd |" +// static nntrainer::IniSection nw_sgd = nw_base_cross + "Optimizer = sgd |" // "Learning_rate = 1"; -// static nntrainer::IniSection nw_adam = nw_base + adam; +// static nntrainer::IniSection nw_adam = nw_base_cross + adam; -// static nntrainer::IniSection nw_adam_n = nw_base + "Learning_rate = -1"; -// static nntrainer::IniSection adam_n = adam + "Learning_rate = -1"; +// static nntrainer::IniSection nw_adam_n = nw_base_cross + "Learning_rate = +// -1"; static nntrainer::IniSection adam_n = adam + "Learning_rate = -1"; static nntrainer::IniSection dataset("DataSet", "BufferSize = 100 |" "TrainData = trainingSet.dat | " @@ -206,7 +209,6 @@ static nntrainer::IniSection flatten("flat", "Type = flatten"); static nntrainer::IniSection input("inputlayer", "Type = input |" "Input_Shape = 1:1:62720 |" - "bias_initializer = zeros |" "Normalization = true |" "Activation = sigmoid"); @@ -293,55 +295,59 @@ mkIniTc(const char *name, const nntrainer::IniWrapper::Sections vec, int flag) { INSTANTIATE_TEST_CASE_P( nntrainerIniAutoTests, nntrainerIniTest, ::testing::Values( /**< positive: basic valid scenarios (2 positive and 3 negative cases) */ - mkIniTc("basic_p", {nw_base, adam, input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("basic2_p", {nw_base, sgd, input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("basic_act_p", {nw_base, sgd, input + "-Activation", act_relu+"input_layers=inputlayer", out+"input_layers=activation_relu" }, SUCCESS), - mkIniTc("basic_bn_p", {nw_base, sgd, input + "-Activation", batch_normal+"input_layers=inputlayer", act_relu+"input_layers=bn", out+"input_layers=activation_relu" }, SUCCESS), - mkIniTc("basic_bn2_p", {nw_base, sgd, input + "-Activation", batch_normal + "Activation = relu"+"input_layers=inputlayer", out+"input_layers=bn" }, SUCCESS), - mkIniTc("basic_dataset_p", {nw_base, adam, dataset, input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("basic_dataset2_p", {nw_base, sgd, input, out+"input_layers=inputlayer", dataset}, SUCCESS), - mkIniTc("basic_dataset3_p", {dataset, nw_base, sgd, input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("basic_conv2d_p", {nw_base, adam, conv2d + "input_shape = 1:10:10"}, SUCCESS), - mkIniTc("no_testSet_p", {nw_base, adam, dataset + "-TestData", input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("no_validSet_p", {nw_base, adam, dataset + "-ValidData", input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("no_bufferSize_p", {nw_base, adam, dataset + "-BufferSize", input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("buffer_size_smaller_than_batch_size_p", {nw_base, adam, dataset + "BufferSize=26", input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("buffer_size_smaller_than_batch_size2_p", {nw_base, adam, input, out+"input_layers=inputlayer", dataset + "BufferSize=26"}, SUCCESS), - - /**< half negative: init fail cases (1 positive and 4 negative cases) */ - mkIniTc("unknown_loss_n", {nw_base + "loss = unknown", adam, input, out+"input_layers=inputlayer"}, COMPFAIL | INITFAIL), - mkIniTc("activation_very_first_n", {nw_base, sgd, act_relu, input+"input_layers=activation_relu", out+"input_layers=inputlayer"}, COMPFAIL | INITFAIL), - mkIniTc("bnlayer_very_first_n", {nw_base, sgd, batch_normal, input+"input_layers=bn", out+"input_layers=inputlayer"}, COMPFAIL | INITFAIL), - mkIniTc("last_act_layer_relu_n", {nw_base, sgd, input, out+"input_layers=inputlayer", act_relu+"input_layers=fclayer" }, COMPFAIL | INITFAIL), - mkIniTc("last_act_layer_relu2_n", {nw_base, sgd, input, out+"input_layers=inputlayer" + "-Activation", act_relu+"input_layers=fclayer" }, COMPFAIL | INITFAIL), - mkIniTc("basic_conv2d_n", {nw_base, adam, conv2d + "input_shape = 1:1:62720"}, INITFAIL), - - /**< negative: basic invalid scenarios (5 negative cases) */ - mkIniTc("no_model_sec_name_n", {I(nw_base, "-", "")}, ALLFAIL), - mkIniTc("no_model_sec_n", {input, out+"input_layers=inputlayer"}, ALLFAIL), + mkIniTc("basic_p", {nw_base_mse, adam, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS), + mkIniTc("basic2_p", {nw_base_mse, sgd, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS), + mkIniTc("basic3_p", {nw_base + "loss=cross_sigmoid", adam, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS), + mkIniTc("basic4_p", {nw_base + "loss=cross_softmax", adam, input + "-Activation", out+"input_layers=inputlayer" + "-Activation"}, SUCCESS), + // mkIniTc("basic_p", {nw_base_cross, adam, input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("basic2_p", {nw_base_cross, sgd, input, out+"input_layers=inputlayer"}, SUCCESS) + // mkIniTc("basic_act_p", {nw_base_cross, sgd, input + "-Activation", act_relu+"input_layers=inputlayer", out+"input_layers=activation_relu" }, SUCCESS), + // mkIniTc("basic_bn_p", {nw_base_cross, sgd, input + "-Activation", batch_normal+"input_layers=inputlayer", act_relu+"input_layers=bn", out+"input_layers=activation_relu" }, SUCCESS), + // mkIniTc("basic_bn2_p", {nw_base_cross, sgd, input + "-Activation", batch_normal + "Activation = relu"+"input_layers=inputlayer", out+"input_layers=bn" }, SUCCESS), + // mkIniTc("basic_dataset_p", {nw_base_cross, adam, dataset, input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("basic_dataset2_p", {nw_base_cross, sgd, input, out+"input_layers=inputlayer", dataset}, SUCCESS), + // mkIniTc("basic_dataset3_p", {dataset, nw_base_cross, sgd, input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("basic_conv2d_p", {nw_base_cross, adam, conv2d + "input_shape = 1:10:10"}, SUCCESS), + // mkIniTc("no_testSet_p", {nw_base_cross, adam, dataset + "-TestData", input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("no_validSet_p", {nw_base_cross, adam, dataset + "-ValidData", input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("no_bufferSize_p", {nw_base_cross, adam, dataset + "-BufferSize", input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("buffer_size_smaller_than_batch_size_p", {nw_base_cross, adam, dataset + "BufferSize=26", input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("buffer_size_smaller_than_batch_size2_p", {nw_base_cross, adam, input, out+"input_layers=inputlayer", dataset + "BufferSize=26"}, SUCCESS), + + /*// *< half negative: init fail cases (1 positive and 4 negative cases) */ + // mkIniTc("unknown_loss_n", {nw_base_cross + "loss = unknown", adam, input, out+"input_layers=inputlayer"}, COMPFAIL | INITFAIL), + // mkIniTc("activation_very_first_n", {nw_base_cross, sgd, act_relu, input+"input_layers=activation_relu", out+"input_layers=inputlayer"}, COMPFAIL | INITFAIL), + // mkIniTc("bnlayer_very_first_n", {nw_base_cross, sgd, batch_normal, input+"input_layers=bn", out+"input_layers=inputlayer"}, COMPFAIL | INITFAIL), + // mkIniTc("last_act_layer_relu_n", {nw_base_cross, sgd, input, out+"input_layers=inputlayer", act_relu+"input_layers=fclayer" }, COMPFAIL | INITFAIL), + // mkIniTc("last_act_layer_relu2_n", {nw_base_cross, sgd, input, out+"input_layers=inputlayer" + "-Activation", act_relu+"input_layers=fclayer" }, COMPFAIL | INITFAIL), + // mkIniTc("basic_conv2d_n", {nw_base_cross, adam, conv2d + "input_shape = 1:1:62720"}, INITFAIL), + + /*// *< negative: basic invalid scenarios (5 negative cases) */ + mkIniTc("no_model_sec_name_n", {I(nw_base_cross, "-", "")}, ALLFAIL), + // mkIniTc("no_model_sec_n", {input, out+"input_layers=inputlayer"}, ALLFAIL), mkIniTc("empty_n", {}, ALLFAIL), mkIniTc("no_layers_n", {nw_base, adam}, ALLFAIL), - mkIniTc("no_layers_2_n", {nw_base, adam, dataset}, ALLFAIL), - /// #391 - // mkIniTc("ini_has_empty_value_n", {nw_base, adam + "epsilon = _", input, out}, ALLFAIL), - - /**< negative: property(hyperparam) validation (5 negative cases) */ - mkIniTc("wrong_opt_type_n", {nw_base, adam + "Type = wrong_opt", input, out+"input_layers=inputlayer"}, ALLFAIL), - mkIniTc("adam_minus_lr_n", {nw_base, adam + "Learning_rate = -0.1", input, out+"input_layers=inputlayer"}, ALLFAIL), - mkIniTc("sgd_minus_lr_n", {nw_base, sgd + "Learning_rate = -0.1", input, out+"input_layers=inputlayer"}, ALLFAIL), - mkIniTc("no_loss_p", {nw_base + "-loss", adam, input, out+"input_layers=inputlayer"}, SUCCESS), - mkIniTc("unknown_layer_type_n", {nw_base, adam, input + "Type = asdf", out+"input_layers=inputlayer"}, ALLFAIL), - mkIniTc("unknown_layer_type2_n", {nw_base, adam, input, out + "Type = asdf"+"input_layers=inputlayer", I(out, "outlayer", "")}, ALLFAIL), - - /**< negative: little bit of tweeks to check determinancy (5 negative cases) */ - mkIniTc("wrong_nw_dataset_n", {nw_base, adam, input, out+"input_layers=inputlayer", dataset + "-TrainData"}, ALLFAIL), - mkIniTc("wrong_nw_dataset2_n", {nw_base, adam, dataset + "-TrainData", input, out+"input_layers=inputlayer"}, ALLFAIL), - - /**< negative: dataset is not complete (5 negative cases) */ - mkIniTc("no_trainingSet_n", {nw_base, adam, dataset + "-TrainData", input, out+"input_layers=inputlayer"}, ALLFAIL) - - /// @todo: correct below - // mkIniTc("backbone_filemissing_n", {nw_base, adam, dataset + "-LabelData", input, out+"input_layers=inputlayer"}, ALLFAIL) + mkIniTc("no_layers_2_n", {nw_base, adam, dataset}, ALLFAIL) + // /// #391 + // // mkIniTc("ini_has_empty_value_n", {nw_base, adam + "epsilon = _", input, out}, ALLFAIL), + + /*// *< negative: property(hyperparam) validation (5 negative cases) */ + // mkIniTc("wrong_opt_type_n", {nw_base, adam + "Type = wrong_opt", input, out+"input_layers=inputlayer"}, ALLFAIL), + // mkIniTc("adam_minus_lr_n", {nw_base, adam + "Learning_rate = -0.1", input, out+"input_layers=inputlayer"}, ALLFAIL), + // mkIniTc("sgd_minus_lr_n", {nw_base, sgd + "Learning_rate = -0.1", input, out+"input_layers=inputlayer"}, ALLFAIL), + // mkIniTc("no_loss_p", {nw_base + "-loss", adam, input, out+"input_layers=inputlayer"}, SUCCESS), + // mkIniTc("unknown_layer_type_n", {nw_base, adam, input + "Type = asdf", out+"input_layers=inputlayer"}, ALLFAIL), + // mkIniTc("unknown_layer_type2_n", {nw_base, adam, input, out + "Type = asdf"+"input_layers=inputlayer", I(out, "outlayer", "")}, ALLFAIL), + + /*// *< negative: little bit of tweeks to check determinancy (5 negative cases) */ + // mkIniTc("wrong_nw_dataset_n", {nw_base, adam, input, out+"input_layers=inputlayer", dataset + "-TrainData"}, ALLFAIL), + // mkIniTc("wrong_nw_dataset2_n", {nw_base, adam, dataset + "-TrainData", input, out+"input_layers=inputlayer"}, ALLFAIL), + + /*// *< negative: dataset is not complete (5 negative cases) */ + // mkIniTc("no_trainingSet_n", {nw_base, adam, dataset + "-TrainData", input, out+"input_layers=inputlayer"}, ALLFAIL) + + // /// @todo: correct below + // // mkIniTc("backbone_filemissing_n", {nw_base, adam, dataset + "-LabelData", input, out+"input_layers=inputlayer"}, ALLFAIL) ), [](const testing::TestParamInfo& info){ return std::get<0>(info.param); }); @@ -350,8 +356,8 @@ INSTANTIATE_TEST_CASE_P( /** * @brief Ini file unittest with backbone with wrong file */ -TEST(nntrainerIniTest, backbone_n_01) { - ScopedIni s{"backbone_n1", {nw_base, adam, backbone_random}}; +TEST(nntrainerIniTest, DISABLED_backbone_n_01) { + ScopedIni s{"backbone_n1", {nw_base_cross, adam, backbone_random}}; nntrainer::NeuralNetwork NN; EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_INVALID_PARAMETER); @@ -360,9 +366,9 @@ TEST(nntrainerIniTest, backbone_n_01) { /** * @brief Ini file unittest with backbone with empty backbone */ -TEST(nntrainerIniTest, backbone_n_02) { - ScopedIni b{"base", {nw_base}}; - ScopedIni s{"backbone_n2", {nw_base, adam, backbone_valid}}; +TEST(nntrainerIniTest, DISABLED_backbone_n_02) { + ScopedIni b{"base", {nw_base_cross}}; + ScopedIni s{"backbone_n2", {nw_base_cross, adam, backbone_valid}}; nntrainer::NeuralNetwork NN; EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_INVALID_PARAMETER); @@ -371,9 +377,9 @@ TEST(nntrainerIniTest, backbone_n_02) { /** * @brief Ini file unittest with backbone with normal backbone */ -TEST(nntrainerIniTest, backbone_p_03) { - ScopedIni b{"base", {nw_base, batch_normal}}; - ScopedIni s{"backbone_p3", {nw_base, adam, backbone_valid}}; +TEST(nntrainerIniTest, DISABLED_backbone_p_03) { + ScopedIni b{"base", {nw_base_cross, batch_normal}}; + ScopedIni s{"backbone_p3", {nw_base_cross, adam, backbone_valid}}; nntrainer::NeuralNetwork NN; EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); @@ -382,9 +388,9 @@ TEST(nntrainerIniTest, backbone_p_03) { /** * @brief Ini file unittest with backbone without model parameters */ -TEST(nntrainerIniTest, backbone_p_04) { +TEST(nntrainerIniTest, DISABLED_backbone_p_04) { ScopedIni b{"base", {flatten, conv2d}}; - ScopedIni s{"backbone_p4", {nw_base, adam, backbone_valid}}; + ScopedIni s{"backbone_p4", {nw_base_cross, adam, backbone_valid}}; nntrainer::NeuralNetwork NN; EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); @@ -393,14 +399,14 @@ TEST(nntrainerIniTest, backbone_p_04) { /** * @brief Ini file unittest matching model with and without backbone */ -TEST(nntrainerIniTest, backbone_p_05) { +TEST(nntrainerIniTest, DISABLED_backbone_p_05) { /** Create a backbone.ini */ - ScopedIni b("base", {nw_base, conv2d}); + ScopedIni b("base", {nw_base_cross, conv2d}); /** Create a model of 4 conv layers using backbone */ ScopedIni backbone_made( - "backbone_made", {nw_base, sgd, input2d, + "backbone_made", {nw_base_cross, sgd, input2d, I("block1") + backbone_valid + "input_layers=inputlayer", I("block2") + backbone_valid + "input_layers=block1", I("block3") + backbone_valid + "input_layers=block2", @@ -421,7 +427,7 @@ TEST(nntrainerIniTest, backbone_p_05) { /** Create the same model directly without using backbone */ // std::string conv2d_orig_name = conv2d.getName(); ScopedIni direct_made( - "direct_made", {nw_base, sgd, input2d, + "direct_made", {nw_base_cross, sgd, input2d, I("block1conv2d") + conv2d + "input_layers=inputlayer", I("block2conv2d") + conv2d + "input_layers=block1conv2d", I("block3conv2d") + conv2d + "input_layers=block2conv2d", @@ -459,9 +465,9 @@ TEST(nntrainerIniTest, backbone_p_05) { /** * @brief Ini file unittest matching model with and without trainable */ -TEST(nntrainerIniTest, backbone_p_06) { +TEST(nntrainerIniTest, DISABLED_backbone_p_06) { ScopedIni b("base", {flatten, conv2d}); - ScopedIni s("backbone_p6", {nw_base, adam, backbone_valid}); + ScopedIni s("backbone_p6", {nw_base_cross, adam, backbone_valid}); nntrainer::NeuralNetwork NN; EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); @@ -475,9 +481,10 @@ TEST(nntrainerIniTest, backbone_p_06) { /** * @brief Ini file unittest matching model with and without trainable */ -TEST(nntrainerIniTest, backbone_p_07) { +TEST(nntrainerIniTest, DISABLED_backbone_p_07) { ScopedIni b("base", {conv2d}); - ScopedIni s("backbone_p7", {nw_base, adam, backbone_notrain, backbone_train}); + ScopedIni s("backbone_p7", + {nw_base_cross, adam, backbone_notrain, backbone_train}); nntrainer::NeuralNetwork NN; EXPECT_EQ(NN.loadFromConfig(s.getIniName()), ML_ERROR_NONE); @@ -491,8 +498,8 @@ TEST(nntrainerIniTest, backbone_p_07) { /** * @brief Ini file unittest with backbone with normal backbone */ -TEST(nntrainerIniTest, backbone_n_08) { - ScopedIni s("backbone_n8", {nw_base, adam, backbone_random_external}); +TEST(nntrainerIniTest, DISABLED_backbone_n_08) { + ScopedIni s("backbone_n8", {nw_base_cross, adam, backbone_random_external}); nntrainer::NeuralNetwork NN; @@ -508,7 +515,7 @@ TEST(nntrainerIniTest, backbone_n_08) { /** * @brief Ini file unittest with backbone with normal backbone */ -TEST(nntrainerIniTest, backbone_p_09) { +TEST(nntrainerIniTest, DISABLED_backbone_p_09) { ScopedIni s("backbone_p9", {nw_base_mse + "-batch_size", adam, backbone_valid_external}); nntrainer::NeuralNetwork NN; @@ -526,7 +533,7 @@ TEST(nntrainerIniTest, backbone_p_09) { * @brief Ini file unittest with backbone with normal backbone */ // Enable after sepearet memory assign and initialization of graph -TEST(nntrainerIniTest, backbone_p_10) { +TEST(nntrainerIniTest, DISABLED_backbone_p_10) { ScopedIni s("backbone_p10", {nw_base_mse, adam, backbone_valid_external_no_shape}); nntrainer::NeuralNetwork NN; @@ -544,7 +551,7 @@ TEST(nntrainerIniTest, backbone_p_10) { * @brief Ini file unittest with backbone * @note Input shape is provided in model file */ -TEST(nntrainerIniTest, backbone_n_15) { +TEST(nntrainerIniTest, DISABLED_backbone_n_15) { ScopedIni base("base", {conv2d, conv2d}); ScopedIni full("backbone_n15_scaled", {nw_base_mse, adam, backbone_valid}); @@ -564,7 +571,7 @@ TEST(nntrainerIniTest, backbone_n_15) { * @brief Ini file unittest with backbone * @note Input shape is striped from backbone and not provided in model file */ -TEST(nntrainerIniTest, backbone_p_17) { +TEST(nntrainerIniTest, DISABLED_backbone_p_17) { nntrainer::NeuralNetwork NN_scaled, NN_full; ScopedIni base("base", {conv2d_shape, conv2d + "input_layers=conv2d_shape"}); @@ -590,7 +597,7 @@ TEST(nntrainerIniTest, backbone_p_17) { * @brief Ini file unittest with backbone * @note Output layer name not found, epmty backbone */ -TEST(nntrainerIniTest, backbone_n_18) { +TEST(nntrainerIniTest, DISABLED_backbone_n_18) { nntrainer::NeuralNetwork NN; ScopedIni base("base", {input2d, conv2d + "input_layers=inputlayer", @@ -607,7 +614,7 @@ TEST(nntrainerIniTest, backbone_n_18) { * @brief Ini file unittest with backbone * @note Input layer name not found, epmty backbone */ -TEST(nntrainerIniTest, backbone_n_19) { +TEST(nntrainerIniTest, DISABLED_backbone_n_19) { nntrainer::NeuralNetwork NN; ScopedIni base("base", {input2d, conv2d + "input_layers=inputlayer", @@ -625,7 +632,7 @@ TEST(nntrainerIniTest, backbone_n_19) { * @brief Ini file unittest with backbone * @note input and output layer specified are found */ -TEST(nntrainerIniTest, backbone_p_20) { +TEST(nntrainerIniTest, DISABLED_backbone_p_20) { nntrainer::NeuralNetwork NN; ScopedIni base("base", @@ -647,10 +654,10 @@ TEST(nntrainerIniTest, backbone_p_20) { * it should be referred relative to the .ini * */ -TEST(nntrainerIniTest, backbone_relative_to_ini_p) { - ScopedIni b{getResPath("base"), {nw_base, batch_normal}}; +TEST(nntrainerIniTest, DISABLED_backbone_relative_to_ini_p) { + ScopedIni b{getResPath("base"), {nw_base_cross, batch_normal}}; ScopedIni s{getResPath("original"), - {nw_base + "loss=mse", adam, input, + {nw_base_cross + "loss=mse", adam, input, backbone_valid + "input_layers=inputlayer"}}; nntrainer::NeuralNetwork NN; @@ -665,10 +672,10 @@ TEST(nntrainerIniTest, backbone_relative_to_ini_p) { * it should be referred relative to the .ini * */ -TEST(nntrainerIniTest, backbone_from_different_directory_n) { - ScopedIni b{"base", {nw_base, batch_normal}}; +TEST(nntrainerIniTest, DISABLED_backbone_from_different_directory_n) { + ScopedIni b{"base", {nw_base_cross, batch_normal}}; ScopedIni s{getResPath("original"), - {nw_base + "loss=mse", adam, input, + {nw_base_cross + "loss=mse", adam, input, backbone_valid + "input_layers=inputlayer"}}; nntrainer::NeuralNetwork NN; @@ -681,10 +688,10 @@ TEST(nntrainerIniTest, backbone_from_different_directory_n) { * it should be referred relative to the .ini * */ -TEST(nntrainerIniTest, backbone_based_on_working_directory_p) { - ScopedIni b{getResPath("base", {"test"}), {nw_base, batch_normal}}; +TEST(nntrainerIniTest, DISABLED_backbone_based_on_working_directory_p) { + ScopedIni b{getResPath("base", {"test"}), {nw_base_cross, batch_normal}}; ScopedIni s{getResPath("original"), - {nw_base + "loss=mse", adam, input, + {nw_base_cross + "loss=mse", adam, input, backbone_valid + "input_layers=inputlayer"}}; nntrainer::AppContext ac(nntrainer::AppContext::Global()); @@ -697,10 +704,10 @@ TEST(nntrainerIniTest, backbone_based_on_working_directory_p) { /** * @brief Ini file unittest with distributed layer */ -TEST(nntrainerIniTest, distribute_p_01) { +TEST(nntrainerIniTest, DISABLED_distribute_p_01) { ScopedIni s{ "distribute_p1", - {nw_base, adam, + {nw_base_cross, adam, input + "-Activation" + "-Input_Shape" + "Input_Shape = 3:1:10:10", out + "distribute=true"}}; nntrainer::NeuralNetwork NN; -- 2.7.4