add_project_arguments('-DENABLE_TFLITE_BACKBONE=1', language:['c','cpp'])
endif
-gtest_dep = dependency('gtest', required: false)
+gtest_dep = dependency('gtest', static: true, main: false, required: false)
+gtest_main_dep = dependency('gtest', static: true, main: true, required: false)
opencv_dep = dependency('opencv', required: false)
if not opencv_dep.found()
endif
if get_option('enable-test')
- subdir('test')
+ if gtest_dep.found()
+ subdir('test')
+ else
+ error('test enabled but gtest not found')
+ endif
endif
if get_option('enable-nnstreamer-tensor-filter')
#ifdef __cplusplus
#include <fstream>
-#include <gtest/gtest.h>
#include <unordered_map>
#include <neuralnet.h>
} \
} while (0)
-#define ASSERT_EXCEPTION(TRY_BLOCK, EXCEPTION_TYPE, MESSAGE) \
- try { \
- TRY_BLOCK \
- FAIL() << "exception '" << MESSAGE << "' not thrown at all!"; \
- } catch (const EXCEPTION_TYPE &e) { \
- EXPECT_EQ(std::string(MESSAGE), e.what()) \
- << " exception message is incorrect. Expected the following " \
- "message:\n\n" \
- << MESSAGE << "\n"; \
- } catch (...) { \
- FAIL() << "exception '" << MESSAGE << "' not thrown with expected type '" \
- << #EXCEPTION_TYPE << "'!"; \
- }
-
#define RESET_CONFIG(conf_name) \
do { \
std::ifstream file_stream(conf_name, std::ifstream::in); \
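The hunk above removes the custom ASSERT_EXCEPTION macro from the shared test header. For type-only checks, gtest's built-in EXPECT_THROW already covers the use case, and a message check fits in a local try/catch. A minimal sketch of both patterns, outside the patch and with a hypothetical failing_call() helper:

#include <gtest/gtest.h>
#include <stdexcept>
#include <string>

// Hypothetical helper standing in for the code under test.
static void failing_call() { throw std::invalid_argument("bad input"); }

// Type-only check: the built-in gtest macro is enough.
TEST(ExceptionSketch, type_only) {
  EXPECT_THROW(failing_call(), std::invalid_argument);
}

// Type-and-message check: a local try/catch replaces the removed macro.
TEST(ExceptionSketch, type_and_message) {
  try {
    failing_call();
    FAIL() << "expected std::invalid_argument";
  } catch (const std::invalid_argument &e) {
    EXPECT_EQ(std::string("bad input"), e.what());
  }
}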
nntrainer_test_inc = include_directories('./include')
nntrainer_test_deps = [
- nntrainer_dep #gtest is linked in nntrainer_testutil_lib
+ nntrainer_dep
]
-# build test util when gtest is found
-if gtest_dep.found()
- nntrainer_testutil_lib = static_library(
- 'nntrainer_test_util',
- 'nntrainer_test_util.cpp',
- dependencies: [nntrainer_test_deps, gtest_dep],
- include_directories: nntrainer_test_inc
- )
- nntrainer_testutil_dep = declare_dependency(
- link_with: nntrainer_testutil_lib,
- include_directories: nntrainer_test_inc
- )
- nntrainer_test_deps += nntrainer_testutil_dep
-endif
+nntrainer_testutil_lib = static_library(
+ 'nntrainer_test_util',
+ 'nntrainer_test_util.cpp',
+ dependencies: [nntrainer_test_deps],
+ include_directories: nntrainer_test_inc
+)
+
+nntrainer_testutil_dep = declare_dependency(
+ link_with: nntrainer_testutil_lib,
+ include_directories: nntrainer_test_inc
+)
+
+nntrainer_test_deps += nntrainer_testutil_dep
+nntrainer_test_deps += gtest_dep
if get_option('enable-capi')
subdir('tizen_capi')
}
int status = nntrainer::getKeyValue(cur, key, value);
- EXPECT_EQ(status, ML_ERROR_NONE);
+ NNTR_THROW_IF(status != ML_ERROR_NONE, std::invalid_argument)
+ << "getKeyValue Failed";
entry[key] = value;
}
}
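Because the test utility no longer links gtest, the helper above reports a bad key/value pair by throwing through NNTR_THROW_IF instead of asserting with EXPECT_EQ. The real macro comes from nntrainer's error header; the stand-in below (SKETCH_THROW_IF, not part of the patch) only sketches the assumed stream-then-throw behaviour:

#include <sstream>
#include <stdexcept>

// Illustrative stand-in for NNTR_THROW_IF: collects the streamed message and
// throws the requested exception type from its destructor.
template <typename ExceptionT> struct ThrowStream {
  std::ostringstream msg;
  ~ThrowStream() noexcept(false) { throw ExceptionT(msg.str()); }
  template <typename T> ThrowStream &operator<<(const T &value) {
    msg << value;
    return *this;
  }
};

// Only constructs the thrower (and therefore throws) when the condition holds.
#define SKETCH_THROW_IF(cond, exception) \
  if (cond)                              \
  ThrowStream<exception>{}

// Usage mirroring the hunk above (status is a placeholder here):
// SKETCH_THROW_IF(status != 0, std::invalid_argument) << "getKeyValue failed";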
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
#include <nntrainer.h>
#include <nntrainer_internal.h>
status = ml_train_model_get_summary(handle, ML_TRAIN_SUMMARY_TENSOR, &sum);
EXPECT_EQ(status, ML_ERROR_NONE);
- EXPECT_GT(strlen(sum), 100);
+ EXPECT_GT(strlen(sum), 100u);
status = ml_train_model_destroy(handle);
EXPECT_EQ(status, ML_ERROR_NONE);
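The 100 -> 100u change here, like the 1u/3u/28u literals further down, avoids mixed signed/unsigned comparisons: strlen() and the TensorDim getters return unsigned types, so a plain int literal inside EXPECT_EQ/EXPECT_GT triggers -Wsign-compare on strict builds. A small reproduction of the pattern (placeholder data, not nntrainer code):

#include <gtest/gtest.h>
#include <cstring>

TEST(SignCompareSketch, unsigned_literals) {
  const char *summary = "a reasonably long summary string";
  // strlen() returns size_t, so the unsigned literal keeps both sides unsigned.
  EXPECT_GT(std::strlen(summary), 10u);

  unsigned int dim[4] = {1, 3, 2, 1}; // stand-in for the TensorDim getters
  EXPECT_EQ(dim[0], 1u);              // a plain 1 would mix signedness
  EXPECT_EQ(dim[2], 2u);
}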
* @author Parichay Kapoor <pk.kapoor@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
#include <nntrainer.h>
#include <nntrainer_internal.h>
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
#include <nntrainer.h>
#include <nntrainer_internal.h>
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
#include <nntrainer.h>
#include <nntrainer_internal.h>
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
+
#include "nntrainer_test_util.h"
#include <databuffer_file.h>
#include <fstream>
* @author Parichay Kapoor <pk.kapoor@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
#include <activation_layer.h>
-#include <gtest/gtest.h>
#include <neuralnet.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
+
#include <fstream>
#include <databuffer_file.h>
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
+
#include <fstream>
#include <regex>
EXPECT_EQ(status, ML_ERROR_NONE);
dim = layer.getInputDimension()[0];
- EXPECT_EQ(dim.getTensorDim(0), 1);
- EXPECT_EQ(dim.getTensorDim(1), 3);
- EXPECT_EQ(dim.getTensorDim(2), 2);
- EXPECT_EQ(dim.getTensorDim(3), 1);
+ EXPECT_EQ(dim.getTensorDim(0), 1u);
+ EXPECT_EQ(dim.getTensorDim(1), 3u);
+ EXPECT_EQ(dim.getTensorDim(2), 2u);
+ EXPECT_EQ(dim.getTensorDim(3), 1u);
}
TEST_F(nntrainer_InputLayer, set_property_03_p) {
EXPECT_EQ(status, ML_ERROR_NONE);
dim = layer.getInputDimension()[0];
- EXPECT_EQ(dim.getTensorDim(0), 1);
- EXPECT_EQ(dim.getTensorDim(1), 3);
- EXPECT_EQ(dim.getTensorDim(2), 2);
- EXPECT_EQ(dim.getTensorDim(3), 1);
+ EXPECT_EQ(dim.getTensorDim(0), 1u);
+ EXPECT_EQ(dim.getTensorDim(1), 3u);
+ EXPECT_EQ(dim.getTensorDim(2), 2u);
+ EXPECT_EQ(dim.getTensorDim(3), 1u);
}
TEST_F(nntrainer_InputLayer, set_property_04_p) {
/** Set input shape ignores batch size */
dim = layer.getInputDimension()[0];
- EXPECT_EQ(dim.getTensorDim(0), 1);
- EXPECT_EQ(dim.getTensorDim(1), 3);
- EXPECT_EQ(dim.getTensorDim(2), 2);
- EXPECT_EQ(dim.getTensorDim(3), 1);
+ EXPECT_EQ(dim.getTensorDim(0), 1u);
+ EXPECT_EQ(dim.getTensorDim(1), 3u);
+ EXPECT_EQ(dim.getTensorDim(2), 2u);
+ EXPECT_EQ(dim.getTensorDim(3), 1u);
}
TEST_F(nntrainer_InputLayer, set_property_05_p) {
EXPECT_EQ(status, ML_ERROR_NONE);
dim = layer.getInputDimension()[0];
- EXPECT_EQ(dim.getTensorDim(0), 5);
- EXPECT_EQ(dim.getTensorDim(1), 3);
- EXPECT_EQ(dim.getTensorDim(2), 28);
- EXPECT_EQ(dim.getTensorDim(3), 28);
+ EXPECT_EQ(dim.getTensorDim(0), 5u);
+ EXPECT_EQ(dim.getTensorDim(1), 3u);
+ EXPECT_EQ(dim.getTensorDim(2), 28u);
+ EXPECT_EQ(dim.getTensorDim(3), 28u);
/** Original batch size is retained */
status = setProperty("input_shape=1:3:2:1");
EXPECT_EQ(status, ML_ERROR_NONE);
dim = layer.getInputDimension()[0];
- EXPECT_EQ(dim.getTensorDim(0), 5);
- EXPECT_EQ(dim.getTensorDim(1), 3);
- EXPECT_EQ(dim.getTensorDim(2), 2);
- EXPECT_EQ(dim.getTensorDim(3), 1);
+ EXPECT_EQ(dim.getTensorDim(0), 5u);
+ EXPECT_EQ(dim.getTensorDim(1), 3u);
+ EXPECT_EQ(dim.getTensorDim(2), 2u);
+ EXPECT_EQ(dim.getTensorDim(3), 1u);
/** Original batch size is retained */
status = setProperty("input_shape=4:3:2:1");
EXPECT_EQ(status, ML_ERROR_NONE);
dim = layer.getInputDimension()[0];
- EXPECT_EQ(dim.getTensorDim(0), 5);
- EXPECT_EQ(dim.getTensorDim(1), 3);
- EXPECT_EQ(dim.getTensorDim(2), 2);
- EXPECT_EQ(dim.getTensorDim(3), 1);
+ EXPECT_EQ(dim.getTensorDim(0), 5u);
+ EXPECT_EQ(dim.getTensorDim(1), 3u);
+ EXPECT_EQ(dim.getTensorDim(2), 2u);
+ EXPECT_EQ(dim.getTensorDim(3), 1u);
}
/**
/** no name is set */
layer_name = layer0.getName();
- EXPECT_EQ(layer_name.length(), 0);
+ EXPECT_EQ(layer_name.length(), 0u);
/** Set empty name */
status = layer0.setProperty({"name="});
std::stringstream ss, ss2;
layer.printPreset(ss, nntrainer::Layer::PrintPreset::PRINT_ALL);
ss2 << layer;
- EXPECT_GT(ss.str().size(), 100);
- EXPECT_GT(ss2.str().size(), 100);
+ EXPECT_GT(ss.str().size(), 100u);
+ EXPECT_GT(ss2.str().size(), 100u);
}
/**
* @bug No known bugs except for NYI items
*
*/
+#include <gtest/gtest.h>
#include "nntrainer_test_util.h"
#include "util_func.h"
EXPECT_EQ(NN_full.getInputDimension()[0].channel(),
NN_scaled_zero.getInputDimension()[0].channel());
- EXPECT_EQ(1, NN_scaled_zero.getOutputDimension()[0].channel());
+ EXPECT_EQ(1u, NN_scaled_zero.getOutputDimension()[0].channel());
}
/**
EXPECT_EQ(NN.compile(), ML_ERROR_NONE);
EXPECT_EQ(NN.initialize(), ML_ERROR_NONE);
- EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3);
+ EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3u);
}
/**
EXPECT_EQ(NN.compile(), ML_ERROR_NONE);
EXPECT_EQ(NN.initialize(), ML_ERROR_NONE);
- EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3);
+ EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3u);
}
/**
EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE);
EXPECT_EQ(NN.compile(), ML_ERROR_NONE);
EXPECT_EQ(NN.initialize(), ML_ERROR_NONE);
- EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 6);
+ EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 6u);
}
/**
EXPECT_EQ(NN.loadFromConfig(ini_name), ML_ERROR_NONE);
EXPECT_EQ(NN.compile(), ML_ERROR_NONE);
EXPECT_EQ(NN.initialize(), ML_ERROR_NONE);
- EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3);
+ EXPECT_EQ(NN.getNetworkGraph().getSorted().size(), 3u);
// EXPECT_EQ(NN.getGraph().size(), 3);
}
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
+#include <gtest/gtest.h>
+
#include "nntrainer_test_util.h"
#include "util_func.h"
#include <fstream>
d.setTensorDim(2, 6);
d.setTensorDim(3, 7);
- EXPECT_EQ(d.batch(), 4);
- EXPECT_EQ(d.channel(), 5);
- EXPECT_EQ(d.height(), 6);
- EXPECT_EQ(d.width(), 7);
+ EXPECT_EQ(d.batch(), 4u);
+ EXPECT_EQ(d.channel(), 5u);
+ EXPECT_EQ(d.height(), 6u);
+ EXPECT_EQ(d.width(), 7u);
}
TEST(nntrainer_Tensor, TensorWrap_p) {
/** Changing the dim of a tensor only affects local copy of the dim */
A_dim.setTensorDim(1, 100);
- EXPECT_EQ(A_dim.getTensorDim(1), 100);
+ EXPECT_EQ(A_dim.getTensorDim(1), 100u);
nntrainer::TensorDim A_dim_2 = A.getDim();
- EXPECT_EQ(A_dim_2.getTensorDim(1), 4);
+ EXPECT_EQ(A_dim_2.getTensorDim(1), 4u);
}
TEST(nntrainer_Tensor, copy_and_reshape_n) {
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @bug No known bugs
*/
-
#include <gtest/gtest.h>
+
#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <nntrainer_logger.h>