NNTRAINER_ROOT := $(LOCAL_PATH)/../../..
endif
- ML_API_COMMON_INCLUDES := ${NNTRAINER_ROOT}/ml_api_common/include
-NNTRAINER_INCLUDES := $(NNTRAINER_ROOT)/nntrainer \
- $(NNTRAINER_ROOT)/nntrainer/dataset \
- $(NNTRAINER_ROOT)/nntrainer/models \
- $(NNTRAINER_ROOT)/nntrainer/layers \
- $(NNTRAINER_ROOT)/nntrainer/compiler \
- $(NNTRAINER_ROOT)/nntrainer/graph \
- $(NNTRAINER_ROOT)/nntrainer/optimizers \
- $(NNTRAINER_ROOT)/nntrainer/tensor \
- $(NNTRAINER_ROOT)/nntrainer/utils \
+ML_API_COMMON_INCLUDES := ${NNTRAINER_ROOT}/ml_api_common/include
+
+NNTRAINER_INCLUDES := $(NNTRAINER_ROOT)/nntrainer/include \
$(NNTRAINER_ROOT)/api \
$(NNTRAINER_ROOT)/api/ccapi/include \
${ML_API_COMMON_INCLUDES}
LOCAL_CFLAGS += -std=c++17 -Ofast -mcpu=cortex-a53 -Ilz4-nougat/lib
LOCAL_LDFLAGS += -Llz4-nougat/lib/obj/local/$(TARGET_ARCH_ABI)/
LOCAL_CXXFLAGS += -std=c++17 -frtti
-LOCAL_CFLAGS += -pthread -fexceptions -fopenmp
+LOCAL_CFLAGS += -pthread -fexceptions -fopenmp -static-openmp
LOCAL_LDFLAGS += -fexceptions
LOCAL_MODULE_TAGS := optional
LOCAL_ARM_MODE := arm
LOCAL_MODULE := nntrainer_logistic
-LOCAL_LDLIBS := -llog -landroid -fopenmp
+LOCAL_LDLIBS := -llog -landroid -fopenmp -static-openmp
LOCAL_SRC_FILES := main.cpp
#include <cmath>
#include <fstream>
#include <iostream>
+#include <layer.h>
+#include <model.h>
+#include <optimizer.h>
#include <random>
#include <sstream>
-#include <databuffer.h>
-#include <neuralnet.h>
-#include <tensor.h>
-
std::string data_file;
const unsigned int total_train_data_size = 90;
bool training = false;
/**
- * @brief step function
- * @param[in] x value to be distinguished
- * @retval 0.0 or 1.0
- */
-float stepFunction(float x) {
- if (x > 0.5) {
- return 1.0;
- }
-
- if (x < 0.5) {
- return 0.0;
- }
-
- return x;
-}
-
-/**
 * @brief get i-th data
* @param[in] F file stream
* @param[out] input feature data
srand(time(NULL));
- auto data_train = ml::train::createDataset(ml::train::DatasetType::GENERATOR,
- getSample_train);
-
/**
* @brief Create NN
*/
- std::vector<std::vector<float>> inputVector, outputVector;
- nntrainer::NeuralNetwork NN;
+ std::unique_ptr<ml::train::Model> model;
+ model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
/**
* @brief Initialize NN with configuration file path
*/
try {
- NN.loadFromConfig(config);
- NN.compile();
- NN.initialize();
+ model->load(config, ml::train::ModelFormat::MODEL_FORMAT_INI);
+ model->compile();
+ model->initialize();
} catch (...) {
std::cerr << "Error during init" << std::endl;
return 0;
}
if (training) {
- NN.setDataset(ml::train::DatasetModeType::MODE_TRAIN,
- std::move(data_train));
+ auto data_train = ml::train::createDataset(
+ ml::train::DatasetType::GENERATOR, getSample_train);
+
+ model->setDataset(ml::train::DatasetModeType::MODE_TRAIN,
+ std::move(data_train));
try {
- NN.train({"save_path=" + weight_path});
+ model->train({"save_path=" + weight_path});
} catch (...) {
std::cerr << "Error during train" << std::endl;
return 0;
}
} else {
try {
- NN.load(weight_path, ml::train::ModelFormat::MODEL_FORMAT_BIN);
+ model->load(weight_path);
} catch (std::exception &e) {
std::cerr << "Error during loading weights: " << e.what() << "\n";
return 1;
}
std::ifstream dataFile(data_file);
int cn = 0;
+ std::vector<float *> in;
+ std::vector<float *> l;
+
+ auto step = [](const float result) {
+ if (result < 0.5) {
+ return 0;
+    } else {
+      return 1;
+    }
+  };
+
for (unsigned int j = 0; j < total_val_data_size; ++j) {
- nntrainer::Tensor d;
- std::vector<float> o;
- std::vector<float> l;
- o.resize(feature_size);
- l.resize(1);
- getData(dataFile, o.data(), l.data(), j);
+ float input[feature_size];
+ float label[1];
+
+ if (!getData(dataFile, input, label, j))
+      std::cerr << "error during reading file" << std::endl;
try {
- float answer =
- NN.forwarding({MAKE_SHARED_TENSOR(nntrainer::Tensor({o}))},
- {MAKE_SHARED_TENSOR(nntrainer::Tensor({l}))})[0]
- ->apply(stepFunction)
- .getValue(0, 0, 0, 0);
- std::cout << answer << " : " << l[0] << std::endl;
- cn += answer == l[0];
+
+ std::vector<float *> answer;
+
+ in.push_back(input);
+ l.push_back(label);
+
+ answer = model->inference(1, in, l);
+
+ in.clear();
+ l.clear();
+
+ int c = step(answer[0][0]);
+
+ if (c == int(label[0])) {
+ cn++;
+ std::cout << answer[0][0] << " - " << c << " : " << label[0]
+ << std::endl;
+ } else {
+        std::cout << "Wrong prediction: " << answer[0][0] << " " << label[0]
+ << std::endl;
+ }
} catch (...) {
std::cerr << "Error during forwarding the model" << std::endl;
return -1;
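
Taken together, the C++ hunks above replace the internal nntrainer::NeuralNetwork / Tensor usage with the public ml::train API. The following is a minimal, stand-alone sketch of how the migrated calls compose end to end; it only rearranges calls that already appear above, while the feature_size value, the getSample_train declaration, and the run() wrapper are illustrative placeholders for the definitions in main.cpp.

#include <dataset.h>
#include <model.h>

#include <memory>
#include <string>
#include <vector>

constexpr unsigned int feature_size = 2; /**< placeholder for the value defined in main.cpp */

/* training-sample generator defined in main.cpp; signature assumed from the
 * generator callback type accepted by ml::train::createDataset */
int getSample_train(float **input, float **label, bool *last, void *user_data);

int run(const std::string &config, const std::string &weight_path, bool training) {
  /* build the network from the INI description and prepare it */
  auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
  model->load(config, ml::train::ModelFormat::MODEL_FORMAT_INI);
  model->compile();
  model->initialize();

  if (training) {
    /* stream samples through the generator callback and save the trained weights */
    auto data_train = ml::train::createDataset(ml::train::DatasetType::GENERATOR, getSample_train);
    model->setDataset(ml::train::DatasetModeType::MODE_TRAIN, std::move(data_train));
    model->train({"save_path=" + weight_path});
  } else {
    /* reload the trained weights and run single-sample inference */
    model->load(weight_path);
    float input[feature_size] = {0.0f};
    float label[1] = {0.0f};
    std::vector<float *> in = {input};
    std::vector<float *> l = {label};
    std::vector<float *> answer = model->inference(1, in, l);
    /* answer[0][0] is the network output; threshold it at 0.5 as the loop above does */
  }
  return 0;
}
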
INIPARSER_INCLUDES := $(INIPARSER_ROOT)/src
LOCAL_ARM_NEON := true
-LOCAL_CFLAGS += -pthread -fexceptions -fopenmp
+LOCAL_CFLAGS += -pthread -fexceptions -fopenmp -static-openmp
LOCAL_CXXFLAGS += -std=c++17 -frtti -fexceptions
LOCAL_MODULE_TAGS := optional
-LOCAL_LDLIBS := -llog -landroid -fopenmp
+LOCAL_LDLIBS := -llog -landroid -fopenmp -static-openmp
LOCAL_MODULE := nntrainer
LOCAL_SRC_FILES := $(NNTRAINER_SRCS) $(INIPARSER_SRCS)
LOCAL_SHARED_LIBRARIES := ccapi-nntrainer ml-api-inference nntrainer
LOCAL_ARM_NEON := true
-LOCAL_CFLAGS += -pthread -fexceptions -fopenmp -DML_API_COMMON=1
+LOCAL_CFLAGS += -pthread -fexceptions -fopenmp -static-openmp -DML_API_COMMON=1
LOCAL_CXXFLAGS += -std=c++17 -frtti -fexceptions -DML_API_COMMON=1
LOCAL_MODULE_TAGS := optional
-LOCAL_LDLIBS := -llog -landroid -fopenmp
+LOCAL_LDLIBS := -llog -landroid -fopenmp -static-openmp
LOCAL_MODULE := capi-nntrainer
LOCAL_SRC_FILES := $(CAPI_NNTRAINER_SRCS)