namespace InferenceEngineImpl {
namespace TFLiteImpl {
-InferenceTFLite::InferenceTFLite(std::string protoFile, std::string weightFile,
- std::string userFile) :
+InferenceTFLite::InferenceTFLite(std::string protoFile, std::string weightFile) :
mConfigFile(protoFile),
- mWeightFile(weightFile),
- mUserFile(userFile)
+ mWeightFile(weightFile)
{
LOGI("ENTER");
LOGI("LEAVE");
}
-int InferenceTFLite::SetUserFile()
-{
- std::ifstream fp(mUserFile.c_str());
- if (!fp.is_open()) {
- return INFERENCE_ENGINE_ERROR_INVALID_PATH;
- }
-
- std::string userListName;
- while (!fp.eof()) {
- std::getline(fp, userListName);
- if (userListName.length())
- SetUserListName(userListName);
- }
-
- fp.close();
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
int InferenceTFLite::SetInputTensorParam()
{
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
}
-int InferenceTFLite::SetInputTensorParamInput(int width, int height, int dim, int ch)
-{
- mCh = ch;
- mDim = dim;
- mInputSize = cv::Size(width, height);
-
- LOGI("InputSize is %d x %d\n", mInputSize.width, mInputSize.height);
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceTFLite::SetInputTensorParamNorm(double deviation, double mean)
-{
- mDeviation = deviation;
- mMean = mean;
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
int InferenceTFLite::SetInputTensorParamNode(std::string node)
{
mInputLayer = node;
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
}
-int InferenceTFLite::SetOutputTensorParamThresHold(double threshold)
-{
- mThreshold = threshold;
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceTFLite::SetOutputTensorParamNumbers(int number)
-{
- mOutputNumbers = number;
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceTFLite::SetOutputTensorParamType(int type)
-{
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
int InferenceTFLite::SetOutputTensorParamNodes(std::vector<std::string> nodes)
{
mOutputLayer = nodes;
mInterpreter->SetNumThreads(MV_INFERENCE_TFLITE_MAX_THREAD_NUM);
- size_t userFileLength = mUserFile.length();
- if (userFileLength > 0 && access(mUserFile.c_str(), F_OK)) {
- LOGE("userFilePath in [%s] ", mUserFile.c_str());
- return INFERENCE_ENGINE_ERROR_INVALID_PATH;
- }
-
- ret = (userFileLength > 0) ? SetUserFile() : INFERENCE_ENGINE_ERROR_NONE;
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGW("Fail to read categoryList");
-
// input tensor
if (mInterpreter->inputs().size()) {
mInputLayerId = mInterpreter->inputs()[0];
return INFERENCE_ENGINE_ERROR_NONE;
}
-int InferenceTFLite::PrepareInputLayerPassage(inference_input_type_e type)
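+// Exposes the TFLite type of the input tensor (e.g. kTfLiteUInt8 or
+// kTfLiteFloat32) so callers can prepare input data in the right format.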
+int InferenceTFLite::GetInputLayerAttrType()
{
- switch (type) {
- case INFERENCE_INPUT_GENERAL:
- break;
- case INFERENCE_INPUT_IMAGE:
- {
- if (mInputAttrType == kTfLiteUInt8)
- mMatType = CV_8UC3;
- else if (mInputAttrType == kTfLiteFloat32)
- mMatType = CV_32FC3;
- else
- return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED_FORMAT;
-
- mInputBuffer = cv::Mat(mInputSize.height, mInputSize.width, mMatType, mInputData);
- }
- break;
- default:
- LOGE("Not supported");
- return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED_FORMAT;
- }
-
- return INFERENCE_ENGINE_ERROR_NONE;
+ return mInputAttrType;
}
-int InferenceTFLite::setInput(cv::Mat cvImg)
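+// Direct pointer to the interpreter's input buffer; callers fill it
+// in place before calling Run().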
+void * InferenceTFLite::GetInputDataPtr()
{
- mSourceSize = cvImg.size();
- int width = mInputSize.width;
- int height = mInputSize.height;
-
- //PreProcess();
- cv::Mat sample;
- if (cvImg.channels() == 3 && mCh == 1)
- cv::cvtColor(cvImg, sample, cv::COLOR_BGR2GRAY);
- else
- sample = cvImg;
-
- // size
- cv::Mat sampleResized;
- if (sample.size() != cv::Size(width, height))
- cv::resize(sample, sampleResized, cv::Size(width, height));
- else
- sampleResized = sample;
-
- // type
- cv::Mat sampleFloat;
- if (mCh == 3)
- sampleResized.convertTo(sampleFloat, CV_32FC3);
- else
- sampleResized.convertTo(sampleFloat, CV_32FC1);
-
- // normalize
- cv::Mat sampleNormalized;
- cv::Mat meanMat;
- if (mCh == 3)
- meanMat = cv::Mat(sampleFloat.size(), CV_32FC3, cv::Scalar((float)mMean, (float)mMean, (float)mMean));
- else
- meanMat = cv::Mat(sampleFloat.size(), CV_32FC1, cv::Scalar((float)mMean));
-
- cv::subtract(sampleFloat, meanMat, sampleNormalized);
-
- sampleNormalized /= (float)mDeviation;
-
- sampleNormalized.convertTo(mInputBuffer, mMatType);
+ return mInputData;
+}
+int InferenceTFLite::SetInputDataBuffer(tensor_t data)
+{
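+	// Currently a no-op: input is expected to be written in place
+	// through the pointer returned by GetInputDataPtr().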
return INFERENCE_ENGINE_ERROR_NONE;
}
-int InferenceTFLite::Run(cv::Mat tensor)
+int InferenceTFLite::Run()
{
- int ret = setInput(tensor);
-
+ LOGI("ENTER");
TfLiteStatus status = mInterpreter->Invoke();
if (status != kTfLiteOk) {
LOGE("Fail to invoke with kTfLiteError");
- ret = INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+ return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
- return ret;
+ LOGI("LEAVE");
+ return INFERENCE_ENGINE_ERROR_NONE;
}
int InferenceTFLite::Run(std::vector<float> tensor)
{
+ LOGI("ENTER");
int dataIdx = 0;
float * inputData = static_cast<float*>(mInputData);
	for( std::vector<float>::iterator iter = tensor.begin();
			iter != tensor.end(); ++iter) {
		inputData[dataIdx++] = *iter;
	}

	TfLiteStatus status = mInterpreter->Invoke();
	if (status != kTfLiteOk) {
		LOGE("Fail to invoke with kTfLiteError");
		return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
+ LOGI("LEAVE");
return INFERENCE_ENGINE_ERROR_NONE;
}
-int InferenceTFLite::GetInferenceResult(ImageClassificationResults& results)
-{
- // Will contain top N results in ascending order.
- std::vector<std::pair<float, int>> top_results;
- std::priority_queue<std::pair<float, int>,
- std::vector<std::pair<float, int>>,
- std::greater<std::pair<float, int>>> top_result_pq;
- float value;
-
- TfLiteIntArray* dims = mInterpreter->tensor(mOutputLayerId[0])->dims;
- const long count = dims->data[1];
-
- LOGI("dims size: %d", dims->size);
- for (int k = 0; k < dims->size; ++k) {
- LOGI("%d: %d", k, dims->data[k]);
- }
-
- float *prediction = mInterpreter->typed_output_tensor<float>(0);
- for (int i = 0; i < count; ++i) {
- value = prediction[i];
- // Only add it if it beats the threshold and has a chance at being in
- // the top N.
- top_result_pq.push(std::pair<float, int>(value, i));
-
- // If at capacity, kick the smallest value out.
- if (top_result_pq.size() > mOutputNumbers) {
- top_result_pq.pop();
- }
- }
-
- // Copy to output vector and reverse into descending order.
- while (!top_result_pq.empty()) {
- top_results.push_back(top_result_pq.top());
- top_result_pq.pop();
- }
- std::reverse(top_results.begin(), top_results.end());
-
- int classIdx = -1;
- results.number_of_classes = 0;
- for (int idx = 0; idx < mOutputNumbers; ++idx) {
- if (top_results[idx].first < mThreshold)
- continue;
- LOGI("idx:%d", idx);
- LOGI("classIdx: %d", top_results[idx].second);
- LOGI("classProb: %f", top_results[idx].first);
-
- classIdx = top_results[idx].second;
- results.indices.push_back(classIdx);
- results.confidences.push_back(top_results[idx].first);
- results.names.push_back(mUserListName[classIdx]);
- results.number_of_classes++;
- }
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceTFLite::GetInferenceResult(ObjectDetectionResults& results)
-{
- float* boxes = mInterpreter->typed_tensor<float>(mOutputLayerId[0]);
- float* classes = mInterpreter->typed_tensor<float>(mOutputLayerId[1]);
- float* scores = mInterpreter->typed_tensor<float>(mOutputLayerId[2]);
-
- int number_of_detections = (int)(*mInterpreter->typed_tensor<float>(mOutputLayerId[3]));
- int left, top, right, bottom;
- cv::Rect loc;
-
- results.number_of_objects = 0;
- for (int idx = 0; idx < number_of_detections; ++idx) {
- if (scores[idx] < mThreshold)
- continue;
-
- left = (int)(boxes[idx*4 + 1] * mSourceSize.width);
- top = (int)(boxes[idx*4 + 0] * mSourceSize.height);
- right = (int)(boxes[idx*4 + 3] * mSourceSize.width);
- bottom = (int)(boxes[idx*4 + 2] * mSourceSize.height);
-
- loc.x = left;
- loc.y = top;
- loc.width = right -left + 1;
- loc.height = bottom - top + 1;
-
- results.indices.push_back((int)classes[idx]);
- results.confidences.push_back(scores[idx]);
- results.names.push_back(mUserListName[(int)classes[idx]]);
- results.locations.push_back(loc);
- results.number_of_objects++;
-
- LOGI("objectClass: %d", (int)classes[idx]);
- LOGI("confidence:%f", scores[idx]);
- LOGI("left:%d, top:%d, right:%d, bottom:%d", left, top, right, bottom);
- }
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceTFLite::GetInferenceResult(FaceDetectionResults& results)
-{
- float* boxes = mInterpreter->typed_tensor<float>(mOutputLayerId[0]);
- float* classes = mInterpreter->typed_tensor<float>(mOutputLayerId[1]);
- float* scores = mInterpreter->typed_tensor<float>(mOutputLayerId[2]);
-
- int number_of_detections = (int)(*mInterpreter->typed_tensor<float>(mOutputLayerId[3]));
- int left, top, right, bottom;
- cv::Rect loc;
-
- results.number_of_faces = 0;
- for (int idx = 0; idx < number_of_detections; ++idx) {
- if (scores[idx] < mThreshold)
- continue;
-
- left = (int)(boxes[idx*4 + 1] * mSourceSize.width);
- top = (int)(boxes[idx*4 + 0] * mSourceSize.height);
- right = (int)(boxes[idx*4 + 3] * mSourceSize.width);
- bottom = (int)(boxes[idx*4 + 2] * mSourceSize.height);
-
- loc.x = left;
- loc.y = top;
- loc.width = right -left + 1;
- loc.height = bottom - top + 1;
-
- results.confidences.push_back(scores[idx]);
- results.locations.push_back(loc);
- results.number_of_faces++;
-
- LOGI("confidence:%f", scores[idx]);
- LOGI("class: %f", classes[idx]);
- LOGI("left:%f, top:%f, right:%f, bottom:%f", boxes[idx*4 + 1], boxes[idx*4 + 0], boxes[idx*4 + 3], boxes[idx*4 + 2]);
- LOGI("left:%d, top:%d, right:%d, bottom:%d", left, top, right, bottom);
- }
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceTFLite::GetInferenceResult(FacialLandMarkDetectionResults& results)
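+// Replaces the task-specific getters above: raw output tensors are
+// returned as dimension info plus data pointers, and interpretation
+// is left to the caller.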
+int InferenceTFLite::GetInferenceResult(tensor_t& results)
{
- TfLiteIntArray* dims = mInterpreter->tensor(mOutputLayerId[0])->dims;
- const long number_of_detections = dims->data[1];
- float* loc = mInterpreter->typed_tensor<float>(mOutputLayerId[0]);
-
- cv::Point point(0,0);
- results.number_of_landmarks = 0;
- LOGI("imgW:%d, imgH:%d", mSourceSize.width, mSourceSize.height);
- for (int idx = 0; idx < number_of_detections; idx+=2) {
- point.x = (int)(loc[idx] * mSourceSize.width);
- point.y = (int)(loc[idx+1] * mSourceSize.height);
-
- results.locations.push_back(point);
- results.number_of_landmarks++;
-
- LOGI("x:%d, y:%d", point.x, point.y);
- }
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
+ LOGI("ENTER");
-int InferenceTFLite::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results)
-{
- dimInfo.clear();
- results.clear();
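+	// Record the dimensions and a raw data pointer of every output tensor.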
	TfLiteIntArray* dims = NULL;
	std::vector<int> tmpDimInfo;
	for (unsigned int idx = 0; idx < mOutputLayerId.size(); ++idx) {
		dims = mInterpreter->tensor(mOutputLayerId[idx])->dims;

		tmpDimInfo.clear();
		for (int d = 0; d < dims->size; ++d) {
			tmpDimInfo.push_back(dims->data[d]);
		}
- dimInfo.push_back(tmpDimInfo);
- results.push_back(mInterpreter->typed_tensor<float>(mOutputLayerId[idx]));
+ results.dimInfo.push_back(tmpDimInfo);
+ results.data.push_back((void*)mInterpreter->typed_tensor<float>(mOutputLayerId[idx]));
}
+ LOGI("LEAVE");
return INFERENCE_ENGINE_ERROR_NONE;
}
-int InferenceTFLite::GetNumberOfOutputs()
-{
- return mOutputNumbers;
-}
-
-void InferenceTFLite::SetUserListName(std::string userlist)
-{
- mUserListName.push_back(userlist);
-}
-
extern "C"
{
-class IInferenceEngineVision* EngineVisionInit(std::string protoFile, std::string weightFile,
- std::string userFile)
-{
- InferenceTFLite *engine = new InferenceTFLite(protoFile, weightFile, userFile);
- return engine;
-}
-
-void EngineVisionDestroy(class IInferenceEngineVision *engine)
-{
- delete engine;
-}
-
-class IInferenceEngineCommon* EngineCommonInit(std::string protoFile, std::string weightFile,
- std::string userFile)
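+// Plain C entry point so the inference-engine-common layer can load
+// this backend as a plugin (most likely resolved via dlsym()).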
+class IInferenceEngineCommon* EngineCommonInit(std::string protoFile, std::string weightFile)
{
- InferenceTFLite *engine = new InferenceTFLite(protoFile, weightFile, userFile);
+ InferenceTFLite *engine = new InferenceTFLite(protoFile, weightFile);
return engine;
}
#ifndef __INFERENCE_ENGINE_IMPL_TFLite_H__
#define __INFERENCE_ENGINE_IMPL_TFLite_H__
-#include <inference_engine_vision.h>
+#include <inference_engine_common.h>
#include "tensorflow/contrib/lite/string.h"
#include "tensorflow/contrib/lite/kernels/register.h"
#include "tensorflow/contrib/lite/model.h"
#include "tensorflow/contrib/lite/context.h"
-#include <opencv2/core.hpp>
-#include <opencv2/imgproc.hpp>
#include <memory>
#include <dlog.h>
#define LOG_TAG "INFERENCE_ENGINE_TFLITE"
-using namespace InferenceEngineInterface::Vision;
using namespace InferenceEngineInterface::Common;
namespace InferenceEngineImpl {
namespace TFLiteImpl {
-class InferenceTFLite : public IInferenceEngineVision {
+class InferenceTFLite : public IInferenceEngineCommon {
public:
InferenceTFLite(std::string protoFile,
- std::string weightFile,
- std::string userFile);
+ std::string weightFile);
~InferenceTFLite();
int SetInputTensorParamNode(std::string node = "input") override;
- int SetInputTensorParamInput(int width, int height, int dim, int ch) override;
-
- int SetInputTensorParamNorm(double deviation = 1.0, double mean = 0.0) override;
-
// Output Tensor Params
int SetOutputTensorParam() override;
- int SetOutputTensorParamThresHold(double threshold) override;
-
- int SetOutputTensorParamNumbers(int number) override;
-
- int SetOutputTensorParamType(int type) override;
-
int SetOutputTensorParamNodes(std::vector<std::string> nodes) override;
int SetTargetDevice(inference_target_type_e type) override;
int CreateInputLayerPassage() override;
-	int PrepareInputLayerPassage(inference_input_type_e type) override;
-
-	int Run(cv::Mat tensor) override;
-
-	int Run(std::vector<float> tensor) override;
+	int GetInputLayerAttrType() override;

-	int GetInferenceResult(ImageClassificationResults& results);
+	void * GetInputDataPtr() override;

-	int GetInferenceResult(ObjectDetectionResults& results);
+	int SetInputDataBuffer(tensor_t data) override;

-	int GetInferenceResult(FaceDetectionResults& results);
+	int Run() override;

-	int GetInferenceResult(FacialLandMarkDetectionResults& results);
-
-	int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results);
-
-	int GetNumberOfOutputs() override;
+	int Run(std::vector<float> tensor) override;

-	void SetUserListName(std::string userList) override;
+	int GetInferenceResult(tensor_t& results);
-public:
- int SetUserFile();
- int setInput(cv::Mat cvImg);
private:
std::unique_ptr<tflite::Interpreter> mInterpreter;
int mInputLayerId;
std::vector<int> mOutputLayerId;
- int mMatType;
+
TfLiteType mInputAttrType;
void *mInputData;
- cv::Mat mInputBuffer;
-
- int mCh;
- int mDim;
- cv::Size mInputSize;
-
- double mDeviation;
- double mMean;
- double mThreshold;
- int mOutputNumbers;
- cv::Size mSourceSize;
std::string mConfigFile;
std::string mWeightFile;
- std::string mUserFile;
- std::vector<std::string> mUserListName;
};
} /* InferenceEngineImpl */